Log Type: directory.info
Log Upload Time: Thu Jun 27 13:45:17 +0300 2019
Log Length: 36026
Showing 4096 bytes of 36026 total.
2018 ./__spark_libs__/xbean-asm5-shaded-4.4.jar
37438188  384 -r-xr-xr-x   1 yarn     hadoop     390733 Dec  6  2018 ./__spark_libs__/parquet-format-2.3.1.jar
37438189  788 -r-xr-xr-x   1 yarn     hadoop     805046 Dec  6  2018 ./__spark_libs__/zookeeper-3.4.6.3.1.0.0-78.jar
37438190  248 -r-xr-xr-x   1 yarn     hadoop     250989 Dec  6  2018 ./__spark_libs__/parquet-hadoop-1.8.3.jar
37438191  120 -r-xr-xr-x   1 yarn     hadoop     120512 Dec  6  2018 ./__spark_libs__/spark-repl_2.11-2.3.2.3.1.0.0-78.jar
37438192 2732 -r-xr-xr-x   1 yarn     hadoop    2796935 Dec  6  2018 ./__spark_libs__/parquet-hadoop-bundle-1.6.0.jar
37438193 2280 -r-xr-xr-x   1 yarn     hadoop    2333186 Dec  6  2018 ./__spark_libs__/zstd-jni-1.3.2-2.jar
37438194 1024 -r-xr-xr-x   1 yarn     hadoop    1048115 Dec  6  2018 ./__spark_libs__/parquet-jackson-1.8.3.jar
37438195  524 -r-xr-xr-x   1 yarn     hadoop     533455 Dec  6  2018 ./__spark_libs__/protobuf-java-2.5.0.jar
37438196  120 -r-xr-xr-x   1 yarn     hadoop     122774 Dec  6  2018 ./__spark_libs__/py4j-0.10.7.jar
37438197   96 -r-xr-xr-x   1 yarn     hadoop      94796 Dec  6  2018 ./__spark_libs__/pyrolite-4.13.jar
37438198 15128 -r-xr-xr-x   1 yarn     hadoop   15487351 Dec  6  2018 ./__spark_libs__/scala-compiler-2.11.8.jar
37438199 5612 -r-xr-xr-x   1 yarn     hadoop    5744974 Dec  6  2018 ./__spark_libs__/scala-library-2.11.8.jar
37438200 1776 -r-xr-xr-x   1 yarn     hadoop    1818085 Dec  6  2018 ./__spark_libs__/spark-hive-thriftserver_2.11-2.3.2.3.1.0.0-78.jar
37438201  416 -r-xr-xr-x   1 yarn     hadoop     423753 Dec  6  2018 ./__spark_libs__/scala-parser-combinators_2.11-1.0.4.jar
37438202 4468 -r-xr-xr-x   1 yarn     hadoop    4573750 Dec  6  2018 ./__spark_libs__/scala-reflect-2.11.8.jar
37438203  656 -r-xr-xr-x   1 yarn     hadoop     671138 Dec  6  2018 ./__spark_libs__/scala-xml_2.11-1.0.5.jar
37438204  788 -r-xr-xr-x   1 yarn     hadoop     802818 Dec  6  2018 ./__spark_libs__/scalap-2.11.8.jar
37438205 3444 -r-xr-xr-x   1 yarn     hadoop    3522616 Dec  6  2018 ./__spark_libs__/shapeless_2.11-2.3.2.jar
37438206   40 -r-xr-xr-x   1 yarn     hadoop      40509 Dec  6  2018 ./__spark_libs__/slf4j-api-1.7.16.jar
37438207   12 -r-xr-xr-x   1 yarn     hadoop       9939 Dec  6  2018 ./__spark_libs__/slf4j-log4j12-1.7.16.jar
37447136   48 -r-xr-xr-x   1 yarn     hadoop      48720 Dec  6  2018 ./__spark_libs__/snappy-0.2.jar
37447137 1032 -r-xr-xr-x   1 yarn     hadoop    1056168 Dec  6  2018 ./__spark_libs__/snappy-java-1.1.2.6.jar
37447138 1308 -r-xr-xr-x   1 yarn     hadoop    1335820 Dec  6  2018 ./__spark_libs__/spark-hive_2.11-2.3.2.3.1.0.0-78.jar
37447139 8812 -r-xr-xr-x   1 yarn     hadoop    9021968 Dec  6  2018 ./__spark_libs__/spark-catalyst_2.11-2.3.2.3.1.0.0-78.jar
37447140   52 -r-xr-xr-x   1 yarn     hadoop      53042 Dec  6  2018 ./__spark_libs__/spark-kvstore_2.11-2.3.2.3.1.0.0-78.jar
37447141 12828 -r-xr-xr-x   1 yarn     hadoop   13134090 Dec  6  2018 ./__spark_libs__/spark-core_2.11-2.3.2.3.1.0.0-78.jar
37447142   80 -r-xr-xr-x   1 yarn     hadoop      80174 Dec  6  2018 ./__spark_libs__/spark-launcher_2.11-2.3.2.3.1.0.0-78.jar
37447143  696 -r-xr-xr-x   1 yarn     hadoop     708861 Dec  6  2018 ./__spark_libs__/spark-graphx_2.11-2.3.2.3.1.0.0-78.jar
37447144  504 -r-xr-xr-x   1 yarn     hadoop     515306 Dec  6  2018 ./__spark_libs__/spark-hadoop-cloud_2.11-2.3.2.3.1.0.0-78.jar
37447145 2328 -r-xr-xr-x   1 yarn     hadoop    2381972 Dec  6  2018 ./__spark_libs__/spark-network-common_2.11-2.3.2.3.1.0.0-78.jar
37447146   68 -r-xr-xr-x   1 yarn     hadoop      67640 Dec  6  2018 ./__spark_libs__/spark-network-shuffle_2.11-2.3.2.3.1.0.0-78.jar
37447147   32 -r-xr-xr-x   1 yarn     hadoop      30094 Dec  6  2018 ./__spark_libs__/spark-sketch_2.11-2.3.2.3.1.0.0-78.jar
37447148 2124 -r-xr-xr-x   1 yarn     hadoop    2171189 Dec  6  2018 ./__spark_libs__/spark-streaming_2.11-2.3.2.3.1.0.0-78.jar
37447149   48 -r-xr-xr-x   1 yarn     hadoop      48971 Dec  6  2018 ./__spark_libs__/spark-unsafe_2.11-2.3.2.3.1.0.0-78.jar
broken symlinks(find -L . -maxdepth 5 -type l -ls):

Log Type: launch_container.sh
Log Upload Time: Thu Jun 27 13:45:17 +0300 2019
Log Length: 5949
Showing 4096 bytes of 5949 total.
rt NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
export NM_AUX_SERVICE_spark2_shuffle=""
export SPARK_YARN_STAGING_DIR="hdfs://testcluster/user/ngorodnov/.sparkStaging/application_1560759961486_0455"
export APP_SUBMIT_TIME_ENV="1561631756494"
export TIMELINE_FLOW_NAME_TAG="ru.croc.rosbank.cri.system.Main"
export TIMELINE_FLOW_VERSION_TAG="1"
export PYTHONHASHSEED="0"
export APPLICATION_WEB_PROXY_BASE="/proxy/application_1560759961486_0455"
export CLASSPATH="$PWD:$PWD/__spark_conf__:$PWD/__spark_libs__/*:$HADOOP_CONF_DIR:/usr/hdp/3.1.0.0-78/hadoop/*:/usr/hdp/3.1.0.0-78/hadoop/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/3.1.0.0-78/hadoop/lib/hadoop-lzo-0.6.0.3.1.0.0-78.jar:/etc/hadoop/conf/secure:$PWD/__spark_conf__/__hadoop_conf__"
export SPARK_USER="ngorodnov"
export TIMELINE_FLOW_RUN_ID_TAG="1561631756495"
echo "Setting up job resources"
ln -sf "/hadoop/yarn/local/usercache/ngorodnov/filecache/172/agg_calculator-1.0.0-jar-with-dependencies.jar" "__app__.jar"
ln -sf "/hadoop/yarn/local/usercache/ngorodnov/filecache/171/__spark_conf__.zip" "__spark_conf__"
ln -sf "/hadoop/yarn/local/filecache/421/adj_calc.conf" "adj_calc.conf"
ln -sf "/hadoop/yarn/local/filecache/11/spark2-hdp-hive-archive.tar.gz" "__hive_libs__"
ln -sf "/hadoop/yarn/local/filecache/10/spark2-hdp-yarn-archive.tar.gz" "__spark_libs__"
echo "Copying debugging information"
# Creating copy of launch script
cp "launch_container.sh" "/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/launch_container.sh"
chmod 640 "/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/directory.info"
ls -l 1>>"/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/directory.info"
find -L . -maxdepth 5 -ls 1>>"/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/directory.info"
echo "Launching container"
exec /bin/bash -c "LD_LIBRARY_PATH="/usr/hdp/current/hadoop-client/lib/native:/usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64:$LD_LIBRARY_PATH" $JAVA_HOME/bin/java -server -Xmx1024m -Djava.io.tmpdir=$PWD/tmp -Dhdp.version=3.1.0.0-78 -Dspark.yarn.app.container.log.dir=/hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001 org.apache.spark.deploy.yarn.ApplicationMaster --class 'ru.croc.rosbank.cri.system.Main' --jar file:/home/ngorodnov/MY_JAR/agg_calculator-1.0.0-jar-with-dependencies.jar --arg 'BRANCH_ID=0000' --arg 'BUSINESS_DATE=2019-02-14 21:00:00.0 ' --arg 'BATCH_ID=1' --arg 'DB_NAME=cri' --arg 'CONFIG=adj_calc.conf' --properties-file $PWD/__spark_conf__/__spark_conf__.properties 1> /hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/stdout 2> /hadoop/yarn/log/application_1560759961486_0455/container_e41_1560759961486_0455_01_000001/stderr"
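The exec line above is the command YARN assembled from the original cluster-mode submission: --class and --jar identify the user application, each --arg carries one program argument, and --properties-file points at the staged Spark configuration. Purely as an illustration, the sketch below shows an equivalent programmatic submission through Spark's launcher API; the Spark home path, local config path, driver-memory value and waiting loop are assumptions, not taken from this log.

import org.apache.spark.launcher.{SparkAppHandle, SparkLauncher}

object SubmitAggCalculator {
  def main(args: Array[String]): Unit = {
    // Hypothetical equivalent of the submission that produced the container above.
    val handle: SparkAppHandle = new SparkLauncher()
      .setSparkHome("/usr/hdp/current/spark2-client")       // assumed HDP client location
      .setMaster("yarn")
      .setDeployMode("cluster")
      .setAppResource("/home/ngorodnov/MY_JAR/agg_calculator-1.0.0-jar-with-dependencies.jar")
      .setMainClass("ru.croc.rosbank.cri.system.Main")
      .addAppArgs("BRANCH_ID=0000", "BUSINESS_DATE=2019-02-14 21:00:00.0",
        "BATCH_ID=1", "DB_NAME=cri", "CONFIG=adj_calc.conf")
      .addFile("/path/to/adj_calc.conf")                     // hypothetical local path to the config
      .setConf(SparkLauncher.DRIVER_MEMORY, "1g")            // matches the -Xmx1024m seen above
      .startApplication()

    // Block until YARN reports a terminal state (FINISHED, FAILED or KILLED).
    while (!handle.getState.isFinal) Thread.sleep(1000)
    println(s"Final state: ${handle.getState}")
  }
}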
Log Type: prelaunch.err
Log Upload Time: Thu Jun 27 13:45:17 +0300 2019
Log Length: 0

Log Type: prelaunch.out
Log Upload Time: Thu Jun 27 13:45:17 +0300 2019
Log Length: 100
Setting up env variables
Setting up job resources
Copying debugging information
Launching container

Log Type: stderr
Log Upload Time: Thu Jun 27 13:45:17 +0300 2019
Log Length: 3408511
Showing 4096 bytes of 3408511 total.
erated in 12.868744 ms
19/06/27 13:40:25 INFO CodeGenerator: Code generated in 10.571408 ms
19/06/27 13:40:26 INFO CodeGenerator: Code generated in 22.256257 ms
19/06/27 13:40:33 ERROR ApplicationMaster: User class threw exception: java.lang.NullPointerException
java.lang.NullPointerException
    at org.apache.spark.sql.Dataset$$anonfun$33.apply(Dataset.scala:2195)
    at org.apache.spark.sql.Dataset$$anonfun$33.apply(Dataset.scala:2195)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.immutable.Map$Map1.foreach(Map.scala:116)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    at scala.collection.AbstractTraversable.map(Traversable.scala:104)
    at org.apache.spark.sql.Dataset.withColumns(Dataset.scala:2195)
    at org.apache.spark.sql.Dataset.withColumn(Dataset.scala:2164)
    at ru.croc.rosbank.cri.system.Main$.main(Main.scala:37)
    at ru.croc.rosbank.cri.system.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$4.run(ApplicationMaster.scala:721)
19/06/27 13:40:33 INFO ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: java.lang.NullPointerException
    at org.apache.spark.sql.Dataset$$anonfun$33.apply(Dataset.scala:2195)
    at org.apache.spark.sql.Dataset$$anonfun$33.apply(Dataset.scala:2195)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.immutable.Map$Map1.foreach(Map.scala:116)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    at scala.collection.AbstractTraversable.map(Traversable.scala:104)
    at org.apache.spark.sql.Dataset.withColumns(Dataset.scala:2195)
    at org.apache.spark.sql.Dataset.withColumn(Dataset.scala:2164)
    at ru.croc.rosbank.cri.system.Main$.main(Main.scala:37)
    at ru.croc.rosbank.cri.system.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$4.run(ApplicationMaster.scala:721)
)
19/06/27 13:40:33 INFO SparkContext: Invoking stop() from shutdown hook
19/06/27 13:40:33 INFO AbstractConnector: Stopped Spark@1ccedba9{HTTP/1.1,[http/1.1]}{0.0.0.0:0}
19/06/27 13:40:33 INFO SparkUI: Stopped Spark web UI at http://ds03.localdomain:43771
19/06/27 13:40:33 INFO YarnAllocator: Driver requested a total number of 0 executor(s).
19/06/27 13:40:33 INFO YarnClusterSchedulerBackend: Shutting down all executors
19/06/27 13:40:33 INFO YarnSchedulerBackend$YarnDriverEndpoint: Asking each executor to shut down
19/06/27 13:40:33 INFO SchedulerExtensionServices: Stopping SchedulerExtensionServices
(serviceOption=None,
 services=List(),
 started=false)
19/06/27 13:40:33 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
19/06/27 13:40:33 INFO MemoryStore: MemoryStore cleared
19/06/27 13:40:33 INFO BlockManager: BlockManager stopped
19/06/27 13:40:33 INFO BlockManagerMaster: BlockManagerMaster stopped
19/06/27 13:40:33 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
19/06/27 13:40:33 INFO SparkContext: Successfully stopped SparkContext
19/06/27 13:40:33 INFO ShutdownHookManager: Shutdown hook called
19/06/27 13:40:33 INFO ShutdownHookManager: Deleting directory /hadoop/yarn/local/usercache/ngorodnov/appcache/application_1560759961486_0455/spark-545f870a-ad93-49e1-8855-ef7690991893
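The stderr above shows the user class failing with a NullPointerException thrown from Dataset.withColumn, which Main.scala calls at line 37. In Spark 2.3.x this particular stack (a map over a one-element Map inside Dataset.withColumns) typically appears when the Column object handed to withColumn is itself null, for example because a config-driven lookup returned nothing. The sketch below is hypothetical (the column and variable names are assumptions, not taken from the real application) and only illustrates how such a null slips through and one way to guard against it.

import org.apache.spark.sql.{Column, SparkSession}
import org.apache.spark.sql.functions.lit

object WithColumnNpeSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("withColumn-npe-sketch").getOrCreate()
    import spark.implicits._

    val df = Seq((1, "a"), (2, "b")).toDF("id", "name")

    // Hypothetical config-driven lookup; java.util.Map.get returns null for a missing key.
    val exprByName = new java.util.HashMap[String, Column]()
    val missing: Column = exprByName.get("adjustment")   // null when the key is absent

    // df.withColumn("adjustment", missing)              // throws java.lang.NullPointerException
                                                         // with the same stack as above

    // Guarded variant: substitute an explicit literal (or fail with a clear message) instead.
    val adjustment = Option(missing).getOrElse(lit(null))
    df.withColumn("adjustment", adjustment).show()
  }
}

The exception is raised while the new projection is still being built, before any analysis runs, which is why the log shows a bare NullPointerException rather than an AnalysisException naming the offending column.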
Log Type: stdout
Log Upload Time: Thu Jun 27 13:45:17 +0300 2019
Log Length: 193
INFO: Configuration was loaded from file: adj_calc.conf
adj_cri.
CRILIST_ACC IS LOADED
Debug
==============================
Debug step12FinalTransactionsTemplate
==============================