Client
export HADOOP_OPTS="$HADOOP_OPTS -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000"
hadoop --loglevel INFO jar /soft/hadoop-3.1.3/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.3.jar wordcount /tmp/input1 /tmp/outputa1
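With `suspend=y`, the client JVM (`org.apache.hadoop.util.RunJar`) stops at startup and waits for a debugger on port 8000 before submitting the job. A minimal way to attach from another shell, assuming a JDK `jdb` is on the PATH and the job was submitted on `hadoop3` (an IDE remote-debug configuration pointed at the same host/port works just as well):

```
# attach the command-line debugger to the suspended client JVM (host and port from the command above)
jdb -connect com.sun.jdi.SocketAttach:hostname=hadoop3,port=8000
```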
- Running instance
- Process
/usr/java/jdk1.8.0_131/bin/java -Dproc_jar -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Dyarn.log.dir=/soft/hadoop-3.1.3/logs -Dyarn.log.file=hadoop.log -Dyarn.home.dir=/soft/hadoop-3.1.3 -Dyarn.root.logger=INFO,console -Djava.library.path=/soft/hadoop-3.1.3/lib/native -Dhadoop.log.dir=/soft/hadoop-3.1.3/logs -Dhadoop.log.file=hadoop.log -Dhadoop.home.dir=/soft/hadoop-3.1.3 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,console -Dhadoop.policy.file=hadoop-policy.xml -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.util.RunJar /soft/hadoop-3.1.3/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.3.jar wordcount /tmp/input1 /tmp/outputa1
- Generated files
[root@hadoop3 hadoop-3.1.3]# hdfs dfs -ls -h /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003
Found 4 items
-rw-r--r-- 10 root supergroup 309.0 K 2021-01-21 20:31 /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.jar
-rw-r--r-- 10 root supergroup 109 2021-01-21 21:02 /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.split
-rw-r--r-- 1 root supergroup 21 2021-01-21 21:04 /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.splitmetainfo
-rw-r--r-- 1 root supergroup 181.6 K 2021-01-21 21:34 /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.xml
[root@hadoop3 hadoop-3.1.3]# hdfs dfs -cat /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.xml|grep jar
<property><name>mapreduce.job.jar</name><value>/tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.jar</value><final>false</final><source>programmatically</source></property>
[root@hadoop3 hadoop-3.1.3]# hdfs dfs -cat /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.split
SPL/org.apache.hadoop.mapreduce.lib.input.FileSplit%hdfs://localhost:9000/tmp/input1/test
[root@hadoop3 hadoop-3.1.3]#
[root@hadoop3 hadoop-3.1.3]# hdfs dfs -cat /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.splitmetainfo
META-SPLhadoop3
[root@hadoop3 hadoop-3.1.3]#
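Note the replication counts in the listing: `job.jar` and `job.split` are uploaded with a higher replication factor (10 here), which appears to come from the submit-file replication setting so that many task nodes can localize them without hot-spotting a single DataNode, while `job.splitmetainfo` and `job.xml` keep the filesystem default. A quick check against the submitted configuration, reusing the grep pattern from above (the property name `mapreduce.client.submit.file.replication` is the MapReduce default being assumed here):

```
# inspect the effective submit-file replication recorded in job.xml
hdfs dfs -cat /tmp/hadoop-yarn/staging/root/.staging/job_1611148054091_0003/job.xml | grep -o 'mapreduce.client.submit.file.replication</name><value>[^<]*'
```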
AM
hadoop --loglevel INFO jar /soft/hadoop-3.1.3/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.3.jar wordcount -D yarn.app.mapreduce.am.command-opts="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000" /tmp/input1 /tmp/output8
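Unlike the client case, the MRAppMaster runs inside a YARN container on one of the NodeManagers, so the first step is to find which host holds the AM container and then attach to port 8000 there. A sketch using the YARN CLI (the application/attempt IDs below are placeholders taken from this run):

```
# find the AM attempt and its container/host, then attach to the suspended MRAppMaster
yarn applicationattempt -list application_1611148054091_0008
yarn container -list appattempt_1611148054091_0008_000001
jdb -connect com.sun.jdi.SocketAttach:hostname=hadoop3,port=8000
```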
- Running instance
- Process
|-java,2960 -Dproc_nodemanager -Djava.net.preferIPv4Stack=true -Dyarn.log.dir=/soft/hadoop-3.1.3/logs -Dyarn.log.file=hadoop-root-nodemanager-hadoop3.log -Dyarn.home.dir=/soft/hadoop-3.1.3 -Dyarn.root.logger=INFO,console -Djava.library.path=/soft/hadoop-3.1.3/lib/native -Dhadoop.log.dir=/soft/hadoop-3.1.3/logs -Dhadoop.log.file=hadoop-root-nodemanager-hadoop3.log -Dhadoop.home.dir=/soft/hadoop-3.1.3 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,RFA -Dhadoop.policy.file=hadoop-policy.xml -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.yarn.server.nodemanager.NodeManager
| |-bash,17089 /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/default_container_executor.sh
| | `-bash,17091 -c /usr/java/jdk1.8.0_131/bin/java -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 org.apache.hadoop.mapreduce.v2.app.MRAppMaster 1>/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/stdout 2>/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/stderr
| | `-java,17104 -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 org.apache.hadoop.mapreduce.v2.app.MRAppMaster
- Process working directory
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/default_container_executor.sh
#!/bin/bash
/bin/bash "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/default_container_executor_session.sh"
rc=$?
echo $rc > "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611148054091_0008/container_1611148054091_0008_01_000001/container_1611148054091_0008_01_000001.pid.exitcode.tmp"
/bin/mv -f "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611148054091_0008/container_1611148054091_0008_01_000001/container_1611148054091_0008_01_000001.pid.exitcode.tmp" "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611148054091_0008/container_1611148054091_0008_01_000001/container_1611148054091_0008_01_000001.pid.exitcode"
exit $rc
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/default_container_executor_session.sh
#!/bin/bash
echo $$ > /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611148054091_0008/container_1611148054091_0008_01_000001/container_1611148054091_0008_01_000001.pid.tmp
/bin/mv -f /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611148054091_0008/container_1611148054091_0008_01_000001/container_1611148054091_0008_01_000001.pid.tmp /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611148054091_0008/container_1611148054091_0008_01_000001/container_1611148054091_0008_01_000001.pid
exec setsid /bin/bash "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/launch_container.sh"
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/launch_container.sh
#!/bin/bash
set -o pipefail -e
export PRELAUNCH_OUT="/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/prelaunch.out"
exec >"${PRELAUNCH_OUT}"
export PRELAUNCH_ERR="/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/prelaunch.err"
exec 2>"${PRELAUNCH_ERR}"
echo "Setting up env variables"
export JAVA_HOME=${JAVA_HOME:-"/usr/java/jdk1.8.0_131"}
export HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/soft/conf/hadoop"}
export HADOOP_YARN_HOME=${HADOOP_YARN_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_HOME=${HADOOP_HOME:-"/soft/hadoop-3.1.3"}
export PATH=${PATH:-"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/java/jdk1.8.0_131/bin:/usr/java/jdk1.8.0_131/bin:/soft/hadoop-3.1.3/bin:/soft/hive-3.1.2/bin"}
export LANG=${LANG:-"zh_CN.UTF-8"}
export HADOOP_TOKEN_FILE_LOCATION="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001/container_tokens"
export CONTAINER_ID="container_1611148054091_0008_01_000001"
export NM_PORT="41223"
export NM_HOST="hadoop3"
export NM_HTTP_PORT="8042"
export LOCAL_DIRS="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008"
export LOCAL_USER_DIRS="/tmp/hadoop-root/nm-local-dir/usercache/root/"
export LOG_DIRS="/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001"
export USER="root"
export LOGNAME="root"
export HOME="/home/"
export PWD="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/container_1611148054091_0008_01_000001"
export JVM_PID="$$"
export MALLOC_ARENA_MAX="4"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
export APPLICATION_WEB_PROXY_BASE="/proxy/application_1611148054091_0008"
export SHELL="/bin/bash"
export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
export CLASSPATH="$PWD:$HADOOP_CONF_DIR:$HADOOP_COMMON_HOME/share/hadoop/common/*:$HADOOP_COMMON_HOME/share/hadoop/common/lib/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*:$HADOOP_YARN_HOME/share/hadoop/yarn/*:$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*:job.jar/*:job.jar/classes/:job.jar/lib/*:$PWD/*"
export APP_SUBMIT_TIME_ENV="1611408578383"
export LD_LIBRARY_PATH="$PWD:$HADOOP_COMMON_HOME/lib/native"
echo "Setting up job resources"
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/13/job.xml" "job.xml"
mkdir -p jobSubmitDir
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/12/job.split" "jobSubmitDir/job.split"
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/11/job.jar" "job.jar"
mkdir -p jobSubmitDir
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/10/job.splitmetainfo" "jobSubmitDir/job.splitmetainfo"
echo "Copying debugging information"
# Creating copy of launch script
cp "launch_container.sh" "/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/launch_container.sh"
chmod 640 "/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/directory.info"
ls -l 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/directory.info"
find -L . -maxdepth 5 -ls 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/directory.info"
echo "Launching container"
exec /bin/bash -c "$JAVA_HOME/bin/java -Djava.io.tmpdir=$PWD/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 org.apache.hadoop.mapreduce.v2.app.MRAppMaster 1>/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/stdout 2>/soft/hadoop-3.1.3/logs/userlogs/application_1611148054091_0008/container_1611148054091_0008_01_000001/stderr "
[root@hadoop3 hadoop-3.1.3]# tree -a /tmp/hadoop-root/nm-local-dir/
/tmp/hadoop-root/nm-local-dir/
|-- filecache
|-- nmPrivate
| `-- application_1611148054091_0008
| `-- container_1611148054091_0008_02_000001
| |-- .container_1611148054091_0008_02_000001.tokens.crc
| |-- .launch_container.sh.crc
| |-- container_1611148054091_0008_02_000001.pid
| |-- container_1611148054091_0008_02_000001.tokens
| `-- launch_container.sh
`-- usercache
`-- root
|-- appcache
| `-- application_1611148054091_0008
| |-- container_1611148054091_0008_02_000001
| | |-- .container_tokens.crc
| | |-- .default_container_executor.sh.crc
| | |-- .default_container_executor_session.sh.crc
| | |-- .launch_container.sh.crc
| | |-- container_tokens
| | |-- default_container_executor.sh
| | |-- default_container_executor_session.sh
| | |-- job.jar -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/11/job.jar
| | |-- job.xml -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/13/job.xml
| | |-- jobSubmitDir
| | | |-- job.split -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/12/job.split
| | | `-- job.splitmetainfo -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611148054091_0008/filecache/10/job.splitmetainfo
| | |-- launch_container.sh
| | `-- tmp
| `-- filecache
| |-- 10
| | |-- .job.splitmetainfo.crc
| | `-- job.splitmetainfo
| |-- 11
| | `-- job.jar
| | `-- job.jar
| |-- 12
| | |-- .job.split.crc
| | `-- job.split
| `-- 13
| |-- .job.xml.crc
| `-- job.xml
`-- filecache
Map
/usr/java/jdk1.8.0_131/bin/java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1605039875997_0023/container_1605039875997_0023_01_000002/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1605039875997_0023/container_1605039875997_0023_01_000002 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog org.apache.hadoop.mapred.YarnChild 172.17.0.2 46435 attempt_1605039875997_0023_m_000000_0 2
hadoop --loglevel INFO jar /soft/hadoop-3.1.3/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.3.jar wordcount -D mapreduce.task.timeout=100000000 -D mapreduce.map.java.opts="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m" -D mapreduce.input.fileinputformat.split.maxsize=1000 -D mapreduce.task.io.sort.mb=1 -D mapreduce.job.reduces=3 /tmp/data.txt /tmp/o7
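The extra `-D` options above are only there to make the map phase easy to observe while suspended: `mapreduce.input.fileinputformat.split.maxsize=1000` forces many small splits (hence many map tasks) and `mapreduce.task.io.sort.mb=1` forces frequent spills. Before attaching, it can help to confirm the debug opts actually reached the job configuration and that the JDWP agent is listening on the NodeManager (the job ID and port below are taken from this run):

```
# verify the map JVM opts recorded in the submitted job.xml
hdfs dfs -cat /tmp/hadoop-yarn/staging/root/.staging/job_1611734493242_0014/job.xml | grep -o 'mapreduce.map.java.opts</name><value>[^<]*'
# on the NodeManager hosting the map attempt, confirm the JDWP port is open before attaching
ss -tnlp | grep ':8000'
```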
- Process
pstree -al -H 995
|-java -Dproc_nodemanager -Djava.net.preferIPv4Stack=true -Dyarn.log.dir=/soft/hadoop-3.1.3/logs -Dyarn.log.file=hadoop-root-nodemanager-hadoop3.log -Dyarn.home.dir=/soft/hadoop-3.1.3 -Dyarn.root.logger=INFO,console -Djava.library.path=/soft/hadoop-3.1.3/lib/native -Dhadoop.log.dir=/soft/hadoop-3.1.3/logs -Dhadoop.log.file=hadoop-root-nodemanager-hadoop3.log -Dhadoop.home.dir=/soft/hadoop-3.1.3 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,RFA -Dhadoop.policy.file=hadoop-policy.xml -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.yarn.server.nodemanager.NodeManager
| |-bash /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/default_container_executor.sh
| | `-bash -c /usr/java/jdk1.8.0_131/bin/java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog org.apache.hadoop.mapred.YarnChild 172.17.0.4 45767 attempt_1611734493242_0014_m_000000_1 3 1>/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/stdout 2>/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/stderr
| | `-java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog org.apache.hadoop.mapred.YarnChild 172.17.0.4 45767 attempt_1611734493242_0014_m_000000_1 3
- Process working directory
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/default_container_executor.sh
#!/bin/bash
/bin/bash "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/default_container_executor_session.sh"
rc=$?
echo $rc > "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611734493242_0014/container_1611734493242_0014_01_000003/container_1611734493242_0014_01_000003.pid.exitcode.tmp"
/bin/mv -f "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611734493242_0014/container_1611734493242_0014_01_000003/container_1611734493242_0014_01_000003.pid.exitcode.tmp" "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611734493242_0014/container_1611734493242_0014_01_000003/container_1611734493242_0014_01_000003.pid.exitcode"
exit $rc
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/default_container_executor_session.sh
#!/bin/bash
echo $$ > /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611734493242_0014/container_1611734493242_0014_01_000003/container_1611734493242_0014_01_000003.pid.tmp
/bin/mv -f /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611734493242_0014/container_1611734493242_0014_01_000003/container_1611734493242_0014_01_000003.pid.tmp /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1611734493242_0014/container_1611734493242_0014_01_000003/container_1611734493242_0014_01_000003.pid
exec setsid /bin/bash "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/launch_container.sh"
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/launch_container.sh
#!/bin/bash
set -o pipefail -e
export PRELAUNCH_OUT="/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/prelaunch.out"
exec >"${PRELAUNCH_OUT}"
export PRELAUNCH_ERR="/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/prelaunch.err"
exec 2>"${PRELAUNCH_ERR}"
echo "Setting up env variables"
export JAVA_HOME=${JAVA_HOME:-"/usr/java/jdk1.8.0_131"}
export HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/soft/conf/hadoop"}
export HADOOP_YARN_HOME=${HADOOP_YARN_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_HOME=${HADOOP_HOME:-"/soft/hadoop-3.1.3"}
export PATH=${PATH:-"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin"}
export HADOOP_TOKEN_FILE_LOCATION="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003/container_tokens"
export CONTAINER_ID="container_1611734493242_0014_01_000003"
export NM_PORT="43845"
export NM_HOST="hadoop3"
export NM_HTTP_PORT="8042"
export LOCAL_DIRS="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014"
export LOCAL_USER_DIRS="/tmp/hadoop-root/nm-local-dir/usercache/root/"
export LOG_DIRS="/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003"
export USER="root"
export LOGNAME="root"
export HOME="/home/"
export PWD="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000003"
export JVM_PID="$$"
export MALLOC_ARENA_MAX="4"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
export STDOUT_LOGFILE_ENV="/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/stdout"
export SHELL="/bin/bash"
export HADOOP_ROOT_LOGGER="INFO,console"
export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
export CLASSPATH="$PWD:$HADOOP_CONF_DIR:$HADOOP_COMMON_HOME/share/hadoop/common/*:$HADOOP_COMMON_HOME/share/hadoop/common/lib/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*:$HADOOP_YARN_HOME/share/hadoop/yarn/*:$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*:job.jar/*:job.jar/classes/:job.jar/lib/*:$PWD/*"
export LD_LIBRARY_PATH="$PWD:$HADOOP_COMMON_HOME/lib/native"
export STDERR_LOGFILE_ENV="/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/stderr"
export HADOOP_CLIENT_OPTS=""
echo "Setting up job resources"
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/11/job.jar" "job.jar"
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/13/job.xml" "job.xml"
echo "Copying debugging information"
# Creating copy of launch script
cp "launch_container.sh" "/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/launch_container.sh"
chmod 640 "/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/directory.info"
ls -l 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/directory.info"
find -L . -maxdepth 5 -ls 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/directory.info"
echo "Launching container"
exec /bin/bash -c "$JAVA_HOME/bin/java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=$PWD/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog org.apache.hadoop.mapred.YarnChild 172.17.0.4 45767 attempt_1611734493242_0014_m_000000_1 3 1>/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/stdout 2>/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000003/stderr "
[root@hadoop3 hadoop-3.1.3]# tree -a /tmp/hadoop-root/nm-local-dir/
/tmp/hadoop-root/nm-local-dir/
|-- filecache
|-- nmPrivate
| `-- application_1611734493242_0014
| |-- container_1611734493242_0014_01_000001
| | |-- .container_1611734493242_0014_01_000001.tokens.crc
| | |-- .launch_container.sh.crc
| | |-- container_1611734493242_0014_01_000001.pid
| | |-- container_1611734493242_0014_01_000001.tokens
| | `-- launch_container.sh
| `-- container_1611734493242_0014_01_000003
| |-- .container_1611734493242_0014_01_000003.tokens.crc
| |-- .launch_container.sh.crc
| |-- container_1611734493242_0014_01_000003.pid
| |-- container_1611734493242_0014_01_000003.tokens
| `-- launch_container.sh
`-- usercache
`-- root
|-- appcache
| `-- application_1611734493242_0014
| |-- container_1611734493242_0014_01_000001
| | |-- .container_tokens.crc
| | |-- .default_container_executor.sh.crc
| | |-- .default_container_executor_session.sh.crc
| | |-- .launch_container.sh.crc
| | |-- container_tokens
| | |-- default_container_executor.sh
| | |-- default_container_executor_session.sh
| | |-- job.jar -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/11/job.jar
| | |-- job.xml -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/13/job.xml
| | |-- jobSubmitDir
| | | |-- job.split -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/12/job.split
| | | `-- job.splitmetainfo -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/10/job.splitmetainfo
| | |-- launch_container.sh
| | `-- tmp
| | `-- jetty-0.0.0.0-33217-mapreduce-_-any-7507049593585889762.dir
| | `-- webapp
| | `-- .keep
| |-- container_1611734493242_0014_01_000003
| | |-- .container_tokens.crc
| | |-- .default_container_executor.sh.crc
| | |-- .default_container_executor_session.sh.crc
| | |-- .launch_container.sh.crc
| | |-- container_tokens
| | |-- default_container_executor.sh
| | |-- default_container_executor_session.sh
| | |-- job.jar -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/11/job.jar
| | |-- job.xml -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/filecache/13/job.xml
| | |-- launch_container.sh
| | `-- tmp
| `-- filecache
| |-- 10
| | |-- .job.splitmetainfo.crc
| | `-- job.splitmetainfo
| |-- 11
| | `-- job.jar
| | `-- job.jar
| |-- 12
| | |-- .job.split.crc
| | `-- job.split
| `-- 13
| |-- .job.xml.crc
| `-- job.xml
`-- filecache
|-- 10
| |-- .reduce.xml.crc
| `-- reduce.xml
|-- 11
| |-- .map.xml.crc
| `-- map.xml
|-- 12
| |-- .reduce.xml.crc
| `-- reduce.xml
|-- 13
| |-- .map.xml.crc
| `-- map.xml
[root@hadoop3 application_1611734493242_0026]# pwd
/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0026
[root@hadoop3 application_1611734493242_0026]# ll
total 6292
drwxr-xr-x 2 root root 4096 Feb 11 09:28 attempt_1611734493242_0026_m_000000_0
-rw-r--r-- 1 root root 381200 Feb 11 09:12 attempt_1611734493242_0026_m_000000_0_spill_0.out
-rw-r--r-- 1 root root 387174 Feb 11 09:12 attempt_1611734493242_0026_m_000000_0_spill_1.out
-rw-r--r-- 1 root root 403915 Feb 11 09:15 attempt_1611734493242_0026_m_000000_0_spill_10.out
-rw-r--r-- 1 root root 403902 Feb 11 09:15 attempt_1611734493242_0026_m_000000_0_spill_11.out
-rw-r--r-- 1 root root 403902 Feb 11 09:16 attempt_1611734493242_0026_m_000000_0_spill_12.out
-rw-r--r-- 1 root root 403915 Feb 11 09:16 attempt_1611734493242_0026_m_000000_0_spill_13.out
-rw-r--r-- 1 root root 403915 Feb 11 09:16 attempt_1611734493242_0026_m_000000_0_spill_14.out
-rw-r--r-- 1 root root 388146 Feb 11 09:20 attempt_1611734493242_0026_m_000000_0_spill_15.out
-rw-r--r-- 1 root root 387174 Feb 11 09:12 attempt_1611734493242_0026_m_000000_0_spill_2.out
-rw-r--r-- 1 root root 402471 Feb 11 09:13 attempt_1611734493242_0026_m_000000_0_spill_3.out
-rw-r--r-- 1 root root 403915 Feb 11 09:13 attempt_1611734493242_0026_m_000000_0_spill_4.out
-rw-r--r-- 1 root root 403915 Feb 11 09:13 attempt_1611734493242_0026_m_000000_0_spill_5.out
-rw-r--r-- 1 root root 403902 Feb 11 09:14 attempt_1611734493242_0026_m_000000_0_spill_6.out
-rw-r--r-- 1 root root 403902 Feb 11 09:14 attempt_1611734493242_0026_m_000000_0_spill_7.out
-rw-r--r-- 1 root root 403915 Feb 11 09:14 attempt_1611734493242_0026_m_000000_0_spill_8.out
-rw-r--r-- 1 root root 403915 Feb 11 09:15 attempt_1611734493242_0026_m_000000_0_spill_9.out
drwx--x--- 4 root root 4096 Feb 11 09:11 container_1611734493242_0026_01_000001
drwx--x--- 3 root root 4096 Feb 11 09:11 container_1611734493242_0026_01_000002
drwx--x--- 6 root root 4096 Feb 11 09:11 filecache
drwxr-xr-x 3 root root 4096 Feb 11 09:21 output
drwxr-xr-x 2 root root 4096 Feb 11 09:11 work
[root@hadoop3 application_1611734493242_0026]# ll attempt_1611734493242_0026_m_000000_0
total 0
-rw-r--r-- 1 root root 0 Feb 11 09:28 intermediate.1
[root@hadoop3 application_1611734493242_0026]# ll output/attempt_1611734493242_0026_m_000000_0/
total 6244
-rw-r--r-- 1 root root 6388908 Feb 11 09:46 file.out
-rw-r--r-- 1 root root 80 Feb 11 09:46 file.out.index
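With `mapreduce.task.io.sort.mb=1` the map task spills very frequently, which is what produces the `spill_N.out` files listed above before they are merged into `output/<attempt>/file.out` and its `file.out.index`. A quick way to summarize the spill activity while the task sits at the breakpoint (paths exactly as in the listing above):

```
# count and total the spill files of the paused map attempt
cd /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0026
ls attempt_1611734493242_0026_m_000000_0_spill_*.out | wc -l
du -ch attempt_1611734493242_0026_m_000000_0_spill_*.out | tail -n 1
```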
[root@hadoop3 hadoop-3.1.3]# netstat -tunlp |grep 45767
tcp 0 0 0.0.0.0:45767 0.0.0.0:* LISTEN 39121/java
[root@hadoop3 hadoop-3.1.3]# ps -ef |grep 39121
root 39121 39107 2 22:21 ? 00:00:26 /usr/java/jdk1.8.0_131/bin/java -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1611734493242_0014/container_1611734493242_0014_01_000001/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1611734493242_0014/container_1611734493242_0014_01_000001 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Xmx1024m org.apache.hadoop.mapreduce.v2.app.MRAppMaster
Reduce
/usr/java/jdk1.8.0_131/bin/java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1610952022777_0001/container_1610952022777_0001_01_000004/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1610952022777_0001/container_1610952022777_0001_01_000004 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Dyarn.app.mapreduce.shuffle.logger=INFO,shuffleCLA -Dyarn.app.mapreduce.shuffle.logfile=syslog.shuffle -Dyarn.app.mapreduce.shuffle.log.filesize=0 -Dyarn.app.mapreduce.shuffle.log.backups=0 org.apache.hadoop.mapred.YarnChild 172.17.0.4 41045 attempt_1610952022777_0001_r_000000_0 4
hadoop --loglevel INFO jar /soft/hadoop-3.1.3/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.3.jar wordcount -D mapreduce.task.timeout=100000000 -D mapreduce.reduce.java.opts="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m" /tmp/data.txt /tmp/o7
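Attaching to the reducer works the same way as for a map task, except the reduce attempt normally launches only after enough map output is available, so the suspended JVM may appear later. To find which node is running the suspended reduce attempt before connecting, something like the following can be used (the job ID is a placeholder from this run):

```
# list running reduce attempts to locate the task, then attach to the suspended JVM
mapred job -list-attempt-ids job_1613991969743_0002 reduce running
jdb -connect com.sun.jdi.SocketAttach:hostname=hadoop3,port=8000
```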
- Process
├─java -Dproc_nodemanager -Djava.net.preferIPv4Stack=true -Dyarn.log.dir=/soft/hadoop-3.1.3/logs -Dyarn.log.file=hadoop-root-nodemanager-hadoop3.log -Dyarn.home.dir=/soft/hadoop-3.1.3 -Dyarn.root.logger=INFO,console -Djava.library.path=/soft/hadoop-3.1.3/lib/native -Dhadoop.log.dir=/soft/hadoop-3.1.3/logs -Dhadoop.log.file=hadoop-root-nodemanager-hadoop3.log -Dhadoop.home.dir=/soft/hadoop-3.1.3 -Dhadoop.id.str=root -Dhadoop.root.logger=INFO,RFA -Dhadoop.policy.file=hadoop-policy.xml -Dhadoop.security.logger=INFO,NullAppender org.apache.hadoop.yarn.server.nodemanager.NodeManager
│ ├─bash /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000001/default_container_executor.sh
│ │ └─bash -c /usr/java/jdk1.8.0_131/bin/java -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000001/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000001 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Xmx1024m org.apache.hadoop.mapreduce.v2.app.MRAppMaster 1>/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000001/stdout 2>/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000001/stderr
│ │ └─java -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000001/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000001 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Xmx1024m org.apache.hadoop.mapreduce.v2.app.MRAppMaster
│ │ └─85*[{java}]
│ ├─bash /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/default_container_executor.sh
│ │ └─bash -c /usr/java/jdk1.8.0_131/bin/java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Dyarn.app.mapreduce.shuffle.logger=INFO,shuffleCLA -Dyarn.app.mapreduce.shuffle.logfile=syslog.shuffle -Dyarn.app.mapreduce.shuffle.log.filesize=0 -Dyarn.app.mapreduce.shuffle.log.backups=0 org.apache.hadoop.mapred.YarnChild 172.17.0.3 41227 attempt_1613991969743_0002_r_000000_0 3 1>/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/stdout 2>/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/stderr
│ │ └─java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Dyarn.app.mapreduce.shuffle.logger=INFO,shuffleCLA -Dyarn.app.mapreduce.shuffle.logfile=syslog.shuffle -Dyarn.app.mapreduce.shuffle.log.filesize=0 -Dyarn.app.mapreduce.shuffle.log.backups=0 org.apache.hadoop.mapred.YarnChild 172.17.0.3 41227 attempt_1613991969743_0002_r_000000_0 3
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/default_container_executor.sh
#!/bin/bash
/bin/bash "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/default_container_executor_session.sh"
rc=$?
echo $rc > "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1613991969743_0002/container_1613991969743_0002_01_000003/container_1613991969743_0002_01_000003.pid.exitcode.tmp"
/bin/mv -f "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1613991969743_0002/container_1613991969743_0002_01_000003/container_1613991969743_0002_01_000003.pid.exitcode.tmp" "/tmp/hadoop-root/nm-local-dir/nmPrivate/application_1613991969743_0002/container_1613991969743_0002_01_000003/container_1613991969743_0002_01_000003.pid.exitcode"
exit $rc
[root@hadoop3 hadoop-3.1.3]#
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/default_container_executor_session.sh
#!/bin/bash
echo $$ > /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1613991969743_0002/container_1613991969743_0002_01_000003/container_1613991969743_0002_01_000003.pid.tmp
/bin/mv -f /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1613991969743_0002/container_1613991969743_0002_01_000003/container_1613991969743_0002_01_000003.pid.tmp /tmp/hadoop-root/nm-local-dir/nmPrivate/application_1613991969743_0002/container_1613991969743_0002_01_000003/container_1613991969743_0002_01_000003.pid
exec setsid /bin/bash "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/launch_container.sh"
[root@hadoop3 hadoop-3.1.3]#
[root@hadoop3 hadoop-3.1.3]#
[root@hadoop3 hadoop-3.1.3]# cat /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/launch_container.sh
#!/bin/bash
set -o pipefail -e
export PRELAUNCH_OUT="/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/prelaunch.out"
exec >"${PRELAUNCH_OUT}"
export PRELAUNCH_ERR="/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/prelaunch.err"
exec 2>"${PRELAUNCH_ERR}"
echo "Setting up env variables"
export JAVA_HOME=${JAVA_HOME:-"/usr/java/jdk1.8.0_131"}
export HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/soft/conf/hadoop"}
export HADOOP_YARN_HOME=${HADOOP_YARN_HOME:-"/soft/hadoop-3.1.3"}
export HADOOP_HOME=${HADOOP_HOME:-"/soft/hadoop-3.1.3"}
export PATH=${PATH:-"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin"}
export HADOOP_TOKEN_FILE_LOCATION="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003/container_tokens"
export CONTAINER_ID="container_1613991969743_0002_01_000003"
export NM_PORT="39053"
export NM_HOST="hadoop3"
export NM_HTTP_PORT="8042"
export LOCAL_DIRS="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002"
export LOCAL_USER_DIRS="/tmp/hadoop-root/nm-local-dir/usercache/root/"
export LOG_DIRS="/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003"
export USER="root"
export LOGNAME="root"
export HOME="/home/"
export PWD="/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/container_1613991969743_0002_01_000003"
export JVM_PID="$$"
export MALLOC_ARENA_MAX="4"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
export STDOUT_LOGFILE_ENV="/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/stdout"
export SHELL="/bin/bash"
export HADOOP_ROOT_LOGGER="INFO,console"
export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
export CLASSPATH="$PWD:$HADOOP_CONF_DIR:$HADOOP_COMMON_HOME/share/hadoop/common/*:$HADOOP_COMMON_HOME/share/hadoop/common/lib/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/*:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*:$HADOOP_YARN_HOME/share/hadoop/yarn/*:$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*:job.jar/*:job.jar/classes/:job.jar/lib/*:$PWD/*"
export LD_LIBRARY_PATH="$PWD:$HADOOP_COMMON_HOME/lib/native"
export STDERR_LOGFILE_ENV="/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/stderr"
export HADOOP_CLIENT_OPTS=""
echo "Setting up job resources"
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/13/job.xml" "job.xml"
ln -sf -- "/tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/11/job.jar" "job.jar"
echo "Copying debugging information"
# Creating copy of launch script
cp "launch_container.sh" "/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/launch_container.sh"
chmod 640 "/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/directory.info"
ls -l 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/directory.info"
find -L . -maxdepth 5 -ls 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/directory.info"
echo "Launching container"
exec /bin/bash -c "$JAVA_HOME/bin/java -Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000 -Xmx1024m -Djava.io.tmpdir=$PWD/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Dyarn.app.mapreduce.shuffle.logger=INFO,shuffleCLA -Dyarn.app.mapreduce.shuffle.logfile=syslog.shuffle -Dyarn.app.mapreduce.shuffle.log.filesize=0 -Dyarn.app.mapreduce.shuffle.log.backups=0 org.apache.hadoop.mapred.YarnChild 172.17.0.3 41227 attempt_1613991969743_0002_r_000000_0 3 1>/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/stdout 2>/soft/hadoop-3.1.3/logs/userlogs/application_1613991969743_0002/container_1613991969743_0002_01_000003/stderr "
[root@hadoop3 hadoop-3.1.3]# tree -a /tmp/hadoop-root/nm-local-dir/
/tmp/hadoop-root/nm-local-dir/
├── filecache
├── nmPrivate
│ └── application_1613991969743_0002
│ ├── container_1613991969743_0002_01_000001
│ │ ├── container_1613991969743_0002_01_000001.pid
│ │ ├── container_1613991969743_0002_01_000001.tokens
│ │ ├── .container_1613991969743_0002_01_000001.tokens.crc
│ │ ├── launch_container.sh
│ │ └── .launch_container.sh.crc
│ └── container_1613991969743_0002_01_000003
│ ├── container_1613991969743_0002_01_000003.pid
│ ├── container_1613991969743_0002_01_000003.tokens
│ ├── .container_1613991969743_0002_01_000003.tokens.crc
│ ├── launch_container.sh
│ └── .launch_container.sh.crc
└── usercache
└── root
├── appcache
│ └── application_1613991969743_0002
│ ├── container_1613991969743_0002_01_000001
│ │ ├── container_tokens
│ │ ├── .container_tokens.crc
│ │ ├── default_container_executor_session.sh
│ │ ├── .default_container_executor_session.sh.crc
│ │ ├── default_container_executor.sh
│ │ ├── .default_container_executor.sh.crc
│ │ ├── job.jar -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/11/job.jar
│ │ ├── jobSubmitDir
│ │ │ ├── job.split -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/12/job.split
│ │ │ └── job.splitmetainfo -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/10/job.splitmetainfo
│ │ ├── job.xml -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/13/job.xml
│ │ ├── launch_container.sh
│ │ ├── .launch_container.sh.crc
│ │ └── tmp
│ │ └── jetty-0.0.0.0-33049-mapreduce-_-any-663241850766727606.dir
│ │ └── webapp
│ │ └── .keep
│ ├── container_1613991969743_0002_01_000003
│ │ ├── container_tokens
│ │ ├── .container_tokens.crc
│ │ ├── default_container_executor_session.sh
│ │ ├── .default_container_executor_session.sh.crc
│ │ ├── default_container_executor.sh
│ │ ├── .default_container_executor.sh.crc
│ │ ├── job.jar -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/11/job.jar
│ │ ├── job.xml -> /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/filecache/13/job.xml
│ │ ├── launch_container.sh
│ │ ├── .launch_container.sh.crc
│ │ └── tmp
│ ├── filecache
│ │ ├── 10
│ │ │ ├── job.splitmetainfo
│ │ │ └── .job.splitmetainfo.crc
│ │ ├── 11
│ │ │ └── job.jar
│ │ │ └── job.jar
│ │ ├── 12
│ │ │ ├── job.split
│ │ │ └── .job.split.crc
│ │ └── 13
│ │ ├── job.xml
│ │ └── .job.xml.crc
│ ├── output
│ │ └── attempt_1613991969743_0002_m_000000_0
│ │ ├── file.out
│ │ └── file.out.index
│ └── work
└── filecache
[root@hadoop3 hadoop-3.1.3]# ll /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/output/attempt_1613991969743_0002_m_000000_0/
total 8
-rw-r--r-- 1 root root 1836 2月 22 20:40 file.out
-rw-r--r-- 1 root root 32 2月 22 20:40 file.out.index
[root@hadoop3 hadoop-3.1.3]# ll /tmp/hadoop-root/nm-local-dir/usercache/root/appcache/application_1613991969743_0002/output/attempt_1613991969743_0002_r_000000_0/
total 4
-rw-r--r-- 1 root root 1836 2月 22 21:39 map_0.out.merged
[root@hadoop3 hadoop-3.1.3]# hdfs dfs -ls /tmp/o1/_temporary/1/_temporary/attempt_1613991969743_0002_r_000000_0/part-r-00000
-rw-r--r-- 1 root supergroup 0 2021-02-22 21:44 /tmp/o1/_temporary/1/_temporary/attempt_1613991969743_0002_r_000000_0/part-r-00000
[root@hadoop3 hadoop-3.1.3]# hdfs dfs -ls /tmp/o1
Found 2 items
-rw-r--r-- 1 root supergroup 0 2021-02-22 21:55 /tmp/o1/_SUCCESS
-rw-r--r-- 1 root supergroup 1306 2021-02-22 21:52 /tmp/o1/part-r-00000