My HBase master fails to start; the error log is below. In short: HMaster 2.4.16 starts, the Kerberos login with the keytab succeeds, and the node registers as active master, but every attempt to write /hbase/.tmp/hbase.version to HDFS fails with java.io.IOException: Invalid token in javax.security.sasl.qop, all three DataNodes end up excluded, and the master aborts with "Failed to become active master". The full log:
core file size (blocks, -c) 0
data seg size (kbytes, -d) unlimited
scheduling priority (-e) 0
file size (blocks, -f) unlimited
pending signals (-i) 47749
max locked memory (kbytes, -l) 64
max memory size (kbytes, -m) unlimited
open files (-n) 65536
pipe size (512 bytes, -p) 8
POSIX message queues (bytes, -q) 819200
real-time priority (-r) 0
stack size (kbytes, -s) 8192
cpu time (seconds, -t) unlimited
max user processes (-u) 131072
virtual memory (kbytes, -v) unlimited
file locks (-x) unlimited
2023-02-06 20:14:50,430 INFO [main] master.HMaster: STARTING service HMaster
2023-02-06 20:14:50,431 INFO [main] util.VersionInfo: HBase 2.4.16
2023-02-06 20:14:50,431 INFO [main] util.VersionInfo: Source code repository git://17342ca4031d/home/zhangduo/hbase-rm/output/hbase revision=d1714710877653691e2125bd94b68a5b484a3a06
2023-02-06 20:14:50,431 INFO [main] util.VersionInfo: Compiled by zhangduo on Wed Feb 1 09:46:35 UTC 2023
2023-02-06 20:14:50,431 INFO [main] util.VersionInfo: From source with checksum 1ca7bcc2d1de1933beaeb5a1c380582712f11ed1bb1863308703335f7e230127010b1836d4b73df8f5a3baf6bbe4b33dbf7fcec2b28512d7acf5055d00d0c06b
2023-02-06 20:14:50,554 INFO [main] util.ServerCommandLine: hbase.tmp.dir: /opt/module/hbase/tmp
2023-02-06 20:14:50,555 INFO [main] util.ServerCommandLine: hbase.rootdir: hdfs://mycluster/hbase
2023-02-06 20:14:50,555 INFO [main] util.ServerCommandLine: hbase.cluster.distributed: true
2023-02-06 20:14:50,555 INFO [main] util.ServerCommandLine: hbase.zookeeper.quorum: hadoop102,hadoop103,hadoop104
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:PATH=/opt/module/miniconda3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/opt/module/java//bin:/opt/module/hadoop-3.1.3/bin:/opt/module/hadoop-3.1.3/sbin:/opt/module/kafka/bin:/opt/module/hive/bin:/opt/module/spark/bin:/opt/module/hbase/bin:/opt/module/zookeeper-3.5.7//bin:/opt/module/hadoop-3.1.3/bin:/opt/module/hadoop-3.1.3/sbin:/root/bin:/opt/module/java//bin:/opt/module/hadoop/bin:/opt/module/hadoop/sbin:/opt/module/kafka/bin:/opt/module/hive/bin:/opt/module/spark/bin:/opt/module/hbase/bin:/opt/module/zookeeper-3.5.7//bin:/opt/module/java//bin:/opt/module/hadoop/bin:/opt/module/hadoop/sbin:/opt/module/kafka/bin:/opt/module/hive/bin:/opt/module/spark/bin:/opt/module/hbase/bin:/opt/module/zookeeper-3.5.7//bin:/opt/module/java//bin:/opt/module/hadoop-3.1.3/bin:/opt/module/hadoop-3.1.3/sbin:/opt/module/kafka/bin:/opt/module/hive/bin:/opt/module/spark/bin:/opt/module/hbase/bin:/opt/module/zookeeper-3.5.7//bin
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:HADOOP_CONF_DIR=/opt/module/hadoop-3.1.3/etc/hadoop
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:HISTCONTROL=ignoredups
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:MAIL=/var/spool/mail/root
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:LD_LIBRARY_PATH=:/opt/module/hadoop-3.1.3/lib/native
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:LOGNAME=root
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:JVM_PID=119379
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:HBASE_REST_OPTS=
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:PWD=/opt/module/hbase
2023-02-06 20:14:50,559 INFO [main] util.ServerCommandLine: env:HBASE_ROOT_LOGGER=INFO,RFA
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:LESSOPEN=||/usr/bin/lesspipe.sh %s
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:SHELL=/bin/bash
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HBASE_ENV_INIT=true
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HIVE_HOME=/opt/module/hive
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HBASE_MANAGES_ZK=false
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:SED=sed
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HADOOP_HOME=/opt/module/hadoop-3.1.3
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HBASE_NICENESS=0
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HBASE_OPTS=-XX:+UseConcMarkSweepGC -Djava.security.auth.login.config=/opt/module/hbase/conf/zk-jaas.conf -Djava.util.logging.config.class=org.apache.hadoop.hbase.logging.JulToSlf4jInitializer -Dhbase.log.dir=/opt/module/hbase/logs -Dhbase.log.file=hbase-root-master-hadoop102.log -Dhbase.home.dir=/opt/module/hbase -Dhbase.id.str=root -Dhbase.root.logger=INFO,RFA -Djava.library.path=/opt/module/hadoop-3.1.3/lib/native -Dhbase.security.logger=INFO,RFAS
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HBASE_SECURITY_LOGGER=INFO,RFAS
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:LS_COLORS=…1:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=01;36:*.au=01;36:*.flac=01;36:*.mid=01;36:*.midi=01;36:*.mka=01;36:*.mp3=01;36:*.mpc=01;36:*.ogg=01;36:*.ra=01;36:*.wav=01;36:*.axa=01;36:*.oga=01;36:*.spx=01;36:*.xspf=01;36:
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:SHLVL=4
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:ZOOKEEPER_HOME=/opt/module/zookeeper-3.5.7/
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HBASE_LOGFILE=hbase-root-master-hadoop102.log
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:HISTSIZE=1000
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:JAVA_HOME=/opt/module/java/
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:KAFKA_HOME=/opt/module/kafka
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:TERM=xterm
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:LANG=zh_CN.UTF-8
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: env:XDG_SESSION_ID=130
2023-02-06 20:14:50,560 INFO [main] util.ServerCommandLine: 3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-server-router-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-server-sharedcachemanager-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-server-tests-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-server-timeline-pluginstorage-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-server-web-proxy-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-services-api-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/hadoop-yarn-services-core-3.1.3.jar:/opt/module/hadoop-3.1.3/share/hadoop/yarn/lib:/opt/module/hadoop-3.1.3/share/hadoop/yarn/sources:/opt/module/hadoop-3.1.3/share/hadoop/yarn/test:/opt/module/hadoop-3.1.3/share/hadoop/yarn/timelineservice:/opt/module/hadoop-3.1.3/share/hadoop/yarn/webapps:/opt/module/hadoop-3.1.3/share/hadoop/yarn/yarn-service-examples
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:SPARK_HOME=/opt/module/spark
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HBASE_IDENT_STRING=root
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HBASE_ZNODE_FILE=/tmp/hbase-root-master.znode
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:SSH_TTY=/dev/pts/3
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:SSH_CLIENT=192.168.20.1 60708 22
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HBASE_LOG_PREFIX=hbase-root-master-hadoop102
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HBASE_LOG_DIR=/opt/module/hbase/logs
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:USER=root
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:CLASSPATH=…persistence.asm-2.7.4.jar:/opt/module/hbase/lib/jdk11/org.eclipse.persistence.core-2.7.4.jar:/opt/module/hbase/lib/jdk11/org.eclipse.persistence.moxy-2.7.4.jar:/opt/module/hbase/lib/jdk11/org.eclipse.persistence.sdo-2.7.4.jar:/opt/module/hbase/lib/jdk11/pfl-asm-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-basic-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-basic-tools-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-dynamic-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-tf-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-tf-tools-4.0.1.jar:/opt/module/hbase/lib/jdk11/policy-2.7.6.jar:/opt/module/hbase/lib/jdk11/release-documentation-2.3.2-docbook.zip:/opt/module/hbase/lib/jdk11/saaj-impl-1.5.1.jar:/opt/module/hbase/lib/jdk11/samples-2.3.2.zip:/opt/module/hbase/lib/jdk11/sdo-eclipselink-plugin-2.3.2.jar:/opt/module/hbase/lib/jdk11/stax-ex-1.8.1.jar:/opt/module/hbase/lib/jdk11/streambuffer-1.5.7.jar:/opt/module/hbase/lib/jdk11/txw2-2.3.2.jar:/opt/module/hbase/lib/client-facing-thirdparty/slf4j-reload4j-1.7.33.jar
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:SSH_CONNECTION=192.168.20.1 60708 192.168.20.62 22
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HBASE_AUTOSTART_FILE=/tmp/hbase-root-master.autostart
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HOSTNAME=hadoop102
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:GREP=grep
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:XDG_RUNTIME_DIR=/run/user/0
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HBASE_THRIFT_OPTS=
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HBASE_HOME=/opt/module/hbase
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:HOME=/root
2023-02-06 20:14:50,561 INFO [main] util.ServerCommandLine: env:MALLOC_ARENA_MAX=4
2023-02-06 20:14:50,581 INFO [main] util.ServerCommandLine: vmName=Java HotSpot(TM) 64-Bit Server VM, vmVendor=Oracle Corporation, vmVersion=11.0.18+9-LTS-195
2023-02-06 20:14:50,581 INFO [main] util.ServerCommandLine: vmInputArguments=[-Dproc_master, -XX:OnOutOfMemoryError=kill -9 %p, -XX:+UseConcMarkSweepGC, -Djava.security.auth.login.config=/opt/module/hbase/conf/zk-jaas.conf, -Djava.util.logging.config.class=org.apache.hadoop.hbase.logging.JulToSlf4jInitializer, -Dhbase.log.dir=/opt/module/hbase/logs, -Dhbase.log.file=hbase-root-master-hadoop102.log, -Dhbase.home.dir=/opt/module/hbase, -Dhbase.id.str=root, -Dhbase.root.logger=INFO,RFA, -Djava.library.path=/opt/module/hadoop-3.1.3/lib/native, -Dhbase.security.logger=INFO,RFAS]
2023-02-06 20:14:50,963 INFO [main] metrics.MetricRegistries: Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl
2023-02-06 20:14:51,671 INFO [main] regionserver.RSRpcServices: master/hadoop102:16000 server-side Connection retries=45
2023-02-06 20:14:51,690 INFO [main] ipc.RpcExecutor: Instantiated default.FPBQ.Fifo with queueClass=class java.util.concurrent.LinkedBlockingQueue; numCallQueues=3, maxQueueLength=300, handlerCount=30
2023-02-06 20:14:51,694 INFO [main] ipc.RpcExecutor: Instantiated priority.RWQ.Fifo with queueClass=class java.util.concurrent.LinkedBlockingQueue; numCallQueues=2, maxQueueLength=300, handlerCount=20
2023-02-06 20:14:51,694 INFO [main] ipc.RWQueueRpcExecutor: priority.RWQ.Fifo writeQueues=1 writeHandlers=2 readQueues=1 readHandlers=18 scanQueues=0 scanHandlers=0
2023-02-06 20:14:51,694 INFO [main] ipc.RpcExecutor: Instantiated replication.FPBQ.Fifo with queueClass=class java.util.concurrent.LinkedBlockingQueue; numCallQueues=1, maxQueueLength=300, handlerCount=3
2023-02-06 20:14:51,694 INFO [main] ipc.RpcExecutor: Instantiated metaPriority.FPBQ.Fifo with queueClass=class java.util.concurrent.LinkedBlockingQueue; numCallQueues=1, maxQueueLength=300, handlerCount=1
2023-02-06 20:14:51,898 INFO [main] ipc.RpcServerFactory: Creating org.apache.hadoop.hbase.ipc.NettyRpcServer hosting hbase.pb.MasterService, hbase.pb.RegionServerStatusService, hbase.pb.LockService, hbase.pb.HbckService, hbase.pb.ClientMetaService, hbase.pb.ClientService, hbase.pb.AdminService
2023-02-06 20:14:51,926 INFO [main] Configuration.deprecation: hbase.ipc.server.reservoir.initial.buffer.size is deprecated. Instead, use hbase.server.allocator.buffer.size
2023-02-06 20:14:51,926 INFO [main] Configuration.deprecation: hbase.ipc.server.reservoir.initial.max is deprecated. Instead, use hbase.server.allocator.max.buffer.count
2023-02-06 20:14:51,990 INFO [main] ipc.NettyRpcServer: Bind to /192.168.20.62:16000
2023-02-06 20:14:52,125 INFO [main] security.UserGroupInformation: Login successful for user hadoop/hadoop102@ZHT.COM using keytab file /etc/security/keytab/hadoop.keytab
2023-02-06 20:14:53,142 INFO [main] fs.HFileSystem: Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2023-02-06 20:14:53,147 INFO [main] fs.HFileSystem: Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2023-02-06 20:14:53,165 INFO [main] zookeeper.RecoverableZooKeeper: Process identifier=master:16000 connecting to ZooKeeper ensemble=hadoop102:2181,hadoop103:2181,hadoop104:2181
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:zookeeper.version=3.5.7-f0fdd52973d373ffd9c86b81d99842dc2c7f660e, built on 02/10/2020 11:30 GMT
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:host.name=hadoop102
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:java.version=11.0.18
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:java.vendor=Oracle Corporation
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:java.home=/opt/module/java
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:java.class.path=…persistence.asm-2.7.4.jar:/opt/module/hbase/lib/jdk11/org.eclipse.persistence.core-2.7.4.jar:/opt/module/hbase/lib/jdk11/org.eclipse.persistence.moxy-2.7.4.jar:/opt/module/hbase/lib/jdk11/org.eclipse.persistence.sdo-2.7.4.jar:/opt/module/hbase/lib/jdk11/pfl-asm-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-basic-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-basic-tools-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-dynamic-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-tf-4.0.1.jar:/opt/module/hbase/lib/jdk11/pfl-tf-tools-4.0.1.jar:/opt/module/hbase/lib/jdk11/policy-2.7.6.jar:/opt/module/hbase/lib/jdk11/release-documentation-2.3.2-docbook.zip:/opt/module/hbase/lib/jdk11/saaj-impl-1.5.1.jar:/opt/module/hbase/lib/jdk11/samples-2.3.2.zip:/opt/module/hbase/lib/jdk11/sdo-eclipselink-plugin-2.3.2.jar:/opt/module/hbase/lib/jdk11/stax-ex-1.8.1.jar:/opt/module/hbase/lib/jdk11/streambuffer-1.5.7.jar:/opt/module/hbase/lib/jdk11/txw2-2.3.2.jar:/opt/module/hbase/lib/client-facing-thirdparty/slf4j-reload4j-1.7.33.jar
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:java.library.path=/opt/module/hadoop-3.1.3/lib/native
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:java.io.tmpdir=/tmp
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:java.compiler=<NA>
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:os.name=Linux
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:os.arch=amd64
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:os.version=3.10.0-1127.el7.x86_64
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:user.name=root
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:user.home=/root
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:user.dir=/opt/module/hbase
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:os.memory.free=126MB
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:os.memory.max=2925MB
2023-02-06 20:14:53,170 INFO [main] zookeeper.ZooKeeper: Client environment:os.memory.total=181MB
2023-02-06 20:14:53,171 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop102:2181,hadoop103:2181,hadoop104:2181 sessionTimeout=90000 watcher=org.apache.hadoop.hbase.zookeeper.PendingWatcher@4b425577
2023-02-06 20:14:53,174 INFO [main] common.X509Util: Setting -D jdk.tls.rejectClientInitiatedRenegotiation=true to disable client-initiated TLS renegotiation
2023-02-06 20:14:53,177 INFO [main] zookeeper.ClientCnxnSocket: jute.maxbuffer value is 4194304 Bytes
2023-02-06 20:14:53,182 INFO [main] zookeeper.ClientCnxn: zookeeper.request.timeout value is 0. feature enabled=
2023-02-06 20:14:53,198 INFO [main-SendThread(hadoop103:2181)] zookeeper.Login: Client successfully logged in.
2023-02-06 20:14:53,199 INFO [Thread-5] zookeeper.Login: TGT refresh thread started.
2023-02-06 20:14:53,204 INFO [main-SendThread(hadoop103:2181)] client.ZooKeeperSaslClient: Client will use GSSAPI as SASL mechanism.
2023-02-06 20:14:53,209 INFO [Thread-5] zookeeper.Login: TGT valid starting at: Mon Feb 06 20:14:53 CST 2023
2023-02-06 20:14:53,209 INFO [Thread-5] zookeeper.Login: TGT expires: Tue Feb 07 20:14:53 CST 2023
2023-02-06 20:14:53,210 INFO [Thread-5] zookeeper.Login: TGT refresh sleeping until: Tue Feb 07 15:56:21 CST 2023
2023-02-06 20:14:53,216 INFO [main-SendThread(hadoop103:2181)] zookeeper.ClientCnxn: Opening socket connection to server hadoop103/192.168.20.63:2181. Will attempt to SASL-authenticate using Login Context section 'Client'
2023-02-06 20:14:53,233 INFO [main-SendThread(hadoop103:2181)] zookeeper.ClientCnxn: Socket connection established, initiating session, client: /192.168.20.62:49078, server: hadoop103/192.168.20.63:2181
2023-02-06 20:14:53,359 INFO [main-SendThread(hadoop103:2181)] zookeeper.ClientCnxn: Session establishment complete on server hadoop103/192.168.20.63:2181, sessionid = 0x300021049630004, negotiated timeout = 40000
2023-02-06 20:14:55,720 INFO [main] util.log: Logging initialized @5716ms to org.apache.hbase.thirdparty.org.eclipse.jetty.util.log.Slf4jLog
2023-02-06 20:14:55,869 INFO [main] http.HttpServer: Added global filter 'safety' (class=org.apache.hadoop.hbase.http.HttpServer$QuotingInputFilter)
2023-02-06 20:14:55,870 INFO [main] http.HttpServer: Added global filter 'clickjackingprevention' (class=org.apache.hadoop.hbase.http.ClickjackingPreventionFilter)
2023-02-06 20:14:55,870 INFO [main] http.HttpServer: Added global filter 'securityheaders' (class=org.apache.hadoop.hbase.http.SecurityHeadersFilter)
2023-02-06 20:14:55,872 INFO [main] http.HttpServer: Added filter static_user_filter (class=org.apache.hadoop.hbase.http.lib.StaticUserWebFilter$StaticUserFilter) to context master
2023-02-06 20:14:55,872 INFO [main] http.HttpServer: Added filter static_user_filter (class=org.apache.hadoop.hbase.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2023-02-06 20:14:55,872 INFO [main] http.HttpServer: Added filter static_user_filter (class=org.apache.hadoop.hbase.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2023-02-06 20:14:55,890 INFO [main] http.HttpServer: ASYNC_PROFILER_HOME environment variable and async.profiler.home system property not specified. Disabling /prof endpoint.
2023-02-06 20:14:55,943 INFO [main] http.HttpServer: Jetty bound to port 16010
2023-02-06 20:14:55,944 INFO [main] server.Server: jetty-9.4.50.v20221201; built: 2022-12-01T22:07:03.915Z; git: da9a0b30691a45daf90a9f17b5defa2f1434f882; jvm 11.0.18+9-LTS-195
2023-02-06 20:14:55,968 INFO [main] http.SecurityHeadersFilter: Added security headers filter
2023-02-06 20:14:55,971 INFO [main] handler.ContextHandler: Started o.a.h.t.o.e.j.s.ServletContextHandler@7f27f59b{logs,/logs,file:///opt/module/hbase/logs/,AVAILABLE}
2023-02-06 20:14:55,971 INFO [main] http.SecurityHeadersFilter: Added security headers filter
2023-02-06 20:14:55,972 INFO [main] handler.ContextHandler: Started o.a.h.t.o.e.j.s.ServletContextHandler@7971c2a9{static,/static,file:///opt/module/hbase/hbase-webapps/static/,AVAILABLE}
2023-02-06 20:14:56,073 INFO [main] webapp.StandardDescriptorProcessor: NO JSP Support for /, did not find org.apache.hbase.thirdparty.org.eclipse.jetty.jsp.JettyJspServlet
2023-02-06 20:14:56,085 INFO [main] server.session: DefaultSessionIdManager workerName=node0
2023-02-06 20:14:56,085 INFO [main] server.session: No SessionScavenger set, using defaults
2023-02-06 20:14:56,087 INFO [main] server.session: node0 Scavenging every 660000ms
2023-02-06 20:14:56,096 INFO [main] http.SecurityHeadersFilter: Added security headers filter
2023-02-06 20:14:56,119 INFO [main] handler.ContextHandler: Started o.a.h.t.o.e.j.w.WebAppContext@77aea{master,/,file:///opt/module/hbase/hbase-webapps/master/,AVAILABLE}{file:/opt/module/hbase/hbase-webapps/master}
2023-02-06 20:14:56,130 INFO [main] server.AbstractConnector: Started ServerConnector@50ec4bfc{HTTP/1.1, (http/1.1)}{0.0.0.0:16010}
2023-02-06 20:14:56,131 INFO [main] server.Server: Started @6126ms
2023-02-06 20:14:56,135 INFO [main] master.HMaster: hbase.rootdir=hdfs://mycluster/hbase, hbase.cluster.distributed=true
2023-02-06 20:14:56,173 INFO [master/hadoop102:16000:becomeActiveMaster] master.HMaster: Adding backup master ZNode /hbase/backup-masters/hadoop102,16000,1675685690632
2023-02-06 20:14:56,260 INFO [master/hadoop102:16000:becomeActiveMaster] master.ActiveMasterManager: Deleting ZNode for /hbase/backup-masters/hadoop102,16000,1675685690632 from backup master directory
2023-02-06 20:14:56,292 INFO [master/hadoop102:16000:becomeActiveMaster] master.ActiveMasterManager: Registered as active master=hadoop102,16000,1675685690632
2023-02-06 20:14:56,296 INFO [master/hadoop102:16000:becomeActiveMaster] regionserver.ChunkCreator: Allocating data MemStoreChunkPool with chunk size 2 MB, max count 526, initial count 0
2023-02-06 20:14:56,298 INFO [master/hadoop102:16000:becomeActiveMaster] regionserver.ChunkCreator: Allocating index MemStoreChunkPool with chunk size 204.80 KB, max count 585, initial count 0
2023-02-06 20:14:57,532 INFO [Thread-19] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: DI
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:14:57,533 WARN [Thread-19] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741849_1025
2023-02-06 20:14:57,589 WARN [Thread-19] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.62:50010,DS-50e7b537-215f-4bea-878e-f8e4845e5720,DISK]
2023-02-06 20:14:57,839 INFO [Thread-19] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: D
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:14:57,842 WARN [Thread-19] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741850_1026
2023-02-06 20:14:57,904 WARN [Thread-19] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.64:50010,DS-7c3811a1-2608-4a72-bbfd-cbbe0631bbe0,DISK]
2023-02-06 20:14:58,106 INFO [Thread-19] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop:
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:14:58,108 WARN [Thread-19] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741851_1027
2023-02-06 20:14:58,187 WARN [Thread-19] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.63:50010,DS-8ef35cdd-6f2f-43f5-b979-f4813215e773,DISK]
2023-02-06 20:14:58,232 WARN [Thread-19] hdfs.DataStreamer: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /hbase/.tmp/hbase.version could only be written to 0 of the 1 minReplication nodes. There are 3 datanode(s) running and 3 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2276)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2820)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:910)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:577)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:549)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:518)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1086)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1035)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2960)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1540)
at org.apache.hadoop.ipc.Client.call(Client.java:1486)
at org.apache.hadoop.ipc.Client.call(Client.java:1385)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy20.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
at com.sun.proxy.$Proxy21.addBlock(Unknown Source)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:08,593 INFO [Thread-22] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: DI
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:08,594 WARN [Thread-22] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741852_1028
2023-02-06 20:15:08,680 WARN [Thread-22] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.62:50010,DS-50e7b537-215f-4bea-878e-f8e4845e5720,DISK]
2023-02-06 20:15:08,908 INFO [Thread-22] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: D
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:08,910 WARN [Thread-22] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741853_1029
2023-02-06 20:15:08,945 WARN [Thread-22] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.63:50010,DS-8ef35cdd-6f2f-43f5-b979-f4813215e773,DISK]
2023-02-06 20:15:09,041 INFO [Thread-22] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop:
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:09,084 WARN [Thread-22] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741854_1030
2023-02-06 20:15:09,105 WARN [Thread-22] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.64:50010,DS-7c3811a1-2608-4a72-bbfd-cbbe0631bbe0,DISK]
2023-02-06 20:15:09,131 WARN [Thread-22] hdfs.DataStreamer: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /hbase/.tmp/hbase.version could only be written to 0 of the 1 minReplication nodes. There are 3 datanode(s) running and 3 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2276)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2820)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:910)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:577)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:549)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:518)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1086)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1035)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2960)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1540)
at org.apache.hadoop.ipc.Client.call(Client.java:1486)
at org.apache.hadoop.ipc.Client.call(Client.java:1385)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy20.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
at com.sun.proxy.$Proxy21.addBlock(Unknown Source)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:19,200 INFO [Thread-24] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: DI
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:19,200 WARN [Thread-24] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741855_1031
2023-02-06 20:15:19,205 WARN [Thread-24] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.62:50010,DS-50e7b537-215f-4bea-878e-f8e4845e5720,DISK]
2023-02-06 20:15:19,213 INFO [Thread-24] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: D
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:19,214 WARN [Thread-24] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741856_1032
2023-02-06 20:15:19,217 WARN [Thread-24] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.63:50010,DS-8ef35cdd-6f2f-43f5-b979-f4813215e773,DISK]
2023-02-06 20:15:19,225 INFO [Thread-24] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop:
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:19,226 WARN [Thread-24] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741857_1033
2023-02-06 20:15:19,230 WARN [Thread-24] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.64:50010,DS-7c3811a1-2608-4a72-bbfd-cbbe0631bbe0,DISK]
2023-02-06 20:15:19,232 WARN [Thread-24] hdfs.DataStreamer: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /hbase/.tmp/hbase.version could only be written to 0 of the 1 minReplication nodes. There are 3 datanode(s) running and 3 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2276)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2820)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:910)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:577)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:549)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:518)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1086)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1035)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2960)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1540)
at org.apache.hadoop.ipc.Client.call(Client.java:1486)
at org.apache.hadoop.ipc.Client.call(Client.java:1385)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy20.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
at com.sun.proxy.$Proxy21.addBlock(Unknown Source)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:29,258 INFO [Thread-26] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: DI
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:29,258 WARN [Thread-26] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741858_1034
2023-02-06 20:15:29,263 WARN [Thread-26] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.62:50010,DS-50e7b537-215f-4bea-878e-f8e4845e5720,DISK]
2023-02-06 20:15:29,270 INFO [Thread-26] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop: D
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:29,271 WARN [Thread-26] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741859_1035
2023-02-06 20:15:29,274 WARN [Thread-26] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.64:50010,DS-7c3811a1-2608-4a72-bbfd-cbbe0631bbe0,DISK]
2023-02-06 20:15:29,281 INFO [Thread-26] hdfs.DataStreamer: Exception in createBlockOutputStream
java.io.IOException: Invalid token in javax.security.sasl.qop:
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil.readSaslMessage(DataTransferSaslUtil.java:220)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.doSaslHandshake(SaslDataTransferClient.java:553)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.getSaslStreams(SaslDataTransferClient.java:455)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.send(SaslDataTransferClient.java:298)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.checkTrustAndSend(SaslDataTransferClient.java:245)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:203)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient.socketSend(SaslDataTransferClient.java:193)
at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1705)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1655)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:29,282 WARN [Thread-26] hdfs.DataStreamer: Abandoning BP-112198028-192.168.20.62-1675684351625:blk_1073741860_1036
2023-02-06 20:15:29,285 WARN [Thread-26] hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[192.168.20.63:50010,DS-8ef35cdd-6f2f-43f5-b979-f4813215e773,DISK]
2023-02-06 20:15:29,288 WARN [Thread-26] hdfs.DataStreamer: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /hbase/.tmp/hbase.version could only be written to 0 of the 1 minReplication nodes. There are 3 datanode(s) running and 3 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2276)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2820)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:910)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:577)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:549)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:518)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1086)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1035)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2960)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1540)
at org.apache.hadoop.ipc.Client.call(Client.java:1486)
at org.apache.hadoop.ipc.Client.call(Client.java:1385)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy20.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
at com.sun.proxy.$Proxy21.addBlock(Unknown Source)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at jdk.internal.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:361)
at com.sun.proxy.$Proxy22.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
2023-02-06 20:15:29,290 ERROR [master/hadoop102:16000:becomeActiveMaster] master.HMaster: Failed to become active master
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /hbase/.tmp/hbase.version could only be written to 0 of the 1 minReplication nodes. There are 3 datanode(s) running and 3 node(s) are excluded in this operation.
... (stack trace identical to the DataStreamer exception above; duplicate frames elided)
2023-02-06 20:15:29,290 ERROR [master/hadoop102:16000:becomeActiveMaster] master.HMaster: ***** ABORTING master hadoop102,16000,1675685690632: Unhandled exception. Starting shutdown. *****
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /hbase/.tmp/hbase.version could only be written to 0 of the 1 minReplication nodes. There are 3 datanode(s) running and 3 node(s) are excluded in this operation.
... (stack trace identical to the DataStreamer exception above; duplicate frames elided)
2023-02-06 20:15:29,290 INFO [master/hadoop102:16000:becomeActiveMaster] regionserver.HRegionServer: ***** STOPPING region server 'hadoop102,16000,1675685690632' *****
2023-02-06 20:15:29,291 INFO [master/hadoop102:16000:becomeActiveMaster] regionserver.HRegionServer: STOPPED: Stopped by master/hadoop102:16000:becomeActiveMaster
2023-02-06 20:15:29,499 INFO [master/hadoop102:16000] ipc.NettyRpcServer: Stopping server on /192.168.20.62:16000
2023-02-06 20:15:29,502 INFO [master/hadoop102:16000] zookeeper.ZKLeaderManager: Stepping down as leader
2023-02-06 20:15:29,523 INFO [master/hadoop102:16000] token.AuthenticationTokenSecretManager: Stopping leader election, because: SecretManager stopping
2023-02-06 20:15:29,527 INFO [master/hadoop102:16000] regionserver.HRegionServer: Stopping infoServer
2023-02-06 20:15:29,539 INFO [master/hadoop102:16000] handler.ContextHandler: Stopped o.a.h.t.o.e.j.w.WebAppContext@77aea{master,/,null,STOPPED}{file:/opt/module/hbase/hbase-webapps/master}
2023-02-06 20:15:29,541 INFO [master/hadoop102:16000] server.AbstractConnector: Stopped ServerConnector@50ec4bfc{HTTP/1.1, (http/1.1)}{0.0.0.0:16010}
2023-02-06 20:15:29,541 INFO [master/hadoop102:16000] server.session: node0 Stopped scavenging
2023-02-06 20:15:29,542 INFO [master/hadoop102:16000] handler.ContextHandler: Stopped o.a.h.t.o.e.j.s.ServletContextHandler@7971c2a9{static,/static,file:///opt/module/hbase/hbase-webapps/static/,STOPPED}
2023-02-06 20:15:29,542 INFO [master/hadoop102:16000] handler.ContextHandler: Stopped o.a.h.t.o.e.j.s.ServletContextHandler@7f27f59b{logs,/logs,file:///opt/module/hbase/logs/,STOPPED}
2023-02-06 20:15:29,545 INFO [master/hadoop102:16000] regionserver.HRegionServer: aborting server hadoop102,16000,1675685690632
2023-02-06 20:15:29,545 INFO [master/hadoop102:16000] regionserver.HRegionServer: stopping server hadoop102,16000,1675685690632; all regions closed.
2023-02-06 20:15:29,545 INFO [master/hadoop102:16000] hbase.ChoreService: Chore service for: master/hadoop102:16000 had [] on shutdown
2023-02-06 20:15:29,659 ERROR [main-EventThread] zookeeper.ClientCnxn: Error while calling watcher
java.util.concurrent.RejectedExecutionException: Task java.util.concurrent.FutureTask@24944c06[Not completed, task = java.util.concurrent.Executors$RunnableAdapter@15cbfd3b[Wrapped task = org.apache.hadoop.hbase.zookeeper.ZKWatcher$$Lambda$126/0x00000008003dac40@4739fd16]] rejected from java.util.concurrent.ThreadPoolExecutor@3f8c129b[Terminated, pool size = 0, active threads = 0, queued tasks = 0, completed tasks = 16]
at java.base/java.util.concurrent.ThreadPoolExecutor$AbortPolicy.rejectedExecution(ThreadPoolExecutor.java:2055)
at java.base/java.util.concurrent.ThreadPoolExecutor.reject(ThreadPoolExecutor.java:825)
at java.base/java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1355)
at java.base/java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:118)
at java.base/java.util.concurrent.Executors$DelegatedExecutorService.submit(Executors.java:714)
at org.apache.hadoop.hbase.zookeeper.ZKWatcher.process(ZKWatcher.java:602)
at org.apache.hadoop.hbase.zookeeper.PendingWatcher.process(PendingWatcher.java:38)
at org.apache.zookeeper.ClientCnxn$EventThread.processEvent(ClientCnxn.java:535)
at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:510)
2023-02-06 20:15:29,659 ERROR [main-EventThread] zookeeper.ClientCnxn: Error while calling watcher
java.util.concurrent.RejectedExecutionException: Task java.util.concurrent.FutureTask@2488da66[Not completed, task = java.util.concurrent.Executors$RunnableAdapter@7dbd8093[Wrapped task = org.apache.hadoop.hbase.zookeeper.ZKWatcher$$Lambda$126/0x00000008003dac40@7b4f075e]] rejected from java.util.concurrent.ThreadPoolExecutor@3f8c129b[Terminated, pool size = 0, active threads = 0, queued tasks = 0, completed tasks = 16]
at java.base/java.util.concurrent.ThreadPoolExecutor$AbortPolicy.rejectedExecution(ThreadPoolExecutor.java:2055)
at java.base/java.util.concurrent.ThreadPoolExecutor.reject(ThreadPoolExecutor.java:825)
at java.base/java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1355)
at java.base/java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:118)
at java.base/java.util.concurrent.Executors$DelegatedExecutorService.submit(Executors.java:714)
at org.apache.hadoop.hbase.zookeeper.ZKWatcher.process(ZKWatcher.java:602)
at org.apache.zookeeper.ClientCnxn$EventThread.processEvent(ClientCnxn.java:535)
at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:510)
2023-02-06 20:15:29,659 INFO [main-EventThread] zookeeper.ClientCnxn: EventThread shut down for session: 0x300021049630004
2023-02-06 20:15:29,660 WARN [Thread-5] zookeeper.Login: TGT renewal thread has been interrupted and will exit.
2023-02-06 20:15:29,660 INFO [master/hadoop102:16000] zookeeper.ZooKeeper: Session: 0x300021049630004 closed
2023-02-06 20:15:29,660 INFO [master/hadoop102:16000] regionserver.HRegionServer: Exiting; stopping=hadoop102,16000,1675685690632; zookeeper connection closed.
2023-02-06 20:15:29,660 ERROR [main] master.HMasterCommandLine: Master exiting
java.lang.RuntimeException: HMaster Aborted
at org.apache.hadoop.hbase.master.HMasterCommandLine.startMaster(HMasterCommandLine.java:254)
at org.apache.hadoop.hbase.master.HMasterCommandLine.run(HMasterCommandLine.java:145)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
at org.apache.hadoop.hbase.util.ServerCommandLine.doMain(ServerCommandLine.java:140)
at org.apache.hadoop.hbase.master.HMaster.main(HMaster.java:2946)
The HDFS DataNode error log is as follows:
javax.security.sasl.SaslException: Invalid token in javax.security.sasl.qop:
at java.security.sasl/com.sun.security.sasl.util.AbstractSaslImpl.parseProp(AbstractSaslImpl.java:242)
at java.security.sasl/com.sun.security.sasl.util.AbstractSaslImpl.parseQop(AbstractSaslImpl.java:206)
at java.security.sasl/com.sun.security.sasl.util.AbstractSaslImpl.parseQop(AbstractSaslImpl.java:197)
at java.security.sasl/com.sun.security.sasl.util.AbstractSaslImpl.<init>(AbstractSaslImpl.java:73)
at java.security.sasl/com.sun.security.sasl.digest.DigestMD5Base.<init>(DigestMD5Base.java:174)
at java.security.sasl/com.sun.security.sasl.digest.DigestMD5Server.<init>(DigestMD5Server.java:145)
at java.security.sasl/com.sun.security.sasl.digest.FactoryImpl.createSaslServer(FactoryImpl.java:109)
at java.security.sasl/javax.security.sasl.Sasl.createSaslServer(Sasl.java:580)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslParticipant.createServerSaslParticipant(SaslParticipant.java:66)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer.doSaslHandshake(SaslDataTransferServer.java:387)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer.getSaslStreams(SaslDataTransferServer.java:308)
at org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer.receive(SaslDataTransferServer.java:135)
at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:234)
at java.base/java.lang.Thread.run(Thread.java:834)
2023-02-06 20:15:49,760 ERROR org.apache.hadoop.hdfs.server.datanode.DataNode: hadoop102:50010:DataXceiver error processing unknown operation src: /192.168.20.63:41892 dst: /192.168.20.62:50010
javax.security.sasl.SaslException: Invalid token in javax.security.sasl.qop:
... (stack trace identical to the first SaslException above; elided)
2023-02-06 20:15:59,808 ERROR org.apache.hadoop.hdfs.server.datanode.DataNode: hadoop102:50010:DataXceiver error processing unknown operation src: /192.168.20.63:41896 dst: /192.168.20.62:50010
javax.security.sasl.SaslException: Invalid token in javax.security.sasl.qop: D
... (stack trace identical to the first SaslException above; elided)
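For context: the hdfs-site.xml further below sets dfs.data.transfer.protection, which turns the DataNode's data transfer port (50010 here) into a mandatory SASL handshake. A commonly reported cause of "DataXceiver error processing unknown operation" together with "Invalid token in javax.security.sasl.qop" is a peer that connects to this port without negotiating SASL at all, i.e. an HDFS client whose configuration does not carry a matching dfs.data.transfer.protection value; a client-side property sketch follows the hbase-site configuration below.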
The hbase-site.xml configuration is as follows:
<configuration>
<property>
<name>hbase.cluster.distributed</name>
<value>true</value>
</property>
<property>
<name>hbase.tmp.dir</name>
<value>/opt/module/hbase/tmp</value>
</property>
<property>
<name>hbase.unsafe.stream.capability.enforce</name>
<value>false</value>
</property>
<property>
<name>hbase.rootdir</name>
<value>hdfs://mycluster/hbase</value>
</property>
<property>
<name>hbase.zookeeper.quorum</name>
<value>hadoop102,hadoop103,hadoop104</value>
</property>
<property>
<name>hbase.wal.provider</name>
<value>filesystem</value>
</property>
<property>
<name>hbase.coprocessor.abortonerror</name>
<value>false</value>
</property>
<!-- HBase Kerberos authentication -->
<property>
<name>hbase.security.authentication</name>
<value>kerberos</value>
</property>
<!-- Secure HBase RPC communication -->
<property>
<name>hbase.rpc.engine</name>
<value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
</property>
<property>
<name>hbase.coprocessor.region.classes</name>
<value>org.apache.hadoop.hbase.security.token.TokenProvider</value>
</property>
<!-- HMaster Kerberos principal -->
<property>
<name>hbase.master.kerberos.principal</name>
<value>hadoop/_HOST@ZHT.COM</value>
</property>
<!-- HMaster keytab file location -->
<property>
<name>hbase.master.keytab.file</name>
<value>/etc/security/keytab/hadoop.keytab</value>
</property>
<!-- RegionServer Kerberos principal -->
<property>
<name>hbase.regionserver.kerberos.principal</name>
<value>hadoop/_HOST@ZHT.COM</value>
</property>
<!-- RegionServer keytab file location -->
<property>
<name>hbase.regionserver.keytab.file</name>
<value>/etc/security/keytab/hadoop.keytab</value>
</property>
</configuration>
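Note that hbase-site.xml contains no HDFS client-side data transfer settings, so the HBase daemons only see them if the cluster's hdfs-site.xml is on the HBase classpath (e.g. via HADOOP_CONF_DIR or a copy in the HBase conf directory). A minimal sketch of the property to mirror into the HBase-side configuration, assuming the SASL mismatch described above is the cause; the value must equal dfs.data.transfer.protection in hdfs-site.xml:

<!-- Hypothetical addition: mirror the DataNode's data transfer protection level
     so the HDFS client inside HBase negotiates SASL on port 50010 -->
<property>
<name>dfs.data.transfer.protection</name>
<value>integrity</value>
</property>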
The hdfs-site.xml configuration is as follows:
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<!-- Name of the fully distributed cluster (nameservice) -->
<property>
<name>dfs.nameservices</name>
<value>mycluster</value>
</property>
<!-- NameNode data storage directory -->
<property>
<name>dfs.namenode.name.dir</name>
<value>/opt/module/hadoop-3.1.3/data/tmp/namenod</value>
</property>
<!-- DataNode data storage directory -->
<property>
<name>dfs.datanode.data.dir</name>
<value>/opt/module/hadoop-3.1.3/data/tmp/datanod</value>
</property>
<!-- NameNodes in the cluster -->
<property>
<name>dfs.ha.namenodes.mycluster</name>
<value>nn1,nn2</value>
</property>
<!-- Where NameNode metadata is stored on the JournalNodes -->
<property>
<name>dfs.namenode.shared.edits.dir</name>
<value>qjournal://hadoop102:8485;hadoop103:8485;hadoop104:8485/mycluster</value>
</property>
<!-- JournalNode server storage directory -->
<property>
<name>dfs.journalnode.edits.dir</name>
<value>/opt/module/hadoop-3.1.3/data/tmp/jn</value>
</property>
<!-- Enable automatic NameNode failover -->
<property>
<name>dfs.ha.automatic-failover.enabled</name>
<value>true</value>
</property>
<!-- Fencing method: ensures only one NameNode responds to clients at a time -->
<property>
<name>dfs.ha.fencing.methods</name>
<value>sshfence</value>
</property>
<!-- sshfence requires passwordless SSH login -->
<property>
<name>dfs.ha.fencing.ssh.private-key-files</name>
<value>/root/.ssh/id_rsa</value>
</property>
<!-- sshfence connection timeout -->
<property>
<name>dfs.ha.fencing.ssh.connect-timeout</name>
<value>30000</value>
</property>
<!-- Client failover proxy provider, used to determine which NameNode is active -->
<property>
<name>dfs.client.failover.proxy.provider.mycluster</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<!-- Kerberos authentication for the NameNodes -->
<property>
<name>dfs.block.access.token.enable</name>
<value>true</value>
</property>
<property>
<name>dfs.namenode.keytab.file</name>
<value>/etc/security/keytab/hadoop.keytab</value>
</property>
<property>
<name>dfs.namenode.kerberos.principal</name>
<value>hadoop/_HOST@ZHT.COM</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.principal</name>
<value>HTTP/_HOST@ZHT.COM</value>
</property>
<property>
<name>dfs.web.authentication.kerberos.keytab</name>
<value>/etc/security/keytab/hadoop.keytab</value>
</property>
<!-- SSL access to the NameNode web UI -->
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
</property>
<property>
<name>dfs.http.policy</name>
<value>HTTPS_ONLY</value>
</property>
<!-- nn1 RPC address -->
<property>
<name>dfs.namenode.rpc-address.mycluster.nn1</name>
<value>hadoop102:9000</value>
</property>
<!-- nn2 RPC address -->
<property>
<name>dfs.namenode.rpc-address.mycluster.nn2</name>
<value>hadoop103:9000</value>
</property>
<!-- nn1 HTTP address -->
<property>
<name>dfs.namenode.http-address.mycluster.nn1</name>
<value>hadoop102:9870</value>
</property>
<!-- nn2 HTTP address -->
<property>
<name>dfs.namenode.http-address.mycluster.nn2</name>
<value>hadoop103:9870</value>
</property>
<property>
<name>dfs.namenode.https-address</name>
<value>0.0.0.0:9870</value>
</property>
<property>
<name>dfs.permissions.supergroup</name>
<value>hadoop</value>
</property>
<!-- Kerberos authentication for the DataNodes -->
<property>
<name>dfs.datanode.keytab.file</name>
<value>/etc/security/keytab/hadoop.keytab</value>
</property>
<property>
<name>dfs.datanode.kerberos.principal</name>
<value>hadoop/_HOST@ZHT.COM</value>
</property>
<!-- DataNode SASL settings -->
<property>
<name>dfs.datanode.data.dir.perm</name>
<value>700</value>
</property>
<property>
<name>dfs.datanode.address</name>
<value>0.0.0.0:50010</value>
</property>
<property>
<name>dfs.datanode.https.address</name>
<value>0.0.0.0:50075</value>
</property>
<property>
<name>dfs.data.transfer.protection</name>
<value>integrity</value>
</property>
<!-- Kerberos authentication for the JournalNodes -->
<property>
<name>dfs.journalnode.keytab.file</name>
<value>/etc/security/keytab/hadoop.keytab</value>
</property>
<property>
<name>dfs.journalnode.kerberos.principal</name>
<value>hadoop/_HOST@ZHT.COM</value>
</property>
<property>
<name>dfs.journalnode.kerberos.internal.spnego.principal</name>
<value>${dfs.web.authentication.kerberos.principal}</value>
</property>
<property>
<name>dfs.journalnode.http-address</name>
<value>0.0.0.0:8480</value>
</property>
<property>
<name>dfs.datanode.max.transfer.threads</name>
<value>8192</value>
</property>
</configuration>
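For reference, three of the DataNode settings above are exactly the combination Hadoop documents for running secure DataNodes with SASL on a non-privileged port (instead of jsvc and privileged ports), so the server side looks self-consistent. Grouped here verbatim from the configuration above:

<!-- SASL-on-DataTransferProtocol prerequisites (already present in hdfs-site.xml) -->
<property>
<name>dfs.datanode.address</name>
<value>0.0.0.0:50010</value>
</property>
<property>
<name>dfs.data.transfer.protection</name>
<value>integrity</value>
</property>
<property>
<name>dfs.http.policy</name>
<value>HTTPS_ONLY</value>
</property>

Since these are in place, the repeated handshake failures in the DataNode log most likely come from the connecting side (the HDFS client inside the HBase master) rather than from the DataNodes themselves.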