GBase 8a Load with a Hadoop + Kerberos Environment: End-to-End Walkthrough


Original article: www.gbase.cn/community/p…
More great content is available in the GBase technical community; GBase is committed to being the database vendor its users trust most.

1. Environment Preparation

Host information

IP: 192.168.195.110
Hostname: mjmhadoop

Create the Hadoop OS user

[root@mjmhadoop ~]$ useradd mjmos
[root@mjmhadoop ~]$ passwd mjmos

Prepare directories

[root@mjmhadoop ~]$ chown -R mjmos:mjmos /opt/
[root@mjmhadoop ~]$ su - mjmos
[mjmos@mjmhadoop ~]$ cd /opt/
[mjmos@mjmhadoop /opt]$ mkdir /opt/software
[mjmos@mjmhadoop /opt]$ mkdir /opt/module

Download the packages

[mjmos@mjmhadoop /opt]$ wget https://repo.huaweicloud.com/java/jdk/8u202-b08/jdk-8u202-linux-x64.tar.gz
[mjmos@mjmhadoop /opt]$ wget mirrors.tuna.tsinghua.edu.cn/apache/hado…
[mjmos@mjmhadoop /opt]$ tar -zxvf jdk-8u202-linux-x64.tar.gz -C /opt/module/
[mjmos@mjmhadoop /opt]$ tar -zxvf hadoop-2.10.2.tar.gz -C /opt/module/

2. Environment Configuration

hosts entry and environment variables

[mjmos@mjmhadoop /opt]$ exit
[root@mjmhadoop ~]$ echo "192.168.195.110 mjmhadoop" >> /etc/hosts
[root@mjmhadoop ~]$ cat >> /etc/profile << 'eof'

# Java environment variables
export JAVA_HOME=/opt/module/jdk1.8.0_202
export PATH=$PATH:$JAVA_HOME/bin
# Hadoop environment variables
export HADOOP_HOME=/opt/module/hadoop-2.10.2
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
# Other environment variables Hadoop needs
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib"
#HADOOP VARIABLES END
eof

[root@mjmhadoop ~]$ source /etc/profile
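A quick sanity check that the JDK and Hadoop are now on PATH (a minimal sketch; versions should match the tarballs unpacked above):

java -version
hadoop version
echo $JAVA_HOME $HADOOP_HOME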

hadoop-env.sh configuration

[root@mjmhadoop ~]$ su - mjmos
[mjmos@mjmhadoop ~]$ sed -i 's#^export JAVA_HOME.*#export JAVA_HOME=/opt/module/jdk1.8.0_202#g' /opt/module/hadoop-2.10.2/etc/hadoop/hadoop-env.sh
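The edit can be verified with a quick grep (a minimal check):

grep '^export JAVA_HOME' /opt/module/hadoop-2.10.2/etc/hadoop/hadoop-env.sh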

core-site.xml configuration


[mjmos@mjmhadoop ~]$ sed -i '/configuration/d' /opt/module/hadoop-2.10.2/etc/hadoop/core-site.xml
[mjmos@mjmhadoop ~]$ cat >>/opt/module/hadoop-2.10.2/etc/hadoop/core-site.xml<<'eof'
<configuration>
        <property>
                <name>hadoop.tmp.dir</name>
                <value>/opt/module/hadoop-2.10.2/data/tmp</value>
        </property>
        <property>
                <name>fs.defaultFS</name>
                <value>hdfs://192.168.195.110:9000</value>
        </property>
</configuration>
eof

hdfs-site.xml configuration

[mjmos@mjmhadoop ~]$ sed -i '/configuration/d' /opt/module/hadoop-2.10.2/etc/hadoop/hdfs-site.xml
[mjmos@mjmhadoop ~]$ cat >>/opt/module/hadoop-2.10.2/etc/hadoop/hdfs-site.xml<<'eof'
<configuration>
        <property>
                <name>dfs.replication</name>
                <value>1</value>
        </property>
</configuration>
eof

Format the NameNode (create initial metadata)

[mjmos@mjmhadoop ~]$ hdfs namenode -format
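If the format succeeds, the NameNode metadata tree appears under the hadoop.tmp.dir configured above (dfs/name is the Hadoop 2.x default subpath); a minimal check:

# current/ should contain VERSION and an initial fsimage
ls /opt/module/hadoop-2.10.2/data/tmp/dfs/name/current/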

Passwordless SSH

[mjmos@mjmhadoop ~]$ ssh-keygen
[mjmos@mjmhadoop ~]$ ssh-copy-id mjmos@192.168.195.110
[mjmos@mjmhadoop ~]$ ssh-copy-id mjmhadoop
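A round trip confirms the key landed; the command should print the hostname without a password prompt (a minimal check):

ssh mjmhadoop 'hostname'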

3. Environment Verification

Start

# During startup the passwordless login may still prompt once; typing yes is enough
[mjmos@mjmhadoop ~]$ start-all.sh

Check

[mjmos@mjmhadoop ~]$ jps
6898 ResourceManager
6770 SecondaryNameNode
6444 NameNode
7309 Jps
7039 NodeManager
6591 DataNode

Web UI

http://192.168.195.110:50070/
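The same page can be probed from the shell (a minimal sketch; 50070 is the default NameNode HTTP port in Hadoop 2.x):

# expect 200
curl -s -o /dev/null -w '%{http_code}\n' http://192.168.195.110:50070/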

4. Testing

Create an HDFS directory

hdfs dfs -mkdir -p /test/tb1

Create local files
echo "miaojiaming1" > /home/mjmos/t1_1
echo "miaojiaming2" > /home/mjmos/t1_2

Upload the files
hdfs dfs -put /home/mjmos/t1_1 /test/tb1
hdfs dfs -put /home/mjmos/t1_2 /test/tb1

View the files
hdfs dfs -cat /test/tb1/t1_1
hdfs dfs -cat /test/tb1/t1_2

Local pull test
hdfs dfs -get hdfs://mjmos@192.168.195.110:9000/test/tb1/t1_1  /tmp
hdfs dfs -get hdfs://mjmos@192.168.195.110:9000/test/tb1/t1_2  /tmp
[mjmos@mjmhadoop ~]$ cat /tmp/t1_1
miaojiaming1
[mjmos@mjmhadoop ~]$ cat /tmp/t1_2
miaojiaming2

Load
load data infile 'hdfs://mjmos@192.168.195.110/test/tb1/t1_*' into table t1;
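Before running the load from GBase, it is worth confirming that the wildcard URI resolves to the uploaded files (a minimal check; quoting keeps the local shell from expanding the *):

# should list t1_1 and t1_2
hdfs dfs -ls 'hdfs://mjmos@192.168.195.110:9000/test/tb1/t1_*'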

5. Kerberos Installation and Configuration

Install the Kerberos server

#yum install -y krb5-server krb5-libs krb5-workstation krb5-auth-dialog
[root@mjmhadoop ~]$ yum install -y krb5-server krb5-libs krb5-workstation

KDC configuration

The realm name MJM.COM is custom and can be changed.

[root@mjmhadoop ~]$ cat >/var/kerberos/krb5kdc/kdc.conf<<'eof'
[kdcdefaults]
kdc_ports = 88
kdc_tcp_ports = 88

[realms]
MJM.COM = {
#master_key_type = aes256-cts
acl_file = /var/kerberos/krb5kdc/kadm5.acl
dict_file = /usr/share/dict/words
admin_keytab = /var/kerberos/krb5kdc/kadm5.keytab
permitted_enctypes = aes128-cts:normal des3-hmac-sha1:normal arcfour-hmac:normal camellia256-cts:normal camellia128-cts:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal
}
eof

Configure Kerberos (krb5.conf)

[root@mjmhadoop /opt/module/hadoop-2.10.2/etc/hadoop]$ cat > /etc/krb5.conf <<'eof'
# Configuration snippets may be placed in this directory as well
includedir /etc/krb5.conf.d/

[logging]
default = FILE:/var/log/krb5libs.log
kdc = FILE:/var/log/krb5kdc.log
admin_server = FILE:/var/log/kadmind.log

[libdefaults]
dns_lookup_realm = false
ticket_lifetime = 24h
renew_lifetime = 7d
forwardable = true
rdns = false
default_realm = MJM.COM

[realms]
MJM.COM = {
kdc = mjmhadoop
admin_server = mjmhadoop
}

[domain_realm]
.mjm.com = MJM.COM
mjm.com = MJM.COM
eof

Create/initialize the Kerberos database

# You will be prompted to set a master password here
[root@mjmhadoop ~]# kdb5_util create -s -r MJM.COM

Start the services

[root@mjmhadoop ~]# service krb5kdc start
[root@mjmhadoop ~]# service kadmin start
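On a systemd-based host the same services can also be enabled so they survive a reboot (a minimal sketch; unit names follow the stock krb5-server package):

systemctl enable --now krb5kdc kadmin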

Set ACL permissions for the database administrator

[root@mjmhadoop ~]# echo '*/admin@MJM.COM     *' >/var/kerberos/krb5kdc/kadm5.acl

Create principals

[root@mjmhadoop ~]$ kadmin.local -q "addprinc -randkey root/mjmhadoop@MJM.COM"
[root@mjmhadoop ~]$ kadmin.local -q "addprinc -randkey HTTP/mjmhadoop@MJM.COM"
[root@mjmhadoop ~]$ kadmin.local -q "addprinc -randkey mjmos/mjmhadoop@MJM.COM"
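The new principals can be listed to confirm they exist (a minimal check):

kadmin.local -q "listprincs"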

Export the keytab files

[root@mjmhadoop ~]$ mkdir /home/kerberos
[root@mjmhadoop ~]$ cd /home/kerberos
[root@mjmhadoop /home/kerberos]$ kadmin.local -q "xst -k root-mjmhadoop.keytab root/mjmhadoop@MJM.COM"
[root@mjmhadoop /home/kerberos]$ kadmin.local -q "xst -k mjmos-mjmhadoop.keytab mjmos/mjmhadoop@MJM.COM"
[root@mjmhadoop /home/kerberos]$ kadmin.local -q "xst -k http-mjmhadoop.keytab HTTP/mjmhadoop@MJM.COM"

Merge the keytab files

[root@mjmhadoop /home/kerberos]$ ktutil
ktutil:  rkt http-mjmhadoop.keytab
ktutil:  rkt mjmos-mjmhadoop.keytab
ktutil:  rkt root-mjmhadoop.keytab
ktutil:  wkt merged.keytab
ktutil:  q
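The same merge can be scripted by feeding ktutil its commands on stdin, which is handy for re-runs (a minimal sketch, equivalent to the interactive session above):

ktutil <<'eof'
rkt http-mjmhadoop.keytab
rkt mjmos-mjmhadoop.keytab
rkt root-mjmhadoop.keytab
wkt merged.keytab
q
eof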

Verify the keytab contents

[root@mjmhadoop /home/kerberos]$ klist -k -t merged.keytab
Keytab name: FILE:merged.keytab
KVNO Timestamp           Principal
---- ------------------- ------------------------------------------------------
   2 06/20/2025 16:43:17 HTTP/mjmhadoop@MJM.COM
   2 06/20/2025 16:43:17 HTTP/mjmhadoop@MJM.COM
   2 06/20/2025 16:43:17 mjmos/mjmhadoop@MJM.COM
   2 06/20/2025 16:43:17 mjmos/mjmhadoop@MJM.COM
   2 06/20/2025 16:43:17 root/mjmhadoop@MJM.COM
   2 06/20/2025 16:43:17 root/mjmhadoop@MJM.COM

Check the current authenticated user

[root@mjmhadoop /home/kerberos]$ klist
klist: No credentials cache found (filename: /tmp/krb5cc_0)

Authenticate with the merged keytab

[root@mjmhadoop /home/kerberos]$ kinit -V -k -t /home/kerberos/merged.keytab root/mjmhadoop@MJM.COM
Using default cache: /tmp/krb5cc_0
Using principal: root/mjmhadoop@MJM.COM
Using keytab: /home/kerberos/merged.keytab
Authenticated to Kerberos v5

Check the authenticated user again

[root@mjmhadoop /home/kerberos]$ klist
Ticket cache: FILE:/tmp/krb5cc_0
Default principal: root/mjmhadoop@MJM.COM

Valid starting       Expires              Service principal
06/20/2025 17:17:26  06/21/2025 17:17:26  krbtgt/MJM.COM@MJM.COM
        renew until 06/20/2025 17:17:26

To delete the current credential cache, run kdestroy.

Copy the keytab and change ownership

[root@mjmhadoop /home/kerberos]$ cp merged.keytab /opt/module/
[root@mjmhadoop /home/kerberos]$ chown mjmos:mjmos /opt/module/merged.keytab
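A keytab is a credential, so it is worth locking it down to the owner and confirming the service user can still read it (a minimal sketch):

chmod 600 /opt/module/merged.keytab
# mjmos should be able to list the entries
su - mjmos -c 'klist -k -t /opt/module/merged.keytab'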

core-site.xml configuration (Kerberos enabled)

[root@mjmhadoop /home/kerberos]$ cat > /opt/module/hadoop-2.10.2/etc/hadoop/core-site.xml <<'eof'
<configuration>
        <property>
                <name>hadoop.tmp.dir</name>
                <value>/opt/module/hadoop-2.10.2/data/tmp</value>
        </property>
        <property>
                <name>fs.defaultFS</name>
                <value>hdfs://192.168.195.110:9000</value>
        </property>
        <property>
                <name>hadoop.security.authentication</name>
                <value>kerberos</value>
        </property>
        <property>
                <name>hadoop.security.authorization</name>
                <value>true</value>
        </property>
        <property>
                <name>hadoop.rpc.protection</name>
                <value>authentication</value>
        </property>
</configuration>
eof
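If xmllint (from libxml2) is available, the rewritten file can be syntax-checked before anything is restarted (a minimal check):

xmllint --noout /opt/module/hadoop-2.10.2/etc/hadoop/core-site.xml && echo OK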

hdfs-site.xml configuration (Kerberos enabled)

[root@mjmhadoop /home/kerberos]$ cat > /opt/module/hadoop-2.10.2/etc/hadoop/hdfs-site.xml <<'eof'
<configuration>
        <property>
                <name>dfs.replication</name>
                <value>1</value>
        </property>
        <property>
                <name>dfs.block.access.token.enable</name>
                <value>true</value>
        </property>
        <property>
                <name>dfs.datanode.data.dir.perm</name>
                <value>700</value>
        </property>
        <property>
                <name>dfs.namenode.keytab.file</name>
                <value>/opt/module/merged.keytab</value>
        </property>
        <property>
                <name>dfs.namenode.kerberos.principal</name>
                <value>root/_HOST@MJM.COM</value>
        </property>
        <property>
                <name>dfs.namenode.kerberos.https.principal</name>
                <value>HTTP/_HOST@MJM.COM</value>
        </property>
        <property>
                <name>dfs.datanode.keytab.file</name>
                <value>/opt/module/merged.keytab</value>
        </property>
        <property>
                <name>dfs.datanode.kerberos.principal</name>
                <value>root/_HOST@MJM.COM</value>
        </property>
        <property>
                <name>dfs.datanode.kerberos.https.principal</name>
                <value>HTTP/_HOST@MJM.COM</value>
        </property>
        <property>
                <name>dfs.datanode.address</name>
                <value>0.0.0.0:61004</value>
        </property>
        <property>
                <name>dfs.datanode.http.address</name>
                <value>0.0.0.0:61006</value>
        </property>
        <property>
                <name>dfs.webhdfs.enabled</name>
                <value>true</value>
        </property>
        <property>
                <name>dfs.web.authentication.kerberos.keytab</name>
                <value>/opt/module/merged.keytab</value>
        </property>
        <property>
                <name>dfs.web.authentication.kerberos.principal</name>
                <value>HTTP/_HOST@MJM.COM</value>
        </property>
        <property>
                <name>dfs.http.policy</name>
                <value>HTTPS_ONLY</value>
        </property>
        <property>
                <name>dfs.data.transfer.protection</name>
                <value>integrity</value>
        </property>
        <property>
                <name>dfs.permissions.supergroup</name>
                <value>supergroup</value>
                <description>The name of the group of super-users.</description>
        </property>
        <property>
                <name>dfs.secondary.namenode.keytab.file</name>
                <value>/opt/module/merged.keytab</value>
        </property>
        <property>
                <name>dfs.secondary.namenode.kerberos.principal</name>
                <value>root/_HOST@MJM.COM</value>
        </property>
</configuration>
eof
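After the daemons are restarted later, individual keys can be spot-checked with hdfs getconf (a minimal check):

hdfs getconf -confKey dfs.http.policy
hdfs getconf -confKey dfs.data.transfer.protection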

Certificate configuration

Generate the CA key; the password entered here is test999

openssl req -new -x509 -keyout ca_key -out ca_cert -days 9999 -subj '/C=CN/ST=hunan/L=changsha/O=dtdream/OU=security/CN=mjm.com'

Generate the keystore

keytool -keystore keystore -alias localhost -validity 9999 -genkey -keyalg RSA -keysize 2048 -dname "CN=mjm.com, OU=test, O=test, L=changsha, ST=hunan, C=cn"

Generate the truststore and import the CA certificate

keytool -keystore truststore -alias CARoot -import -file ca_cert

Export a certificate signing request (cert) from the keystore

keytool -certreq -alias localhost -keystore keystore -file cert

Sign the cert with the CA to produce the signed certificate; note the trailing test999

openssl x509 -req -CA ca_cert -CAkey ca_key -in cert -out cert_signed -days 9999 -CAcreateserial -passin pass:test999

Import the CA certificate into the keystore

keytool -keystore keystore -alias CARoot -import -file ca_cert

Import the CA-signed cert into the keystore as well

keytool -keystore keystore -alias localhost -import -file cert_signed
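The keystore should now hold both the CARoot entry and the signed localhost entry (a minimal check):

keytool -list -keystore keystore -storepass test999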

Deploy the keystore and truststore

[root@mjmhadoop /home/kerberos]$ cp keystore /opt/module/
[root@mjmhadoop /home/kerberos]$ cp truststore /opt/module/
[root@mjmhadoop /home/kerberos]$ chown mjmos:mjmos /opt/module/keystore
[root@mjmhadoop /home/kerberos]$ chown mjmos:mjmos /opt/module/truststore

Fix the DataNode error: build and install JSVC

The following exception occurs when starting the DataNode:
2018-03-02 17:20:15,261 FATAL org.apache.hadoop.hdfs.server.datanode.DataNode: Exception in secureMain
java.lang.RuntimeException: Cannot start secure DataNode without configuring either privileged resources or SASL RPC data transfer protection and SSL for HTTP.  Using privileged resources in combination with SASL RPC data transfer protection is not supported.

[mjmos@mjmhadoop ~]$ cd /opt/module
[mjmos@mjmhadoop /opt/module]$ wget archive.apache.org/dist/common…
[mjmos@mjmhadoop /opt/module]$ wget https://archive.apache.org/dist/commons/daemon/binaries/commons-daemon-1.0.15-bin.tar.gz
[mjmos@mjmhadoop /opt/module]$ tar xf commons-daemon-1.0.15-src.tar.gz
[mjmos@mjmhadoop /opt/module]$ tar xf commons-daemon-1.0.15-bin.tar.gz
[mjmos@mjmhadoop /opt/module]$ cd commons-daemon-1.0.15-src/src/native/unix/
[mjmos@mjmhadoop /opt/module/commons-daemon-1.0.15-src/src/native/unix]$ ./configure
[mjmos@mjmhadoop /opt/module/commons-daemon-1.0.15-src/src/native/unix]$ make
[mjmos@mjmhadoop /opt/module/commons-daemon-1.0.15-src/src/native/unix]$ cp jsvc /opt/module/hadoop-2.10.2/libexec/
[mjmos@mjmhadoop /opt/module/commons-daemon-1.0.15-src/src/native/unix]$ rm /opt/module/hadoop-2.10.2/share/hadoop/hdfs/lib/commons-daemon-*.jar
[mjmos@mjmhadoop /opt/module/commons-daemon-1.0.15-src/src/native/unix]$ cd /opt/module/
[mjmos@mjmhadoop /opt/module]$ cp commons-daemon-1.0.15/commons-daemon-1.0.15.jar /opt/module/hadoop-2.10.2/share/hadoop/hdfs/lib/
[mjmos@mjmhadoop /opt/module]$ sed -i 's#export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}#export HADOOP_SECURE_DN_USER=mjmos#g' /opt/module/hadoop-2.10.2/etc/hadoop/hadoop-env.sh
[mjmos@mjmhadoop /opt/module]$ sed -i 's,#export JSVC_HOME=${JSVC_HOME},export JSVC_HOME=/opt/module/hadoop-2.10.2/libexec/,g' /opt/module/hadoop-2.10.2/etc/hadoop/hadoop-env.sh
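A quick grep confirms both hadoop-env.sh lines were rewritten as intended (a minimal check):

grep -E 'HADOOP_SECURE_DN_USER|JSVC_HOME' /opt/module/hadoop-2.10.2/etc/hadoop/hadoop-env.sh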

ssl-client.xml configuration (this file takes the ssl.client.* keys; Hadoop reads the ssl.server.* keys from ssl-server.xml below)

[root@mjmhadoop /opt/module/hadoop-2.10.2/etc/hadoop]$ cat > /opt/module/hadoop-2.10.2/etc/hadoop/ssl-client.xml <<'eof'
<configuration>
        <property>
                <name>ssl.client.truststore.location</name>
                <value>/opt/module/truststore</value>
                <description>Truststore to be used by clients. Must be specified.</description>
        </property>
        <property>
                <name>ssl.client.truststore.password</name>
                <value>test999</value>
                <description>Optional. Default value is "".</description>
        </property>
        <property>
                <name>ssl.client.truststore.type</name>
                <value>jks</value>
                <description>Optional. The keystore file format, default value is "jks".</description>
        </property>
        <property>
                <name>ssl.client.truststore.reload.interval</name>
                <value>10000</value>
                <description>Truststore reload check interval, in milliseconds.
                Default value is 10000 (10 seconds).</description>
        </property>
        <property>
                <name>ssl.client.keystore.location</name>
                <value>/opt/module/keystore</value>
                <description>Keystore to be used by clients. Must be specified.</description>
        </property>
        <property>
                <name>ssl.client.keystore.password</name>
                <value>test999</value>
                <description>Must be specified.</description>
        </property>
        <property>
                <name>ssl.client.keystore.keypassword</name>
                <value>test999</value>
                <description>Must be specified.</description>
        </property>
        <property>
                <name>ssl.client.keystore.type</name>
                <value>jks</value>
                <description>Optional. The keystore file format, default value is "jks".</description>
        </property>
        <property>
                <name>ssl.client.exclude.cipher.list</name>
                <value>TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,
                SSL_RSA_WITH_DES_CBC_SHA,SSL_DHE_RSA_WITH_DES_CBC_SHA,
                SSL_RSA_EXPORT_WITH_RC4_40_MD5,SSL_RSA_EXPORT_WITH_DES40_CBC_SHA,
                SSL_RSA_WITH_RC4_128_MD5</value>
                <description>Optional. The weak security cipher suites that you want excluded
                from SSL communication.</description>
        </property>
</configuration>
eof

ssl-server.xml configuration

[root@mjmhadoop /opt/module/hadoop-2.10.2/etc/hadoop]$ cat > /opt/module/hadoop-2.10.2/etc/hadoop/ssl-server.xml <<'eof'
<configuration>
        <property>
                <name>ssl.server.truststore.location</name>
                <value>/opt/module/truststore</value>
                <description>Truststore to be used by NN and DN. Must be specified.</description>
        </property>
        <property>
                <name>ssl.server.truststore.password</name>
                <value>test999</value>
                <description>Optional. Default value is "".</description>
        </property>
        <property>
                <name>ssl.server.truststore.type</name>
                <value>jks</value>
                <description>Optional. The keystore file format, default value is "jks".</description>
        </property>
        <property>
                <name>ssl.server.truststore.reload.interval</name>
                <value>10000</value>
                <description>Truststore reload check interval, in milliseconds.
                Default value is 10000 (10 seconds).</description>
        </property>
        <property>
                <name>ssl.server.keystore.location</name>
                <value>/opt/module/keystore</value>
                <description>Keystore to be used by NN and DN. Must be specified.</description>
        </property>
        <property>
                <name>ssl.server.keystore.password</name>
                <value>test999</value>
                <description>Must be specified.</description>
        </property>
        <property>
                <name>ssl.server.keystore.keypassword</name>
                <value>test999</value>
                <description>Must be specified.</description>
        </property>
        <property>
                <name>ssl.server.keystore.type</name>
                <value>jks</value>
                <description>Optional. The keystore file format, default value is "jks".</description>
        </property>
        <property>
                <name>ssl.server.exclude.cipher.list</name>
                <value>TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,
                SSL_RSA_WITH_DES_CBC_SHA,SSL_DHE_RSA_WITH_DES_CBC_SHA,
                SSL_RSA_EXPORT_WITH_RC4_40_MD5,SSL_RSA_EXPORT_WITH_DES40_CBC_SHA,
                SSL_RSA_WITH_RC4_128_MD5</value>
                <description>Optional. The weak security cipher suites that you want excluded
                from SSL communication.</description>
        </property>
</configuration>
eof

Start the Hadoop services

[root@mjmhadoop /opt/module]$ su - mjmos
Last login: Fri Jun 20 21:58:18 CST 2025 on pts/0
[mjmos@mjmhadoop ~]$ jps
14505 Jps
[mjmos@mjmhadoop ~]$ start-all.sh
[mjmos@mjmhadoop ~]$ hadoop-daemon.sh start datanode
[mjmos@mjmhadoop ~]$ jps
14659 NameNode
14840 SecondaryNameNode
15165 DataNode
15246 Jps
[mjmos@mjmhadoop ~]$ exit
logout
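With dfs.http.policy set to HTTPS_ONLY, the NameNode web UI moves to the HTTPS port (50470 is the Hadoop 2.x default); a minimal check, using -k because the CA is self-signed:

curl -sk -o /dev/null -w '%{http_code}\n' https://192.168.195.110:50470/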

Confirm there is currently no ticket

[root@mjmhadoop /opt/module]$ klist
klist: No credentials cache found (filename: /tmp/krb5cc_0)

View the files

[root@mjmhadoop /opt/module]$ hdfs dfs -cat /test/tb1/t1_1
[root@mjmhadoop /opt/module]$ hdfs dfs -cat /test/tb1/t1_2
# Both fail with the following error:
cat: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]; Host Details : local host is: "mjmhadoop/192.168.195.110"; destination host is: "mjmhadoop":9000;

Authenticate with the merged keytab

[root@mjmhadoop /opt/module]$ kinit -V -k -t /home/kerberos/merged.keytab root/mjmhadoop@MJM.COM
Using default cache: /tmp/krb5cc_0
Using principal: root/mjmhadoop@MJM.COM
Using keytab: /home/kerberos/merged.keytab
Authenticated to Kerberos v5
[root@mjmhadoop /opt/module]$ klist
Ticket cache: FILE:/tmp/krb5cc_0
Default principal: root/mjmhadoop@MJM.COM

Valid starting       Expires              Service principal
06/20/2025 22:14:30  06/21/2025 22:14:30  krbtgt/MJM.COM@MJM.COM
        renew until 06/20/2025 22:14:30

The files can now be read successfully

[root@mjmhadoop /opt/module]$ hdfs dfs -cat /test/tb1/t1_1
25/06/20 22:14:58 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
miaojiaming1
[root@mjmhadoop /opt/module]$ hdfs dfs -cat /test/tb1/t1_2
25/06/20 22:15:03 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
miaojiaming2

Local pull test

[root@mjmhadoop /opt/module]$ hdfs dfs -get hdfs://mjmos@192.168.195.110:9000/test/tb1/t1_1 /tmp
[root@mjmhadoop /opt/module]$ hdfs dfs -get hdfs://mjmos@192.168.195.110:9000/test/tb1/t1_2 /tmp

load data infile 'hdfs://mjmos@192.168.195.110/test/tb1/t1_*' into table t1;
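Since the TGT obtained above expires (24h in this krb5.conf), loads will start failing once the ticket lapses; a minimal sketch of keeping the ticket fresh on the node that runs the load (the schedule below is an assumption, adjust to the ticket lifetime):

# hypothetical cron entry: refresh the TGT every 12 hours
# 0 */12 * * * kinit -k -t /opt/module/merged.keytab root/mjmhadoop@MJM.COM
kinit -k -t /opt/module/merged.keytab root/mjmhadoop@MJM.COM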
