
Script Development Approach

  • Start the containers
docker start oracle
docker start hadoop
docker start hive
docker start sqoop
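Before moving on, it is worth confirming that all four containers actually came up; a minimal check, assuming the containers keep the names used above:
# Show only the containers used here, with their status
docker ps --filter "name=oracle" --filter "name=hadoop" --filter "name=hive" --filter "name=sqoop" --format "table {{.Names}}\t{{.Status}}"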
  • Tables to ingest
# Enter the sqoop container
docker exec -it sqoop bash
# Create a test directory
mkdir -p /opt/datas/shell
cd /opt/datas/shell/
# Create the file that holds the table names
vim test_full_table.txt

# File contents (one table per line):
ciss4.ciss_base_areas
ciss4.ciss_base_baseinfo
ciss4.ciss_base_csp
ciss4.ciss_base_customer
ciss4.ciss_base_device
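The list above is maintained by hand for this test. For a larger schema it could instead be generated from Oracle's data dictionary with sqoop eval; a sketch, assuming the CISS4 schema owner and that the boxed output is trimmed before use:
# Query Oracle for every table owned by CISS4
sqoop eval --connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456 --query "SELECT owner || '.' || table_name FROM all_tables WHERE owner = 'CISS4'"
# sqoop eval prints a boxed, pipe-delimited result set, so strip the decoration
# (e.g. with grep/awk) before saving the names into test_full_table.txt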
  • Create the script
vim test_full_import_table.sh

# Script contents:
#!/bin/bash
# Load environment variables (JAVA_HOME, HADOOP_HOME, SQOOP_HOME, PATH)
source /etc/profile
# File that lists the tables to import, one per line
TB_NAME=/opt/datas/shell/test_full_table.txt
# Base sqoop import command
IMP_OPT="sqoop import -Dmapreduce.job.user.classpath.first=true"
# JDBC connection options for the Oracle source
JDBC_OPT="--connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456"

# Read each table name and run a full import for it;
# ${tbname^^} upper-cases the name (bash 4+), since Oracle object names are stored in upper case
while read tbname
do
  ${IMP_OPT} ${JDBC_OPT} --table ${tbname^^} --delete-target-dir --target-dir /test/full_imp/${tbname^^} --as-avrodatafile --fields-terminated-by "\001" -m 1
done < ${TB_NAME}

# Make the script executable
chmod u+x test_full_import_table.sh
# Run it with shell tracing enabled
sh -x test_full_import_table.sh
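The loop keeps going when a single import fails, but nothing records which table failed. A minimal variant with per-table error handling, under the same assumptions as the script above (the failure-log path is hypothetical):
#!/bin/bash
source /etc/profile
TB_NAME=/opt/datas/shell/test_full_table.txt
# Hypothetical file that collects the names of tables whose import failed
FAIL_LOG=/opt/datas/shell/failed_tables.txt
IMP_OPT="sqoop import -Dmapreduce.job.user.classpath.first=true"
JDBC_OPT="--connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456"

> ${FAIL_LOG}
while read tbname
do
  ${IMP_OPT} ${JDBC_OPT} --table ${tbname^^} --delete-target-dir --target-dir /test/full_imp/${tbname^^} --as-avrodatafile --fields-terminated-by "\001" -m 1
  if [ $? -ne 0 ]; then
    # Remember the table so it can be re-run after the batch finishes
    echo "${tbname}" >> ${FAIL_LOG}
  fi
done < ${TB_NAME}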
  • Complete run (sh -x trace)
[root@15b0369d3f2a shell]# sh -x test_full_import_table.sh
+ source /etc/profile
++ '[' -x /usr/bin/id ']'
++ '[' -z 0 ']'
+++ /usr/bin/id -un
++ USER=root
++ LOGNAME=root
++ MAIL=/var/spool/mail/root
++ '[' 0 = 0 ']'
++ pathmunge /usr/sbin
++ case ":${PATH}:" in
++ pathmunge /usr/local/sbin
++ case ":${PATH}:" in
+++ /usr/bin/hostname
++ HOSTNAME=15b0369d3f2a
++ HISTSIZE=1000
++ '[' ignoredups = ignorespace ']'
++ export HISTCONTROL=ignoredups
++ HISTCONTROL=ignoredups
++ export PATH USER LOGNAME MAIL HOSTNAME HISTSIZE HISTCONTROL
++ '[' 0 -gt 199 ']'
++ umask 022
++ for i in '/etc/profile.d/*.sh' /etc/profile.d/sh.local
++ '[' -r /etc/profile.d/colorgrep.sh ']'
++ '[' hxB '!=' hxB ']'
++ . /etc/profile.d/colorgrep.sh
+++ /usr/libexec/grepconf.sh -c
+++ alias 'grep=grep --color=auto'
+++ alias 'egrep=egrep --color=auto'
+++ alias 'fgrep=fgrep --color=auto'
++ for i in '/etc/profile.d/*.sh' /etc/profile.d/sh.local
++ '[' -r /etc/profile.d/colorls.sh ']'
++ '[' hxB '!=' hxB ']'
++ . /etc/profile.d/colorls.sh
+++ '[' '!' -t 0 ']'
+++ '[' -z '' ']'
+++ alias 'll=ls -l'
+++ alias 'l.=ls -d .*'
+++ INCLUDE=
+++ COLORS=
+++ for colors in '"$HOME/.dir_colors.$TERM"' '"$HOME/.dircolors.$TERM"' '"$HOME/.dir_colors"' '"$HOME/.dircolors"'
+++ '[' -e /root/.dir_colors.xterm ']'
+++ for colors in '"$HOME/.dir_colors.$TERM"' '"$HOME/.dircolors.$TERM"' '"$HOME/.dir_colors"' '"$HOME/.dircolors"'
+++ '[' -e /root/.dircolors.xterm ']'
+++ for colors in '"$HOME/.dir_colors.$TERM"' '"$HOME/.dircolors.$TERM"' '"$HOME/.dir_colors"' '"$HOME/.dircolors"'
+++ '[' -e /root/.dir_colors ']'
+++ for colors in '"$HOME/.dir_colors.$TERM"' '"$HOME/.dircolors.$TERM"' '"$HOME/.dir_colors"' '"$HOME/.dircolors"'
+++ '[' -e /root/.dircolors ']'
+++ '[' -z '' ']'
+++ '[' -e /etc/DIR_COLORS.xterm ']'
+++ '[' -z '' ']'
+++ '[' -e /etc/DIR_COLORS.256color ']'
++++ /usr/bin/tty -s
++++ /usr/bin/tput colors
+++ '[' x8 = x256 ']'
+++ '[' -z '' ']'
+++ '[' -e /etc/DIR_COLORS ']'
+++ COLORS=/etc/DIR_COLORS
+++ '[' -n /etc/DIR_COLORS ']'
+++ '[' -e '' ']'
++++ /usr/bin/dircolors --sh /etc/DIR_COLORS
+++ eval 'LS_COLORS='\''rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:mi=01;05;37;41:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arc=01;31:*.arj=01;31:*.taz=01;31:*.lha=01;31:*.lz4=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.tzo=01;31:*.t7z=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lrz=01;31:*.lz=01;31:*.lzo=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=01;36:*.au=01;36:*.flac=01;36:*.mid=01;36:*.midi=01;36:*.mka=01;36:*.mp3=01;36:*.mpc=01;36:*.ogg=01;36:*.ra=01;36:*.wav=01;36:*.axa=01;36:*.oga=01;36:*.spx=01;36:*.xspf=01;36:'\'';
export LS_COLORS'
++++ LS_COLORS='rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:mi=01;05;37;41:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arc=01;31:*.arj=01;31:*.taz=01;31:*.lha=01;31:*.lz4=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.tzo=01;31:*.t7z=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lrz=01;31:*.lz=01;31:*.lzo=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=01;36:*.au=01;36:*.flac=01;36:*.mid=01;36:*.midi=01;36:*.mka=01;36:*.mp3=01;36:*.mpc=01;36:*.ogg=01;36:*.ra=01;36:*.wav=01;36:*.axa=01;36:*.oga=01;36:*.spx=01;36:*.xspf=01;36:'
++++ export LS_COLORS
+++ '[' -z 'rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:mi=01;05;37;41:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arc=01;31:*.arj=01;31:*.taz=01;31:*.lha=01;31:*.lz4=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.tzo=01;31:*.t7z=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lrz=01;31:*.lz=01;31:*.lzo=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=01;36:*.au=01;36:*.flac=01;36:*.mid=01;36:*.midi=01;36:*.mka=01;36:*.mp3=01;36:*.mpc=01;36:*.ogg=01;36:*.ra=01;36:*.wav=01;36:*.axa=01;36:*.oga=01;36:*.spx=01;36:*.xspf=01;36:' ']'
+++ /usr/bin/grep -qi '^COLOR.*none' /etc/DIR_COLORS
+++ unset TMP COLORS INCLUDE
+++ alias 'll=ls -l --color=auto'
+++ alias 'l.=ls -d .* --color=auto'
+++ alias 'ls=ls --color=auto'
++ for i in '/etc/profile.d/*.sh' /etc/profile.d/sh.local
++ '[' -r /etc/profile.d/vim.sh ']'
++ '[' hxB '!=' hxB ']'
++ . /etc/profile.d/vim.sh
+++ '[' -n '4.2.46(2)-release' -o -n '' -o -n '' ']'
+++ '[' -x /usr/bin/id ']'
++++ /usr/bin/id -u
+++ ID=0
+++ '[' -n 0 -a 0 -le 200 ']'
+++ return
++ for i in '/etc/profile.d/*.sh' /etc/profile.d/sh.local
++ '[' -r /etc/profile.d/which2.sh ']'
++ '[' hxB '!=' hxB ']'
++ . /etc/profile.d/which2.sh
+++ alias 'which=alias | /usr/bin/which --tty-only --read-alias --show-dot --show-tilde'
++ for i in '/etc/profile.d/*.sh' /etc/profile.d/sh.local
++ '[' -r /etc/profile.d/sh.local ']'
++ '[' hxB '!=' hxB ']'
++ . /etc/profile.d/sh.local
++ unset i
++ unset -f pathmunge
++ export JAVA_HOME=/opt/jdk1.8.0_141
++ JAVA_HOME=/opt/jdk1.8.0_141
++ export CLASSPATH=/opt/jdk1.8.0_141/lib
++ CLASSPATH=/opt/jdk1.8.0_141/lib
++ export PATH=/opt/jdk1.8.0_141/bin:/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ PATH=/opt/jdk1.8.0_141/bin:/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ export HADOOP_HOME=/opt/hadoop-2.7.0
++ HADOOP_HOME=/opt/hadoop-2.7.0
++ export PATH=/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ PATH=/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ export HIVE_HOME=/opt/apache-hive-2.1.0-bin
++ HIVE_HOME=/opt/apache-hive-2.1.0-bin
++ export SQOOP_HOME=/opt/sqoop
++ SQOOP_HOME=/opt/sqoop
++ export PATH=/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ PATH=/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/opt/sqoop/bin:/opt/hadoop-2.7.0/sbin:/opt/hadoop-2.7.0/bin:/opt/jdk1.8.0_141/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ TB_NAME=/opt/datas/shell/test_full_table.txt
+ IMP_OPT='sqoop import -Dmapreduce.job.user.classpath.first=true'
+ JDBC_OPT='--connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456'
+ read tbname
+ sqoop import -Dmapreduce.job.user.classpath.first=true --connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456 --table CISS4.CISS_BASE_AREAS --delete-target-dir --target-dir /test/full_imp/CISS4.CISS_BASE_AREAS --as-avrodatafile --fields-terminated-by '\001' -m 1
Warning: /opt/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /opt/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /opt/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
Warning: /opt/sqoop/../zookeeper does not exist! Accumulo imports will fail.
Please set $ZOOKEEPER_HOME to the root of your Zookeeper installation.
24/01/19 03:10:36 INFO sqoop.Sqoop: Running Sqoop version: 1.4.7
24/01/19 03:10:36 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
24/01/19 03:10:36 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
24/01/19 03:10:36 INFO manager.SqlManager: Using default fetchSize of 1000
24/01/19 03:10:36 INFO tool.CodeGenTool: Beginning code generation
24/01/19 03:10:37 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:37 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_AREAS t WHERE 1=0
24/01/19 03:10:37 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /opt/hadoop-2.7.0
Note: /tmp/sqoop-root/compile/5dfcbcd1dec89b603cc2e70ba220438b/CISS4_CISS_BASE_AREAS.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
24/01/19 03:10:38 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/5dfcbcd1dec89b603cc2e70ba220438b/CISS4.CISS_BASE_AREAS.jar
24/01/19 03:10:39 INFO tool.ImportTool: Destination directory /test/full_imp/CISS4.CISS_BASE_AREAS is not present, hence not deleting.
24/01/19 03:10:39 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:39 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:39 INFO mapreduce.ImportJobBase: Beginning import of CISS4.CISS_BASE_AREAS
24/01/19 03:10:39 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
24/01/19 03:10:39 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:39 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:39 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_AREAS t WHERE 1=0
24/01/19 03:10:39 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_AREAS t WHERE 1=0
24/01/19 03:10:39 INFO mapreduce.DataDrivenImportJob: Writing Avro schema file: /tmp/sqoop-root/compile/5dfcbcd1dec89b603cc2e70ba220438b/CISS4_CISS_BASE_AREAS.avsc
24/01/19 03:10:39 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
24/01/19 03:10:39 INFO client.RMProxy: Connecting to ResourceManager at hadoop.bigdata.cn/172.33.0.121:8032
24/01/19 03:10:42 INFO db.DBInputFormat: Using read commited transaction isolation
24/01/19 03:10:42 INFO mapreduce.JobSubmitter: number of splits:1
24/01/19 03:10:42 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1705630852284_0001
24/01/19 03:10:43 INFO impl.YarnClientImpl: Submitted application application_1705630852284_0001
24/01/19 03:10:43 INFO mapreduce.Job: The url to track the job: http://hadoop.bigdata.cn:8088/proxy/application_1705630852284_0001/
24/01/19 03:10:43 INFO mapreduce.Job: Running job: job_1705630852284_0001
24/01/19 03:10:51 INFO mapreduce.Job: Job job_1705630852284_0001 running in uber mode : true
24/01/19 03:10:51 INFO mapreduce.Job:  map 0% reduce 0%
24/01/19 03:10:53 INFO mapreduce.Job:  map 100% reduce 0%
24/01/19 03:10:54 INFO mapreduce.Job: Job job_1705630852284_0001 completed successfully
24/01/19 03:10:54 INFO mapreduce.Job: Counters: 32
	File System Counters
		FILE: Number of bytes read=0
		FILE: Number of bytes written=0
		FILE: Number of read operations=0
		FILE: Number of large read operations=0
		FILE: Number of write operations=0
		HDFS: Number of bytes read=100
		HDFS: Number of bytes written=2589290
		HDFS: Number of read operations=140
		HDFS: Number of large read operations=0
		HDFS: Number of write operations=5
	Job Counters 
		Launched map tasks=1
		Other local map tasks=1
		Total time spent by all maps in occupied slots (ms)=4686
		Total time spent by all reduces in occupied slots (ms)=0
		TOTAL_LAUNCHED_UBERTASKS=1
		NUM_UBER_SUBMAPS=1
		Total time spent by all map tasks (ms)=2343
		Total vcore-seconds taken by all map tasks=2343
		Total megabyte-seconds taken by all map tasks=2399232
	Map-Reduce Framework
		Map input records=47562
		Map output records=47562
		Input split bytes=87
		Spilled Records=0
		Failed Shuffles=0
		Merged Map outputs=0
		GC time elapsed (ms)=227
		CPU time spent (ms)=3230
		Physical memory (bytes) snapshot=446881792
		Virtual memory (bytes) snapshot=2945511424
		Total committed heap usage (bytes)=409468928
	File Input Format Counters 
		Bytes Read=0
	File Output Format Counters 
		Bytes Written=2445199
24/01/19 03:10:54 INFO mapreduce.ImportJobBase: Transferred 2.4693 MB in 14.6385 seconds (172.7365 KB/sec)
24/01/19 03:10:54 INFO mapreduce.ImportJobBase: Retrieved 47562 records.
+ read tbname
+ sqoop import -Dmapreduce.job.user.classpath.first=true --connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456 --table CISS4.CISS_BASE_BASEINFO --delete-target-dir --target-dir /test/full_imp/CISS4.CISS_BASE_BASEINFO --as-avrodatafile --fields-terminated-by '\001' -m 1
Warning: /opt/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /opt/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /opt/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
Warning: /opt/sqoop/../zookeeper does not exist! Accumulo imports will fail.
Please set $ZOOKEEPER_HOME to the root of your Zookeeper installation.
24/01/19 03:10:55 INFO sqoop.Sqoop: Running Sqoop version: 1.4.7
24/01/19 03:10:55 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
24/01/19 03:10:55 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
24/01/19 03:10:55 INFO manager.SqlManager: Using default fetchSize of 1000
24/01/19 03:10:55 INFO tool.CodeGenTool: Beginning code generation
24/01/19 03:10:55 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:55 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_BASEINFO t WHERE 1=0
24/01/19 03:10:55 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /opt/hadoop-2.7.0
Note: /tmp/sqoop-root/compile/b90ebd17cdcfc15b8c8bb01b003b0ffc/CISS4_CISS_BASE_BASEINFO.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
24/01/19 03:10:56 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/b90ebd17cdcfc15b8c8bb01b003b0ffc/CISS4.CISS_BASE_BASEINFO.jar
24/01/19 03:10:57 INFO tool.ImportTool: Destination directory /test/full_imp/CISS4.CISS_BASE_BASEINFO is not present, hence not deleting.
24/01/19 03:10:57 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:57 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:57 INFO mapreduce.ImportJobBase: Beginning import of CISS4.CISS_BASE_BASEINFO
24/01/19 03:10:57 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
24/01/19 03:10:57 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:57 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:10:57 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_BASEINFO t WHERE 1=0
24/01/19 03:10:57 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_BASEINFO t WHERE 1=0
24/01/19 03:10:57 INFO mapreduce.DataDrivenImportJob: Writing Avro schema file: /tmp/sqoop-root/compile/b90ebd17cdcfc15b8c8bb01b003b0ffc/CISS4_CISS_BASE_BASEINFO.avsc
24/01/19 03:10:57 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
24/01/19 03:10:57 INFO client.RMProxy: Connecting to ResourceManager at hadoop.bigdata.cn/172.33.0.121:8032
24/01/19 03:11:00 INFO db.DBInputFormat: Using read commited transaction isolation
24/01/19 03:11:00 INFO mapreduce.JobSubmitter: number of splits:1
24/01/19 03:11:01 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1705630852284_0002
24/01/19 03:11:01 INFO impl.YarnClientImpl: Submitted application application_1705630852284_0002
24/01/19 03:11:01 INFO mapreduce.Job: The url to track the job: http://hadoop.bigdata.cn:8088/proxy/application_1705630852284_0002/
24/01/19 03:11:01 INFO mapreduce.Job: Running job: job_1705630852284_0002
24/01/19 03:11:06 INFO mapreduce.Job: Job job_1705630852284_0002 running in uber mode : true
24/01/19 03:11:06 INFO mapreduce.Job:  map 100% reduce 0%
24/01/19 03:11:08 INFO mapreduce.Job: Job job_1705630852284_0002 completed successfully
24/01/19 03:11:08 INFO mapreduce.Job: Counters: 32
	File System Counters
		FILE: Number of bytes read=0
		FILE: Number of bytes written=0
		FILE: Number of read operations=0
		FILE: Number of large read operations=0
		FILE: Number of write operations=0
		HDFS: Number of bytes read=100
		HDFS: Number of bytes written=150243
		HDFS: Number of read operations=140
		HDFS: Number of large read operations=0
		HDFS: Number of write operations=5
	Job Counters 
		Launched map tasks=1
		Other local map tasks=1
		Total time spent by all maps in occupied slots (ms)=2716
		Total time spent by all reduces in occupied slots (ms)=0
		TOTAL_LAUNCHED_UBERTASKS=1
		NUM_UBER_SUBMAPS=1
		Total time spent by all map tasks (ms)=1358
		Total vcore-seconds taken by all map tasks=1358
		Total megabyte-seconds taken by all map tasks=1390592
	Map-Reduce Framework
		Map input records=6
		Map output records=6
		Input split bytes=87
		Spilled Records=0
		Failed Shuffles=0
		Merged Map outputs=0
		GC time elapsed (ms)=39
		CPU time spent (ms)=620
		Physical memory (bytes) snapshot=319287296
		Virtual memory (bytes) snapshot=2946318336
		Total committed heap usage (bytes)=267386880
	File Input Format Counters 
		Bytes Read=0
	File Output Format Counters 
		Bytes Written=3963
24/01/19 03:11:08 INFO mapreduce.ImportJobBase: Transferred 146.7217 KB in 10.8773 seconds (13.4888 KB/sec)
24/01/19 03:11:08 INFO mapreduce.ImportJobBase: Retrieved 6 records.
+ read tbname
+ sqoop import -Dmapreduce.job.user.classpath.first=true --connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456 --table CISS4.CISS_BASE_CSP --delete-target-dir --target-dir /test/full_imp/CISS4.CISS_BASE_CSP --as-avrodatafile --fields-terminated-by '\001' -m 1
Warning: /opt/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /opt/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /opt/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
Warning: /opt/sqoop/../zookeeper does not exist! Accumulo imports will fail.
Please set $ZOOKEEPER_HOME to the root of your Zookeeper installation.
24/01/19 03:11:09 INFO sqoop.Sqoop: Running Sqoop version: 1.4.7
24/01/19 03:11:09 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
24/01/19 03:11:09 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
24/01/19 03:11:09 INFO manager.SqlManager: Using default fetchSize of 1000
24/01/19 03:11:09 INFO tool.CodeGenTool: Beginning code generation
24/01/19 03:11:09 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:09 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_CSP t WHERE 1=0
24/01/19 03:11:09 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /opt/hadoop-2.7.0
Note: /tmp/sqoop-root/compile/62e8e019f95c72a854a9141cb3690dc5/CISS4_CISS_BASE_CSP.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
24/01/19 03:11:10 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/62e8e019f95c72a854a9141cb3690dc5/CISS4.CISS_BASE_CSP.jar
24/01/19 03:11:11 INFO tool.ImportTool: Destination directory /test/full_imp/CISS4.CISS_BASE_CSP is not present, hence not deleting.
24/01/19 03:11:11 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:11 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:11 INFO mapreduce.ImportJobBase: Beginning import of CISS4.CISS_BASE_CSP
24/01/19 03:11:11 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
24/01/19 03:11:11 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:11 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:11 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_CSP t WHERE 1=0
24/01/19 03:11:11 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_CSP t WHERE 1=0
24/01/19 03:11:11 INFO mapreduce.DataDrivenImportJob: Writing Avro schema file: /tmp/sqoop-root/compile/62e8e019f95c72a854a9141cb3690dc5/CISS4_CISS_BASE_CSP.avsc
24/01/19 03:11:11 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
24/01/19 03:11:11 INFO client.RMProxy: Connecting to ResourceManager at hadoop.bigdata.cn/172.33.0.121:8032
24/01/19 03:11:13 INFO db.DBInputFormat: Using read commited transaction isolation
24/01/19 03:11:13 INFO mapreduce.JobSubmitter: number of splits:1
24/01/19 03:11:13 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1705630852284_0003
24/01/19 03:11:14 INFO impl.YarnClientImpl: Submitted application application_1705630852284_0003
24/01/19 03:11:14 INFO mapreduce.Job: The url to track the job: http://hadoop.bigdata.cn:8088/proxy/application_1705630852284_0003/
24/01/19 03:11:14 INFO mapreduce.Job: Running job: job_1705630852284_0003
24/01/19 03:11:19 INFO mapreduce.Job: Job job_1705630852284_0003 running in uber mode : true
24/01/19 03:11:19 INFO mapreduce.Job:  map 0% reduce 0%
24/01/19 03:11:21 INFO mapreduce.Job:  map 100% reduce 0%
24/01/19 03:11:21 INFO mapreduce.Job: Job job_1705630852284_0003 completed successfully
24/01/19 03:11:21 INFO mapreduce.Job: Counters: 32
	File System Counters
		FILE: Number of bytes read=0
		FILE: Number of bytes written=0
		FILE: Number of read operations=0
		FILE: Number of large read operations=0
		FILE: Number of write operations=0
		HDFS: Number of bytes read=100
		HDFS: Number of bytes written=179031
		HDFS: Number of read operations=140
		HDFS: Number of large read operations=0
		HDFS: Number of write operations=5
	Job Counters 
		Launched map tasks=1
		Other local map tasks=1
		Total time spent by all maps in occupied slots (ms)=2748
		Total time spent by all reduces in occupied slots (ms)=0
		TOTAL_LAUNCHED_UBERTASKS=1
		NUM_UBER_SUBMAPS=1
		Total time spent by all map tasks (ms)=1374
		Total vcore-seconds taken by all map tasks=1374
		Total megabyte-seconds taken by all map tasks=1406976
	Map-Reduce Framework
		Map input records=151
		Map output records=151
		Input split bytes=87
		Spilled Records=0
		Failed Shuffles=0
		Merged Map outputs=0
		GC time elapsed (ms)=66
		CPU time spent (ms)=770
		Physical memory (bytes) snapshot=334139392
		Virtual memory (bytes) snapshot=2937131008
		Total committed heap usage (bytes)=282066944
	File Input Format Counters 
		Bytes Read=0
	File Output Format Counters 
		Bytes Written=33401
24/01/19 03:11:21 INFO mapreduce.ImportJobBase: Transferred 174.835 KB in 10.0538 seconds (17.3899 KB/sec)
24/01/19 03:11:21 INFO mapreduce.ImportJobBase: Retrieved 151 records.
+ read tbname
+ sqoop import -Dmapreduce.job.user.classpath.first=true --connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456 --table CISS4.CISS_BASE_CUSTOMER --delete-target-dir --target-dir /test/full_imp/CISS4.CISS_BASE_CUSTOMER --as-avrodatafile --fields-terminated-by '\001' -m 1
Warning: /opt/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /opt/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /opt/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
Warning: /opt/sqoop/../zookeeper does not exist! Accumulo imports will fail.
Please set $ZOOKEEPER_HOME to the root of your Zookeeper installation.
24/01/19 03:11:22 INFO sqoop.Sqoop: Running Sqoop version: 1.4.7
24/01/19 03:11:22 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
24/01/19 03:11:22 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
24/01/19 03:11:22 INFO manager.SqlManager: Using default fetchSize of 1000
24/01/19 03:11:22 INFO tool.CodeGenTool: Beginning code generation
24/01/19 03:11:22 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:22 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_CUSTOMER t WHERE 1=0
24/01/19 03:11:22 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /opt/hadoop-2.7.0
Note: /tmp/sqoop-root/compile/3a5b0e5cf62b5bf57b895279e6bc77b3/CISS4_CISS_BASE_CUSTOMER.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
24/01/19 03:11:23 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/3a5b0e5cf62b5bf57b895279e6bc77b3/CISS4.CISS_BASE_CUSTOMER.jar
24/01/19 03:11:23 INFO tool.ImportTool: Destination directory /test/full_imp/CISS4.CISS_BASE_CUSTOMER is not present, hence not deleting.
24/01/19 03:11:23 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:23 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:23 INFO mapreduce.ImportJobBase: Beginning import of CISS4.CISS_BASE_CUSTOMER
24/01/19 03:11:23 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
24/01/19 03:11:23 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:23 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:23 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_CUSTOMER t WHERE 1=0
24/01/19 03:11:23 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_CUSTOMER t WHERE 1=0
24/01/19 03:11:24 INFO mapreduce.DataDrivenImportJob: Writing Avro schema file: /tmp/sqoop-root/compile/3a5b0e5cf62b5bf57b895279e6bc77b3/CISS4_CISS_BASE_CUSTOMER.avsc
24/01/19 03:11:24 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
24/01/19 03:11:24 INFO client.RMProxy: Connecting to ResourceManager at hadoop.bigdata.cn/172.33.0.121:8032
24/01/19 03:11:26 INFO db.DBInputFormat: Using read commited transaction isolation
24/01/19 03:11:26 INFO mapreduce.JobSubmitter: number of splits:1
24/01/19 03:11:26 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1705630852284_0004
24/01/19 03:11:26 INFO impl.YarnClientImpl: Submitted application application_1705630852284_0004
24/01/19 03:11:26 INFO mapreduce.Job: The url to track the job: http://hadoop.bigdata.cn:8088/proxy/application_1705630852284_0004/
24/01/19 03:11:26 INFO mapreduce.Job: Running job: job_1705630852284_0004
24/01/19 03:11:32 INFO mapreduce.Job: Job job_1705630852284_0004 running in uber mode : true
24/01/19 03:11:32 INFO mapreduce.Job:  map 0% reduce 0%
24/01/19 03:11:34 INFO mapreduce.Job:  map 100% reduce 0%
24/01/19 03:11:35 INFO mapreduce.Job: Job job_1705630852284_0004 completed successfully
24/01/19 03:11:35 INFO mapreduce.Job: Counters: 32
	File System Counters
		FILE: Number of bytes read=0
		FILE: Number of bytes written=0
		FILE: Number of read operations=0
		FILE: Number of large read operations=0
		FILE: Number of write operations=0
		HDFS: Number of bytes read=100
		HDFS: Number of bytes written=730311
		HDFS: Number of read operations=140
		HDFS: Number of large read operations=0
		HDFS: Number of write operations=5
	Job Counters 
		Launched map tasks=1
		Other local map tasks=1
		Total time spent by all maps in occupied slots (ms)=3968
		Total time spent by all reduces in occupied slots (ms)=0
		TOTAL_LAUNCHED_UBERTASKS=1
		NUM_UBER_SUBMAPS=1
		Total time spent by all map tasks (ms)=1984
		Total vcore-seconds taken by all map tasks=1984
		Total megabyte-seconds taken by all map tasks=2031616
	Map-Reduce Framework
		Map input records=8178
		Map output records=8178
		Input split bytes=87
		Spilled Records=0
		Failed Shuffles=0
		Merged Map outputs=0
		GC time elapsed (ms)=85
		CPU time spent (ms)=1300
		Physical memory (bytes) snapshot=341499904
		Virtual memory (bytes) snapshot=2946318336
		Total committed heap usage (bytes)=284688384
	File Input Format Counters 
		Bytes Read=0
	File Output Format Counters 
		Bytes Written=586186
24/01/19 03:11:35 INFO mapreduce.ImportJobBase: Transferred 713.1943 KB in 11.064 seconds (64.4605 KB/sec)
24/01/19 03:11:35 INFO mapreduce.ImportJobBase: Retrieved 8178 records.
+ read tbname
+ sqoop import -Dmapreduce.job.user.classpath.first=true --connect jdbc:oracle:thin:@oracle.bigdata.cn:1521:helowin --username ciss --password 123456 --table CISS4.CISS_BASE_DEVICE --delete-target-dir --target-dir /test/full_imp/CISS4.CISS_BASE_DEVICE --as-avrodatafile --fields-terminated-by '\001' -m 1
Warning: /opt/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /opt/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /opt/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
Warning: /opt/sqoop/../zookeeper does not exist! Accumulo imports will fail.
Please set $ZOOKEEPER_HOME to the root of your Zookeeper installation.
24/01/19 03:11:35 INFO sqoop.Sqoop: Running Sqoop version: 1.4.7
24/01/19 03:11:35 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
24/01/19 03:11:36 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
24/01/19 03:11:36 INFO manager.SqlManager: Using default fetchSize of 1000
24/01/19 03:11:36 INFO tool.CodeGenTool: Beginning code generation
24/01/19 03:11:36 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:36 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_DEVICE t WHERE 1=0
24/01/19 03:11:36 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /opt/hadoop-2.7.0
Note: /tmp/sqoop-root/compile/ed4e22f2ffc2f997cf5a809877307c6f/CISS4_CISS_BASE_DEVICE.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
24/01/19 03:11:37 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/ed4e22f2ffc2f997cf5a809877307c6f/CISS4.CISS_BASE_DEVICE.jar
24/01/19 03:11:37 INFO tool.ImportTool: Destination directory /test/full_imp/CISS4.CISS_BASE_DEVICE is not present, hence not deleting.
24/01/19 03:11:37 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:37 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:37 INFO mapreduce.ImportJobBase: Beginning import of CISS4.CISS_BASE_DEVICE
24/01/19 03:11:37 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
24/01/19 03:11:37 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:37 INFO manager.OracleManager: Time zone has been set to GMT
24/01/19 03:11:37 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_DEVICE t WHERE 1=0
24/01/19 03:11:37 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM CISS4.CISS_BASE_DEVICE t WHERE 1=0
24/01/19 03:11:37 INFO mapreduce.DataDrivenImportJob: Writing Avro schema file: /tmp/sqoop-root/compile/ed4e22f2ffc2f997cf5a809877307c6f/CISS4_CISS_BASE_DEVICE.avsc
24/01/19 03:11:37 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
24/01/19 03:11:37 INFO client.RMProxy: Connecting to ResourceManager at hadoop.bigdata.cn/172.33.0.121:8032
24/01/19 03:11:40 INFO db.DBInputFormat: Using read commited transaction isolation
24/01/19 03:11:40 INFO mapreduce.JobSubmitter: number of splits:1
24/01/19 03:11:40 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1705630852284_0005
24/01/19 03:11:40 INFO impl.YarnClientImpl: Submitted application application_1705630852284_0005
24/01/19 03:11:40 INFO mapreduce.Job: The url to track the job: http://hadoop.bigdata.cn:8088/proxy/application_1705630852284_0005/
24/01/19 03:11:40 INFO mapreduce.Job: Running job: job_1705630852284_0005
24/01/19 03:11:46 INFO mapreduce.Job: Job job_1705630852284_0005 running in uber mode : true
24/01/19 03:11:46 INFO mapreduce.Job:  map 0% reduce 0%
24/01/19 03:11:48 INFO mapreduce.Job:  map 100% reduce 0%
24/01/19 03:11:48 INFO mapreduce.Job: Job job_1705630852284_0005 completed successfully
24/01/19 03:11:48 INFO mapreduce.Job: Counters: 32
	File System Counters
		FILE: Number of bytes read=0
		FILE: Number of bytes written=0
		FILE: Number of read operations=0
		FILE: Number of large read operations=0
		FILE: Number of write operations=0
		HDFS: Number of bytes read=100
		HDFS: Number of bytes written=2097767
		HDFS: Number of read operations=140
		HDFS: Number of large read operations=0
		HDFS: Number of write operations=5
	Job Counters 
		Launched map tasks=1
		Other local map tasks=1
		Total time spent by all maps in occupied slots (ms)=3240
		Total time spent by all reduces in occupied slots (ms)=0
		TOTAL_LAUNCHED_UBERTASKS=1
		NUM_UBER_SUBMAPS=1
		Total time spent by all map tasks (ms)=1620
		Total vcore-seconds taken by all map tasks=1620
		Total megabyte-seconds taken by all map tasks=1658880
	Map-Reduce Framework
		Map input records=15517
		Map output records=15517
		Input split bytes=87
		Spilled Records=0
		Failed Shuffles=0
		Merged Map outputs=0
		GC time elapsed (ms)=52
		CPU time spent (ms)=1520
		Physical memory (bytes) snapshot=347426816
		Virtual memory (bytes) snapshot=2945314816
		Total committed heap usage (bytes)=287834112
	File Input Format Counters 
		Bytes Read=0
	File Output Format Counters 
		Bytes Written=1953859
24/01/19 03:11:48 INFO mapreduce.ImportJobBase: Transferred 2.0006 MB in 10.3504 seconds (197.9248 KB/sec)
24/01/19 03:11:48 INFO mapreduce.ImportJobBase: Retrieved 15517 records.
+ read tbname
  • Check the results in HDFS
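Once the imports finish, the Avro output should sit under /test/full_imp, one directory per table (upper-cased, matching the script). A quick check from inside the container:
# List the per-table target directories
hdfs dfs -ls /test/full_imp/
# Inspect one table's output; each import wrote a single part file because of -m 1
hdfs dfs -ls /test/full_imp/CISS4.CISS_BASE_AREAS/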
