KingBase

1、Kingbase error (the kingbase8 driver rejecting SCRAM auth; hba fix sketched after this list):
com.kingbase8.util.KSQLException: The authentication type 10 is not supported. Check that you have configured the sys_hba.conf file to include the client's IP address or subnet,
and that it is using an authentication scheme supported by the driver.
https://blog.csdn.net/arthemis_14/article/details/124663105
2、Kingbase batch insert (multi-row INSERT sketched after this list)
https://blog.csdn.net/C3245073527/article/details/122071045
3、https://www.cnblogs.com/Ao0216/p/15963495.html How Spark reads Hive data
4、https://www.csdn.net/tags/NtzaAg1sODI1MC1ibG9n.html Setting the user a Spark job runs as (sketched after this list)
5、https://blog.csdn.net/qq_21159963/article/details/103457752 Spark JDBC operations
6、https://blog.csdn.net/fengzheku/article/details/50688044 Resolving java.lang.NoSuchMethodError: com.google.common.collect.Sets.newConcurrentHashSet() (a Guava classpath check is sketched after this list)
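
For item 1: authentication type 10 is the SCRAM request, which older kingbase8 drivers cannot answer, so the usual fix is an hba entry that uses an older method for the client's subnet. A minimal sketch, assuming sys_hba.conf follows the PostgreSQL hba format and that 192.168.0.0/24 stands in for the real client subnet:

# hypothetical sys_hba.conf entry: allow the client subnet with md5 instead of scram
host    all    all    192.168.0.0/24    md5

Reload the server configuration afterwards so the entry takes effect.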
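
For item 2, the linked article covers JDBC batching; the same effect can be had from the shell by folding rows into one multi-row INSERT. A sketch, assuming KingbaseES ships a psql-style client named ksql and a table t1(id, name) — both assumptions:

# hypothetical: batch several rows into a single INSERT via the ksql client
ksql -h ip -p 54321 -U system -d bigdata <<'SQL'
INSERT INTO t1 (id, name) VALUES
  (1, 'a'),
  (2, 'b'),
  (3, 'c');
SQL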
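
For item 4, the quickest way to set the user a Spark job runs as is the same HADOOP_USER_NAME trick used for sqoop further down; a minimal sketch, where my_job.py is a placeholder:

# hypothetical: run a Spark job as hdfs with the Hive catalog enabled
export HADOOP_USER_NAME=hdfs
spark-submit \
  --master yarn \
  --conf spark.sql.catalogImplementation=hive \
  my_job.py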
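
For item 6: Sets.newConcurrentHashSet() only exists from Guava 15.0 on, so the NoSuchMethodError means an older Guava shadows the newer one on the classpath. A quick way to find the duplicates, with illustrative paths:

# list every Guava jar the job could pick up (paths are examples)
find $HADOOP_HOME $SQOOP_HOME -name 'guava-*.jar' 2>/dev/null
# in a Maven project, show which dependency pulls in the old Guava
mvn dependency:tree -Dincludes=com.google.guava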

 

# verify connectivity first: list the tables in the bigdata database through the Kingbase driver
sqoop list-tables \
--driver com.kingbase8.Driver \
--connect jdbc:kingbase8://ip:54321/bigdata \
--username system \
--password SYSTEM


The first import attempt fails with an encoding error:

Error: java.lang.RuntimeException: java.lang.RuntimeException: com.kingbase8.util.KSQLException:
FATAL: invalid value for parameter "client_encoding": "ANSI_X3.4-1968"


# the import that triggered the error above (no encoding specified anywhere)
sqoop import \
--driver com.kingbase8.Driver \
--connect jdbc:kingbase8://ip:54321/bigdata \
--username system \
--password SYSTEM \
--table flow_conf \
--hive-import \
--create-hive-table \
--hive-table default.flow_conf \
-m 1
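
"ANSI_X3.4-1968" is the name the driver reports for a plain C/POSIX locale, so one workaround, besides the JDBC URL parameters used below, is to give the sqoop session a UTF-8 locale first. A sketch (the locale name may differ per machine):

# assumed workaround: a UTF-8 locale lets the driver negotiate a valid client_encoding
export LANG=en_US.UTF-8
export LC_ALL=en_US.UTF-8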

 

# as the hdfs user, inspect the sqoop target directory left under /user/root
su hdfs
hdfs dfs -ls /user/root/cds_alarm_dispose_flow_conf

# working import: run as hdfs and pin the client encoding on the JDBC URL
export HADOOP_USER_NAME=hdfs
sqoop import \
--driver com.kingbase8.Driver \
--connect "jdbc:kingbase8://ip:54321/bigdata?useUnicode=true&characterEncoding=gbk" \
--username system \
--password SYSTEM \
--table cds_alarm_dispose_flow_conf \
--validate \
--hive-import \
--hive-table default.cds_alarm_dispose_flow_conf \
--hive-overwrite \
--delete-target-dir \
--as-parquetfile \
--hive-delims-replacement ' ' \
--fields-terminated-by '\t' \
--lines-terminated-by '\n' \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1
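
A quick sanity check on the Hive side once the import finishes (table name from the command above):

# count the rows that landed in the imported Hive table
hive -e 'SELECT COUNT(*) FROM default.cds_alarm_dispose_flow_conf;'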

--

# same pattern against MySQL, importing via a free-form query into Hive
export HADOOP_USER_NAME=hdfs
sqoop import \
--connect jdbc:mysql://ip2:3306/device_db \
--username bigdata \
--password Bigdata@sunward \
--query 'SELECT device_id,device_type_num_id,device_num,device_data_update_time FROM device WHERE $CONDITIONS' \
--target-dir /user/hdfs/device \
--hive-import \
--hive-database shxypt \
--hive-table device \
--hive-overwrite \
--delete-target-dir \
--as-parquetfile \
--hive-delims-replacement ' ' \
--fields-terminated-by '\t' \
--lines-terminated-by '\n' \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1

# export Hive warehouse files for testa back to the database (PostgreSQL driver against Kingbase's default port 54321)
sqoop export \
--connect "jdbc:postgresql://ip:54321/test?allowEncodingChanges=true&clientEncoding=utf8" \
--username system \
--password root1234 \
--table t1 \
--num-mappers 1 \
--export-dir /user/hive/warehouse/testa \
--input-fields-terminated-by ","
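
sqoop export does not create the target table, so t1 must exist beforehand. A minimal sketch of preparing it, assuming the psql client can reach the server the same way the PostgreSQL JDBC driver above does, and assuming a simple two-column layout for t1 (both assumptions):

# hypothetical: create the target table before running the export (columns assumed)
psql -h ip -p 54321 -U system -d test \
  -c 'CREATE TABLE t1 (id integer, name varchar(255));'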

# alternative fix: PostgreSQL-style encoding parameters on the kingbase8 URL
export HADOOP_USER_NAME=hdfs
sqoop import \
--driver com.kingbase8.Driver \
--connect "jdbc:kingbase8://ip:54321/bigdata?allowEncodingChanges=true&clientEncoding=utf8" \
--username system \
--password SYSTEM \
--table cds_alarm_dispose_flow_conf \
--validate \
--hive-import \
--hive-table default.cds_alarm_dispose_flow_conf \
--hive-overwrite \
--delete-target-dir \
--as-parquetfile \
--hive-delims-replacement ' ' \
--fields-terminated-by '\t' \
--lines-terminated-by '\n' \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1
