Hive-to-Hive exchange using the 5.1 kettle-core package
Preface
In an earlier post, https://blog.csdn.net/lw18751836671/article/details/119794613?spm=1001.2014.3001.5501, I mistakenly assumed I had downloaded the 5.1 source code, when it was actually the 9.3 source. For how to download and build the core module of the 5.1 source, see
https://blog.csdn.net/lw18751836671/article/details/119816722
Modifying the core source code
In org.pentaho.di.core.row.value.ValueMetaBase, locate the calls to the isSigned method; there should be two of them. Wrap each one in a try-catch, handling the failure the same way the 9.3 source does.
Without this change, the run fails with a "Method not supported" error, which is what Hive's JDBC driver throws for isSigned.
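A minimal sketch of the patch, mirroring the 9.3-style handling (rm, index, and signed are illustrative names; apply the same pattern at both call sites):

// Inside ValueMetaBase, around each call to ResultSetMetaData.isSigned().
boolean signed = false;
try {
    signed = rm.isSigned(index); // rm: the ResultSetMetaData, index: the column index
} catch (Exception ignored) {
    // Hive's JDBC driver throws "Method not supported" here;
    // fall back to treating the column as unsigned, as the 9.3 source does.
}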
The project must of course reference the patched core jar instead of the stock one. I did not manage to build the engine module, so I still use the engine jar left behind by my predecessors:
<dependency>
    <groupId>pentaho-kettle</groupId>
    <artifactId>kettle-engine</artifactId>
    <!--<version>${kettle.version}</version>-->
    <version>5.1.0.0-752</version>
</dependency>
<dependency>
    <groupId>pentaho-kettle</groupId>
    <artifactId>kettle-core</artifactId>
    <version>5.1.0.0-753</version>
</dependency>
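For Maven to resolve the patched kettle-core under that version, the rebuilt jar must first be installed into your local (or private) repository. A sketch, assuming the rebuilt jar file is named kettle-core-5.1.0.0-753.jar (adjust the file path and version to your build):

mvn install:install-file -Dfile=kettle-core-5.1.0.0-753.jar \
    -DgroupId=pentaho-kettle -DartifactId=kettle-core \
    -Dversion=5.1.0.0-753 -Dpackaging=jar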
Run the Hive-to-Hive exchange unit test class below; if it finishes without errors, the patched jar works.
Unit test class code (imports and the class declaration added here for completeness):
import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.tableinput.TableInputMeta;
import org.pentaho.di.trans.steps.tableoutput.TableOutputMeta;

public class Hive2HiveExchangeTest {

    @Before
    public void before() {
        try {
            // Initialize the Kettle environment (registers plugins, reads kettle.properties)
            KettleEnvironment.init();
            EnvUtil.environmentInit();
        } catch (KettleException e) {
            e.printStackTrace();
        }
    }
    /**
     * Hive-to-Hive exchange
     * @throws KettleException if building or running the transformation fails
     */
    @Test
    public void exchangeHive2Hive() throws KettleException {
        // Source database connection, defined as Kettle's <connection> XML
        String hive_src = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
                "<connection>" +
                "<name>hive_src</name>" +
                "<server>192.168.10.212</server>" +
                "<type>HIVE2</type>" +
                "<access>Native</access>" +
                "<database>ntzw_dev_64</database>" +
                "<port>10000</port>" +
                "<username>hadoop</username>" +
                "<password>hadoop</password>" +
                "</connection>";
        // Target database connection
        String hive_dest = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
                "<connection>" +
                "<name>hive_dest</name>" +
                "<server>192.168.10.212</server>" +
                "<type>HIVE2</type>" +
                "<access>Native</access>" +
                "<database>ntzw_dev_64</database>" +
                "<port>10000</port>" +
                "<username>hadoop</username>" +
                "<password>hadoop</password>" +
                "</connection>";
        DatabaseMeta srcDatabaseMeta = new DatabaseMeta(hive_src);
        DatabaseMeta destDatabaseMeta = new DatabaseMeta(hive_dest);

        // Create the transformation metadata
        TransMeta transMeta = new TransMeta();
        transMeta.setName("Hive-to-Hive exchange");

        // Register the source and target connections on the transformation
        transMeta.addDatabase(srcDatabaseMeta);
        transMeta.addDatabase(destDatabaseMeta);

        /*
         * Create the Table Input -> Table Output steps
         * and connect them with a hop.
         */
        PluginRegistry registry = PluginRegistry.getInstance();

        // Table Input step: reads rows from the source Hive table
        TableInputMeta tableInputMeta = new TableInputMeta();
        String tableInputPluginId = registry.getPluginId(StepPluginType.class, tableInputMeta);
        tableInputMeta.setDatabaseMeta(srcDatabaseMeta);
        // The query that selects the rows to transfer
        String selectSql = "select id, name from user_info_src";
        tableInputMeta.setSQL(selectSql);
        StepMeta tableInputStepMeta = new StepMeta(tableInputPluginId, "tableInput", tableInputMeta);
        transMeta.addStep(tableInputStepMeta);
        // Table Output step: writes rows into the target Hive table
        TableOutputMeta tableOutputMeta = new TableOutputMeta();
        tableOutputMeta.setDatabaseMeta(destDatabaseMeta);
        // Set the target schema and table name
        tableOutputMeta.setSchemaName(null);
        tableOutputMeta.setTablename("user_info_dest");
        String tableOutputPluginId = registry.getPluginId(StepPluginType.class, tableOutputMeta);
        StepMeta tableOutputStep = new StepMeta(tableOutputPluginId, "tableOutput", tableOutputMeta);
        transMeta.addStep(tableOutputStep);

        // The hop links the input step to the output step
        transMeta.addTransHop(new TransHopMeta(tableInputStepMeta, tableOutputStep));
        // Run the transformation and block until it finishes
        Trans trans = new Trans(transMeta);
        trans.execute(null);
        trans.waitUntilFinished();
        if (trans.getErrors() > 0) {
            System.out.println("Exchange failed.");
            return;
        }
    }
}
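If getErrors() returns 0, the rows should now be in user_info_dest; confirm with a quick count query in Hive. To also log how many rows each step processed, a sketch like the following can go right after waitUntilFinished() (this assumes the Kettle 5.1 API, where StepMetaDataCombi exposes public fields; it needs import org.pentaho.di.trans.step.StepMetaDataCombi):

// Log per-step row counts after the transformation finishes.
for (StepMetaDataCombi combi : trans.getSteps()) {
    System.out.println(combi.stepname + ": read=" + combi.step.getLinesRead()
            + ", written=" + combi.step.getLinesWritten());
}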