Java 执行 Hive 命令或者脚本
Java 执行脚本
import java.io.*; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; /** * @Auther:liDeHui * @Date: 2019/7/11 14:23 * @Description:TODO * String start_time,String end_time,String current_lac,String result,String call_type,String date,String id * @Version:1.0 */ public class TestShell2 { private static final String basePath = "/data"; // 记录Shell执行状况的日志文件的位置(绝对路径) private static final String executeShellLogFile = basePath + "executeShell.log"; // 发送文件到Kondor系统的Shell的文件名(绝对路径) private static final String sendKondorShellName = basePath + "a.sh"; public int executeShell(String shellCommand) throws IOException { System.out.println("shellCommand:"+shellCommand); int success = 0; StringBuffer stringBuffer = new StringBuffer(); BufferedReader bufferedReader = null; // 格式化日期时间,记录日志时使用 DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:SS "); try { stringBuffer.append(dateFormat.format(new Date())) .append("准备执行Shell命令 ").append(shellCommand) .append(" \r\n"); Process pid = null; //String[] cmd = { "/bin/sh", "-c", shellCommand }; //给shell传递参数 String[] cmd = { "/bin/sh", "-c", shellCommand+" 20190628000006"+" 20190628010558"+" 22397"+ " 0"+" 0"+" 2019-06-28"+" 3" }; // 执行Shell命令 pid = Runtime.getRuntime().exec(cmd); if (pid != null) { stringBuffer.append("进程号:").append(pid.toString()) .append("\r\n"); // bufferedReader用于读取Shell的输出内容 bufferedReader = new BufferedReader(new InputStreamReader(pid.getInputStream()), 1024); pid.waitFor(); } else { stringBuffer.append("没有pid\r\n"); } stringBuffer.append(dateFormat.format(new Date())).append( "Shell命令执行完毕\r\n执行结果为:\r\n"); String line = null; // 读取Shell的输出内容,并添加到stringBuffer中 while (bufferedReader != null && (line = bufferedReader.readLine()) != null) { stringBuffer.append(line).append("\r\n"); } System.out.println("stringBuffer:"+stringBuffer); } catch (Exception ioe) { stringBuffer.append("执行Shell命令时发生异常:\r\n").append(ioe.getMessage()) .append("\r\n"); } 
finally { if (bufferedReader != null) { OutputStreamWriter outputStreamWriter = null; try { bufferedReader.close(); // 将Shell的执行情况输出到日志文件中 OutputStream outputStream = new FileOutputStream(executeShellLogFile); outputStreamWriter = new OutputStreamWriter(outputStream, "UTF-8"); outputStreamWriter.write(stringBuffer.toString()); System.out.println("stringBuffer.toString():"+stringBuffer.toString()); } catch (Exception e) { e.printStackTrace(); } finally { outputStreamWriter.close(); } } success = 1; } return success; } public static void main(String[] args) { try { new TestShell2().executeShell(sendKondorShellName); } catch (IOException e) { e.printStackTrace(); } } }
Java 执行 Hive 相关命令
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * Drops and re-adds a Hive table partition via the {@code beeline} CLI.
 *
 * @Auther:liDeHui
 * @Date: 2019/7/11 14:23
 * @Version:1.0
 */
public class TestShell3 {

    /**
     * Drops partition {@code slicetime=sliceTime} from {@code default.tableName},
     * then re-adds it pointing at {@code hdfsPath}.
     *
     * <p>SECURITY NOTE(review): the HiveQL is built by string concatenation and
     * beeline {@code -e} offers no parameter binding — callers must ensure the
     * three arguments come from trusted sources, or the statement is injectable.
     *
     * @param tableName table in the {@code default} database
     * @param sliceTime partition value, e.g. {@code 20190808000000}
     * @param hdfsPath  HDFS location for the re-added partition
     * @throws IOException          if {@code beeline} cannot be launched
     * @throws InterruptedException if interrupted while waiting for the child process
     */
    public static void executeShell(String tableName, String sliceTime, String hdfsPath)
            throws IOException, InterruptedException {
        List<String> addPartition = new ArrayList<String>();
        addPartition.add("beeline");
        addPartition.add("-e");
        addPartition.add("alter table default." + tableName
                + " add partition(slicetime=" + sliceTime + ") location '" + hdfsPath + "'");

        List<String> dropPartition = new ArrayList<String>();
        dropPartition.add("beeline");
        dropPartition.add("-e");
        dropPartition.add("alter table default." + tableName
                + " drop partition(slicetime=" + sliceTime + ")");

        // Drop first so the subsequent add never collides with a stale partition.
        int dropStatus = run(dropPartition);
        System.out.println("drop partitions status:" + dropStatus); // typo "paritions" fixed
        int addStatus = run(addPartition);
        System.out.println("add partitions status:" + addStatus);
    }

    /**
     * Launches {@code command} and returns its exit status.
     * BUG FIX: the original never consumed the child's output, so a chatty
     * beeline run could fill the pipe buffer and deadlock waitFor().
     */
    private static int run(List<String> command) throws IOException, InterruptedException {
        ProcessBuilder builder = new ProcessBuilder(command);
        builder.redirectErrorStream(true); // merge stderr so one reader drains everything
        Process process = builder.start();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            while (reader.readLine() != null) {
                // discard — we only need the exit status, but the pipe must be drained
            }
        }
        return process.waitFor();
    }

    /*
     * Example statement this class issues:
     * alter table O_NRM_ZJ add partition(slicetime=20190808000000)
     *   location 'hdfs://ns2/jc_wxwzzx/o_data_dir/kaitong/NRM_ZJ/NRM_ZJ/20190808000000';
     */
    public static void main(String[] args) throws Exception {
        String tableName = "O_AAA_ZJ";
        String sliceTime = "20190808000000";
        String hdfsPath = "hdfs://NRM_ZJ/NRM_ZJ/20190808000000";
        executeShell(tableName, sliceTime, hdfsPath);
    }
}