Analysis of the Hive QL Module Execution Process
DriverState
The per-thread state machine of the Driver; it also holds a reentrant lock that guards state transitions.
/**
* Represents the driver's state. Also has mechanism for locking for the time of state transitions.
*/
public class DriverState {
// Thread-local static variable, used here as a per-thread singleton: each thread owns exactly one DriverState instance
private static ThreadLocal<DriverState> tlInstance = new ThreadLocal<DriverState>() {
@Override
protected DriverState initialValue() {
return new DriverState();
}
};
public static void setDriverState(DriverState state) {
tlInstance.set(state);
}
public static DriverState getDriverState() {
return tlInstance.get();
}
public static void removeDriverState() {
tlInstance.remove();
}
/**
* Enumeration of the potential driver states.
*/
private enum State {
INITIALIZED,
COMPILING,
COMPILED,
EXECUTING,
EXECUTED,
// a state that the driver enters after close() has been called to clean the query results
// and release the resources after the query has been executed
CLOSED,
// a state that the driver enters after destroy() is called and it is the end of driver life cycle
DESTROYED,
ERROR
}
// a lock is used for synchronizing the state transition and its associated resource releases
private final ReentrantLock stateLock = new ReentrantLock();
private final AtomicBoolean aborted = new AtomicBoolean();
private State driverState = State.INITIALIZED;
.....
The remaining code consists of the methods that perform the guarded state transitions (for example the lock(), compiling(), executing() and isCompiled() calls used later in Driver.runInternal()).
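As a rough illustration, here is a hedged sketch of the shape of those transition methods; the names match the calls made later in Driver.runInternal(), but the bodies are assumptions about the general pattern, not code copied from DriverState:
// Sketch only: transitions are guarded by the ReentrantLock declared above.
public void lock() {
  stateLock.lock();
}

public void unlock() {
  stateLock.unlock();
}

// Called by Driver.runInternal() while it already holds the lock.
public void compiling() {
  driverState = State.COMPILING;
}

public boolean isCompiled() {
  return driverState == State.COMPILED;
}

// Hypothetical self-locking variant, shown to illustrate why the lock exists:
// the state change and any associated resource handling must happen atomically.
public void compilingWithLocking() {
  stateLock.lock();
  try {
    driverState = State.COMPILING;
  } finally {
    stateLock.unlock();
  }
}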
DriverContext
/**
* Context for the procedure managed by the Driver.
*/
public class DriverContext {
// For WebUI. Kept alive after queryPlan is freed.
private final QueryDisplay queryDisplay = new QueryDisplay();
private final QueryState queryState;
private final QueryInfo queryInfo;
private final HiveConf conf;
private final HookRunner hookRunner;
// Transaction manager the Driver has been initialized with (can be null).
// If this is set then this Transaction manager will be used during query
// compilation/execution rather than using the current session's transaction manager.
// This might be needed in a situation where a Driver is nested within an already
// running Driver/query - the nested Driver requires a separate transaction manager
// so as not to conflict with the outer Driver/query which is using the session
// transaction manager.
private final HiveTxnManager initTxnManager;
// Execution plan for the query; its tasks are generated for the configured execution engine (MapReduce, Tez or Spark)
private QueryPlan plan;
private Schema schema;
private FetchTask fetchTask;
// Transaction manager used for the query. This will be set at compile time based on
// either initTxnMgr or from the SessionState, in that order.
private HiveTxnManager txnManager;
private TxnType txnType = TxnType.DEFAULT;
private StatsSource statsSource;
// Boolean to store information about whether valid txn list was generated
// for current query.
private boolean validTxnListsGenerated;
private CacheUsage cacheUsage;
private CacheEntry usedCacheEntry;
private ValidWriteIdList compactionWriteIds = null;
private long compactorTxnId = 0;
private boolean retrial = false;
private DataInput resStream;
// HS2 operation handle guid string
private String operationId;
public DriverContext(QueryState queryState, QueryInfo queryInfo, HookRunner hookRunner,
HiveTxnManager initTxnManager) {
this.queryState = queryState;
this.queryInfo = queryInfo;
this.conf = queryState.getConf();
this.hookRunner = hookRunner;
this.initTxnManager = initTxnManager;
}
...
}
TaskQueue
TaskQueue holds the tasks of a compiled plan while they execute; the Executor drains it to run the tasks, and it is also the handle used to cancel a running query (see the taskQueue = new TaskQueue(context) call in runInternal() below).
Compiler
/**
* The compiler compiles the command, by creating a QueryPlan from a String command.
* Also opens a transaction if necessary.
*/
public class Compiler {
private static final String CLASS_NAME = Driver.class.getName();
private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
private static final LogHelper CONSOLE = new LogHelper(LOG);
private final Context context;
private final DriverContext driverContext;
private final DriverState driverState;
private final PerfLogger perfLogger = SessionState.getPerfLogger();
private ASTNode tree;
public Compiler(Context context, DriverContext driverContext, DriverState driverState) {
this.context = context;
this.driverContext = driverContext;
this.driverState = driverState;
}
/**
* @param deferClose indicates if the close/destroy should be deferred when the process has been interrupted
* it should be set to true if the compile is called within another method like runInternal,
which defers the close to the caller of that method.
*/
public QueryPlan compile(String rawCommand, boolean deferClose) throws CommandProcessorException {
initialize(rawCommand);
boolean compileError = false;
boolean parsed = false;
QueryPlan plan = null;
try {
DriverUtils.checkInterrupted(driverState, driverContext, "before parsing and analysing the query", null, null);
// Syntax analysis: parse the command text into an AST
parse();
parsed = true;
// Semantic analysis
BaseSemanticAnalyzer sem = analyze();
DriverUtils.checkInterrupted(driverState, driverContext, "after analyzing query.", null, null);
// Build the QueryPlan from the analyzer output
plan = createPlan(sem);
initializeFetchTask(plan);
authorize(sem);
explainOutput(sem, plan);
} catch (CommandProcessorException cpe) {
compileError = true;
throw cpe;
} catch (Exception e) {
compileError = true;
DriverUtils.checkInterrupted(driverState, driverContext, "during query compilation: " + e.getMessage(), null,
null);
handleException(e);
} finally {
cleanUp(compileError, parsed, deferClose);
}
return plan;
}
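The private helpers called in compile() map onto public Hive parser APIs; below is a hedged sketch of what parse()/analyze()/createPlan() roughly do. ParseUtils, SemanticAnalyzerFactory and BaseSemanticAnalyzer are real Hive classes, but the sequence is a simplification, not the actual private method bodies:
// Hedged sketch of the compile pipeline.
// parse(): command text -> abstract syntax tree
ASTNode tree = ParseUtils.parse(rawCommand, context);

// analyze(): pick an analyzer for the statement type, resolve metastore metadata,
// build the operator tree and the task DAG for the configured execution engine
BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(driverContext.getQueryState(), tree);
sem.analyze(tree, context);
sem.validate();

// createPlan(): wrap the analyzer output (root tasks, fetch task, result schema) into a
// QueryPlan; the exact constructor arguments are not shown in this excerpt.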
Driver
The client-facing class that drives a Hive query; it is mainly responsible for compiling the SQL and then executing it.
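Before the entry points below, here is a hedged usage sketch of the Driver from the caller's side (for example the CLI or HiveServer2); the QueryState setup and getResults() come from Driver's public API as I understand it and are assumptions as far as this excerpt is concerned:
// Hedged usage sketch; assumes a started SessionState and a valid HiveConf `conf`.
QueryState queryState = new QueryState.Builder().withHiveConf(conf).build();
Driver driver = new Driver(queryState);

driver.run("select * from metadata.channels");   // compile + execute in one call

List<Object> rows = new ArrayList<>();
while (driver.getResults(rows)) {                 // rows are produced by the plan's FetchTask
  rows.forEach(System.out::println);
  rows.clear();
}
driver.close();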
/**
* Compiles and executes an HQL command.
*/
@Override
public CommandProcessorResponse run(String command) throws CommandProcessorException {
return run(command, false);
}
/**
* Executes a previously compiled HQL command.
*/
@Override
public CommandProcessorResponse run() throws CommandProcessorException {
return run(null, true);
}
private CommandProcessorResponse run(String command, boolean alreadyCompiled) throws CommandProcessorException {
try {
// The method that actually compiles and runs the SQL
runInternal(command, alreadyCompiled);
// Return the response (carrying the result schema)
return new CommandProcessorResponse(getSchema(), null);
} catch (CommandProcessorException cpe) {
// Everything below is error handling
SessionState ss = SessionState.get();
if (ss == null) {
throw cpe;
}
MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
if (!(mdf instanceof JsonMetaDataFormatter)) {
throw cpe;
}
/*Here we want to encode the error in machine readable way (e.g. JSON)
* Ideally, errorCode would always be set to a canonical error defined in ErrorMsg.
* In practice that is rarely the case, so the messy logic below tries to tease
* out canonical error code if it can. Exclude stack trace from output when
* the error is a specific/expected one.
* It's written to stdout for backward compatibility (WebHCat consumes it).*/
try {
if (cpe.getCause() == null) {
mdf.error(ss.out, cpe.getMessage(), cpe.getResponseCode(), cpe.getSqlState());
throw cpe;
}
ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpe.getResponseCode());
if (canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) {
/*Some HiveExceptions (e.g. SemanticException) don't set
canonical ErrorMsg explicitly, but there is logic
(e.g. #compile()) to find an appropriate canonical error and
return its code as error code. In this case we want to
preserve it for downstream code to interpret*/
mdf.error(ss.out, cpe.getMessage(), cpe.getResponseCode(), cpe.getSqlState(), null);
throw cpe;
}
if (cpe.getCause() instanceof HiveException) {
HiveException rc = (HiveException)cpe.getCause();
mdf.error(ss.out, cpe.getMessage(), rc.getCanonicalErrorMsg().getErrorCode(), cpe.getSqlState(),
rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? StringUtils.stringifyException(rc) : null);
} else {
ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(cpe.getCause().getMessage());
mdf.error(ss.out, cpe.getMessage(), canonicalMsg.getErrorCode(), cpe.getSqlState(),
StringUtils.stringifyException(cpe.getCause()));
}
} catch (HiveException ex) {
CONSOLE.printError("Unable to JSON-encode the error", StringUtils.stringifyException(ex));
}
throw cpe;
}
}
@Override
public CommandProcessorResponse compileAndRespond(String command) throws CommandProcessorException {
return compileAndRespond(command, false);
}
public CommandProcessorResponse compileAndRespond(String command, boolean cleanupTxnList)
throws CommandProcessorException {
try {
compileInternal(command, false);
return new CommandProcessorResponse(getSchema(), null);
} catch (CommandProcessorException cpe) {
throw cpe;
} finally {
if (cleanupTxnList) {
// Valid txn list might be generated for a query compiled using this command, thus we need to reset it
driverTxnHandler.cleanupTxnList();
}
}
}
public void lockAndRespond() throws CommandProcessorException {
// Assumes the query has already been compiled
if (driverContext.getPlan() == null) {
throw new IllegalStateException(
"No previously compiled query for driver - queryId=" + driverContext.getQueryState().getQueryId());
}
try {
driverTxnHandler.acquireLocksIfNeeded();
} catch (CommandProcessorException cpe) {
driverTxnHandler.rollback(cpe);
throw cpe;
}
}
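compileAndRespond(), lockAndRespond() and run() together support a split compile/execute flow (compile first, execute the cached plan later), which is how HiveServer2 typically drives asynchronous queries; a hedged sketch of that calling pattern, using only the methods shown above:
// Hedged sketch of the split compile/execute pattern; `driver` is an initialized Driver.
driver.compileAndRespond("select * from metadata.channels");  // phase 1: build and cache the QueryPlan
// ... potentially later, on a background thread ...
driver.run();   // phase 2: run(null, true) executes the cached plan;
                // runInternal() itself calls lockAndRespond() to take the required locks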
Compilation
compileInternal() is the method that performs compilation and produces the execution plan.
/**
* Compiles an HQL command, creates an execution plan for it.
*
* @param deferClose indicates if the close/destroy should be deferred when the process has been interrupted, it
* should be set to true if the compile is called within another method like runInternal, which defers the
close to the caller of that method.
* @param resetTaskIds Resets taskID counter if true.
*/
@VisibleForTesting
public void compile(String command, boolean resetTaskIds, boolean deferClose) throws CommandProcessorException {
preparForCompile(resetTaskIds);
Compiler compiler = new Compiler(context, driverContext, driverState);
QueryPlan plan = compiler.compile(command, deferClose);
driverContext.setPlan(plan);
compileFinished(deferClose);
}
private void compileInternal(String command, boolean deferClose) throws CommandProcessorException {
Metrics metrics = MetricsFactory.getInstance();
if (metrics != null) {
metrics.incrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
}
PerfLogger perfLogger = SessionState.getPerfLogger(true);
perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.WAIT_COMPILE);
try (CompileLock compileLock = CompileLockFactory.newInstance(driverContext.getConf(), command)) {
boolean success = compileLock.tryAcquire();
perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.WAIT_COMPILE);
if (metrics != null) {
metrics.decrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
}
if (!success) {
String errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg();
throw DriverUtils.createProcessorException(driverContext, ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode(),
errorMessage, null, null);
}
try {
compile(command, true, deferClose);
} catch (CommandProcessorException cpe) {
try {
driverTxnHandler.endTransactionAndCleanup(false);
} catch (LockException e) {
LOG.warn("Exception in releasing locks. " + StringUtils.stringifyException(e));
}
throw cpe;
}
}
//Save compile-time PerfLogging for WebUI.
//Execution-time Perf logs are done by either another thread's PerfLogger
//or a reset PerfLogger.
driverContext.getQueryDisplay().setPerfLogStarts(QueryDisplay.Phase.COMPILATION, perfLogger.getStartTimes());
driverContext.getQueryDisplay().setPerfLogEnds(QueryDisplay.Phase.COMPILATION, perfLogger.getEndTimes());
}
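Note that the compile lock is acquired in a try-with-resources block, so CompileLock must be AutoCloseable and release itself in close() even when compilation throws. A minimal, hypothetical analogue of that pattern (not the real CompileLock implementation):
import java.util.concurrent.locks.ReentrantLock;

// Simplified stand-in that mirrors the acquire-in-try / release-in-close pattern above.
class SimpleCompileLock implements AutoCloseable {
  private final ReentrantLock lock = new ReentrantLock();
  private boolean acquired;

  boolean tryAcquire() {            // the real lock may also honour a configured timeout
    acquired = lock.tryLock();
    return acquired;
  }

  @Override
  public void close() {             // runs when the try block exits, success or failure
    if (acquired) {
      lock.unlock();
    }
  }
}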
Execution
runInternal() is the method, called from run(), that actually executes the SQL.
private void runInternal(String command, boolean alreadyCompiled) throws CommandProcessorException {
DriverState.setDriverState(driverState);
// Lock this DriverState instance while changing its state
driverState.lock();
try {
if (driverContext != null && driverContext.getPlan() != null
&& driverContext.getPlan().isPrepareQuery()
&& !driverContext.getPlan().isExplain()) {
LOG.info("Skip running tasks for prepare plan");
return;
}
if (alreadyCompiled) {
if (driverState.isCompiled()) {
driverState.executing();
} else {
// alreadyCompiled is set but the driver is no longer in the COMPILED state, so fail
String errorMessage = "FAILED: Precompiled query has been cancelled or closed.";
CONSOLE.printError(errorMessage);
throw DriverUtils.createProcessorException(driverContext, 12, errorMessage, null, null);
}
} else {
// Transition driverState to COMPILING
driverState.compiling();
}
} finally {
// Release the state lock
driverState.unlock();
}
// a flag that helps to set the correct driver state in finally block by tracking if
// the method has been returned by an error or not.
boolean isFinishedWithError = true;
try {
HiveDriverRunHookContext hookContext = new HiveDriverRunHookContextImpl(driverContext.getConf(),
alreadyCompiled ? context.getCmd() : command);
// Get all the driver run hooks and pre-execute them.
try {
driverContext.getHookRunner().runPreDriverHooks(hookContext);
} catch (Exception e) {
String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
CONSOLE.printError(errorMessage + "\n" + StringUtils.stringifyException(e));
throw DriverUtils.createProcessorException(driverContext, 12, errorMessage,
ErrorMsg.findSQLState(e.getMessage()), e);
}
if (!alreadyCompiled) {
// compile internal will automatically reset the perf logger
// Compile: the generated QueryPlan is stored in driverContext
compileInternal(command, true);
} else {
// Since we're reusing the compiled plan, we need to update its start time for current run
driverContext.getPlan().setQueryStartTime(driverContext.getQueryDisplay().getQueryStartTime());
}
//Reset the PerfLogger so that it doesn't retain any previous values.
// Any value from compilation phase can be obtained through the map set in queryDisplay during compilation.
PerfLogger perfLogger = SessionState.getPerfLogger(true);
// the reason that we set the txn manager for the cxt here is because each
// query has its own ctx object. The txn mgr is shared across the
// same instance of Driver, which can run multiple queries.
context.setHiveTxnManager(driverContext.getTxnManager());
DriverUtils.checkInterrupted(driverState, driverContext, "at acquiring the lock.", null, null);
lockAndRespond();
driverTxnHandler.validateTxnListState();
try {
taskQueue = new TaskQueue(context); // for canceling the query (should be bound to session?)
Executor executor = new Executor(context, driverContext, driverState, taskQueue);
executor.execute();
} catch (CommandProcessorException cpe) {
driverTxnHandler.rollback(cpe);
throw cpe;
}
//if needRequireLock is false, the release here will do nothing because there is no lock
driverTxnHandler.handleTransactionAfterExecution();
driverContext.getQueryDisplay().setPerfLogStarts(QueryDisplay.Phase.EXECUTION, perfLogger.getStartTimes());
driverContext.getQueryDisplay().setPerfLogEnds(QueryDisplay.Phase.EXECUTION, perfLogger.getEndTimes());
// Take all the driver run hooks and post-execute them.
try {
driverContext.getHookRunner().runPostDriverHooks(hookContext);
} catch (Exception e) {
String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
CONSOLE.printError(errorMessage + "\n" + StringUtils.stringifyException(e));
throw DriverUtils.createProcessorException(driverContext, 12, errorMessage,
ErrorMsg.findSQLState(e.getMessage()), e);
}
isFinishedWithError = false;
} finally {
if (driverState.isAborted()) {
closeInProcess(true);
} else {
// only release the related resources ctx, taskQueue as normal
releaseResources();
}
driverState.executionFinishedWithLocking(isFinishedWithError);
}
SessionState.getPerfLogger().cleanupPerfLogMetrics();
}
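Condensing the method above, a successful run(command) pass walks the DriverState through COMPILING, COMPILED, EXECUTING and EXECUTED. The non-error call sequence, restricted to calls that appear in the code above, is roughly:
// Non-error path of runInternal(command, alreadyCompiled = false), condensed.
driverState.lock();
driverState.compiling();                          // guarded state transition
driverState.unlock();

compileInternal(command, true);                   // build the QueryPlan, store it in driverContext
lockAndRespond();                                 // acquire transaction locks for the compiled plan
driverTxnHandler.validateTxnListState();

taskQueue = new TaskQueue(context);               // also the cancellation handle for the query
new Executor(context, driverContext, driverState, taskQueue).execute();

driverTxnHandler.handleTransactionAfterExecution();
driverState.executionFinishedWithLocking(false);  // false: finished without error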
The HiveServer2 log excerpt below shows this flow for a simple select * from metadata.channels: semantic analysis and plan generation on the handler thread, compilation completing after about 6.9 seconds, a SHARED_READ table lock being taken on the background thread, and execution finishing in 0.001 seconds because the query is answered by a fetch task rather than a launched MR/Tez job.
2021-12-10T00:35:41,239 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ql.Context (Context.java:getMRScratchDir(460)) - New scratch dir is hdfs://mfphadoop/tmp/hive/hive/5c6db66c-1a76-4af3-8519-ba0b309eae43/hive_2021-12-10_00-35-34_671_6780014398222947332-7171
2021-12-10T00:35:41,239 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: parse.CalcitePlanner (SemanticAnalyzer.java:genResolvedParseTree(11081)) - Completed getting MetaData in Semantic Analysis
2021-12-10T00:35:41,305 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: parse.CalcitePlanner (SemanticAnalyzer.java:getMetaData(1959)) - Get metadata for source tables
2021-12-10T00:35:41,306 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: metastore.HiveMetaStore (HiveMetaStore.java:logInfo(781)) - 35127: get_table : db=metadata tbl=channels
2021-12-10T00:35:41,306 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(309)) - ugi=hive ip=unknown-ip-addr cmd=get_table : db=metadata tbl=channels
2021-12-10T00:35:41,326 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: parse.CalcitePlanner (SemanticAnalyzer.java:getMetaData(2095)) - Get metadata for subqueries
2021-12-10T00:35:41,326 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: parse.CalcitePlanner (SemanticAnalyzer.java:getMetaData(2119)) - Get metadata for destination tables
2021-12-10T00:35:41,327 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ql.Context (Context.java:getMRScratchDir(460)) - New scratch dir is hdfs://mfphadoop/tmp/hive/hive/5c6db66c-1a76-4af3-8519-ba0b309eae43/hive_2021-12-10_00-35-34_671_6780014398222947332-7171
2021-12-10T00:35:41,328 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: common.FileUtils (FileUtils.java:mkdir(520)) - Creating directory if it doesn't exist: hdfs://mfphadoop/tmp/hive-staging/.hive_hive_2021-12-10_00-35-34_671_6780014398222947332-7171
2021-12-10T00:35:41,513 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: parse.CalcitePlanner (CalcitePlanner.java:genOPTree(409)) - CBO Succeeded; optimized logical plan.
2021-12-10T00:35:41,513 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ppd.OpProcFactory (OpProcFactory.java:process(741)) - Processing for FS(2)
2021-12-10T00:35:41,514 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ppd.OpProcFactory (OpProcFactory.java:process(741)) - Processing for SEL(1)
2021-12-10T00:35:41,514 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ppd.OpProcFactory (OpProcFactory.java:process(415)) - Processing for TS(0)
2021-12-10T00:35:41,515 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: parse.CalcitePlanner (SemanticAnalyzer.java:analyzeInternal(11276)) - Completed plan generation
2021-12-10T00:35:41,515 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ql.Driver (Driver.java:compile(518)) - Semantic Analysis Completed
2021-12-10T00:35:41,515 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ql.Driver (Driver.java:getSchema(290)) - Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:channels.channelid, type:int, comment:null), FieldSchema(name:channels.region, type:string, comment:null), FieldSchema(name:channels.countries, type:array<string>, comment:null), FieldSchema(name:channels.devicetypes, type:array<string>, comment:null), FieldSchema(name:channels.names, type:map<string,string>, comment:null), FieldSchema(name:channels.weight, type:int, comment:null), FieldSchema(name:channels.summarynames, type:map<string,string>, comment:null)], properties:null)
2021-12-10T00:35:41,516 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: exec.TableScanOperator (Operator.java:initialize(334)) - Initializing operator TS[0]
2021-12-10T00:35:41,517 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: exec.SelectOperator (Operator.java:initialize(334)) - Initializing operator SEL[1]
2021-12-10T00:35:41,517 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: exec.SelectOperator (SelectOperator.java:initializeOp(73)) - SELECT struct<channelid:int,region:string,countries:array<string>,devicetypes:array<string>,names:map<string,string>,weight:int,summarynames:map<string,string>>
2021-12-10T00:35:41,517 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: exec.ListSinkOperator (Operator.java:initialize(334)) - Initializing operator LIST_SINK[3]
2021-12-10T00:35:41,528 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: metastore.HiveMetaStore (HiveMetaStore.java:logInfo(781)) - 35127: get_table : db=metadata tbl=channels
2021-12-10T00:35:41,529 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: HiveMetaStore.audit (HiveMetaStore.java:logAuditEvent(309)) - ugi=hive ip=unknown-ip-addr cmd=get_table : db=metadata tbl=channels
2021-12-10T00:35:41,547 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: ql.Driver (Driver.java:compile(642)) - Completed compiling command(queryId=hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae); Time taken: 6.911 seconds
2021-12-10T00:35:41,548 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: conf.HiveConf (HiveConf.java:getLogIdVar(3944)) - Using the default value passed in for log id: 5c6db66c-1a76-4af3-8519-ba0b309eae43
2021-12-10T00:35:41,548 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: session.SessionState (SessionState.java:resetThreadName(432)) - Resetting thread name to HiveServer2-Handler-Pool: Thread-398075
2021-12-10T00:35:41,549 INFO [HiveServer2-Background-Pool: Thread-417454]: lockmgr.DbTxnManager (DbTxnManager.java:acquireLocks(203)) - Setting lock request transaction to txnid:0 for queryId=hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae
2021-12-10T00:35:41,550 INFO [HiveServer2-Background-Pool: Thread-417454]: lockmgr.DbLockManager (DbLockManager.java:lock(101)) - Requesting: queryId=hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae LockRequest(component:[LockComponent(type:SHARED_READ, level:TABLE, dbname:metadata, tablename:channels, operationType:SELECT)], txnid:0, user:hive, hostname:hive-server3, agentInfo:hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae)
2021-12-10T00:35:41,560 INFO [HiveServer2-Background-Pool: Thread-417454]: lockmgr.DbLockManager (DbLockManager.java:lock(104)) - Response to queryId=hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae LockResponse(lockid:23026581, state:ACQUIRED)
2021-12-10T00:35:41,560 INFO [HiveServer2-Background-Pool: Thread-417454]: lockmgr.DbTxnManager (DbTxnManager.java:startHeartbeat(511)) - Started heartbeat with delay/interval = 150000/150000 MILLISECONDS for query: hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae
2021-12-10T00:35:41,560 INFO [HiveServer2-Background-Pool: Thread-417454]: ql.Driver (Driver.java:execute(1735)) - Executing command(queryId=hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae): select * from metadata.channels
2021-12-10T00:35:41,561 INFO [HiveServer2-Background-Pool: Thread-417454]: ql.Driver (Driver.java:execute(2050)) - Completed executing command(queryId=hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae); Time taken: 0.001 seconds
2021-12-10T00:35:41,561 INFO [HiveServer2-Background-Pool: Thread-417454]: ql.Driver (SessionState.java:printInfo(1111)) - OK
2021-12-10T00:35:41,561 INFO [HiveServer2-Background-Pool: Thread-417454]: lockmgr.DbTxnManager (DbTxnManager.java:stopHeartbeat(536)) - Stopped heartbeat for query: hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae
2021-12-10T00:35:41,562 INFO [HiveServer2-Background-Pool: Thread-417454]: lockmgr.DbLockManager (DbLockManager.java:releaseLocks(251)) - releaseLocks: [lockid:23026581 queryId=hive_20211210003534_c13479ce-ad88-4218-84ad-a827693604ae txnid:0]
2021-12-10T00:35:41,574 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: mapred.FileInputFormat (FileInputFormat.java:listStatus(259)) - Total input files to process : 1
2021-12-10T00:35:41,579 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: exec.TableScanOperator (Operator.java:close(690)) - Closing operator TS[0]
2021-12-10T00:35:41,579 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: exec.SelectOperator (Operator.java:close(690)) - Closing operator SEL[1]
2021-12-10T00:35:41,579 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: exec.ListSinkOperator (Operator.java:close(690)) - Closing operator LIST_SINK[3]
2021-12-10T00:35:41,600 INFO [5c6db66c-1a76-4af3-8519-ba0b309eae43 HiveServer2-Handler-Pool: Thread-398075]: operation.OperationManager (OperationManager.java:closeOperation(284)) - Closing operation: OperationHandle [opType=EXECUTE_STATEMENT, getHandleIdentifier()=e1898771-3981-4f4f-9691-550efc5a5c40]