Sqoop error: ERROR bonecp.BoneCP: Unable to start/stop JMX java.security.AccessControlException: access denied ("javax.management.MBeanTrustPermission" "register")
21/01/19 18:12:31 WARN metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value checked . Setting it to value ignored
21/01/19 18:12:31 INFO metastore.ObjectStore: ObjectStore, initialize called
21/01/19 18:12:31 INFO DataNucleus.Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
21/01/19 18:12:31 INFO DataNucleus.Persistence: Property datanucleus.cache.level2 unknown - will be ignored
21/01/19 18:12:31 INFO DataNucleus.Persistence: Property datanucleus.fixedDatastore unknown - will be ignored
21/01/19 18:12:31 ERROR bonecp.BoneCP: Unable to start/stop JMX
java.security.AccessControlException: access denied ("javax.management.MBeanTrustPermission" "register")
    at java.security.AccessControlContext.checkPermission(AccessControlContext.java:372)
    at java.lang.SecurityManager.checkPermission(SecurityManager.java:585)
    at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.checkMBeanTrustPermission(DefaultMBeanServerInterceptor.java:1848)
    at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:322)
    at com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522)
    at com.jolbox.bonecp.BoneCP.registerUnregisterJMX(BoneCP.java:528)
    at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:500)
    at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
    at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
    at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:296)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
    at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
    at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
    at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:420)
    at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:821)
    at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:338)
    at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:217)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
    at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
    at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
    at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
    at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:515)
    at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:544)
    at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:399)
    at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:336)
    at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:297)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:58)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:599)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:564)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:626)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:416)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:78)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:84)
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6490)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:238)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1652)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:80)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:130)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:101)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3367)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3406)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3386)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3640)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:236)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:366)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:310)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:290)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:266)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:558)
    at org.apache.hadoop.hive.ql.session.SessionState.beginStart(SessionState.java:531)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:705)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:641)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.sqoop.hive.HiveImport.executeScript(HiveImport.java:333)
    at org.apache.sqoop.hive.HiveImport.importTable(HiveImport.java:240)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:514)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
    at org.apache.sqoop.Sqoop.run(Sqoop.java:143)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:179)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:218)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:227)
    at org.apache.sqoop.Sqoop.main(Sqoop.java:236)
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.key.prefix.reserve.length does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.enforce.bucketing does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.jdbcdriver does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.retries.max does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.collect.rawdatasize does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.plan.serialization.format does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.mapred.supports.subdirectories does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.enforce.sorting does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.key.prefix.max.length does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.hadoop.supports.splittable.combineinputformat does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.map.groupby.sorted.testmode does not exist
21/01/19 18:12:32 WARN conf.HiveConf: HiveConf of name hive.stats.dbconnectionstring does not exist
21/01/19 18:12:32 INFO metastore.ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"
21/01/19 18:12:34 ERROR bonecp.BoneCP: Unable to start/stop JMX
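For context, the stack trace shows the error is raised during the Hive-import phase of the job (org.apache.sqoop.hive.HiveImport.executeScript). A MySQL-to-Hive import of the kind that hits this path looks roughly like the command below; the host, database, credentials, and table names are placeholders, not the original command:

sqoop import \
  --connect jdbc:mysql://mysql-host:3306/source_db \
  --username sqoop_user \
  --password '******' \
  --table orders \
  --hive-import \
  --hive-table dw.orders \
  -m 1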
Solution (for an import from MySQL into Hive):

Note: although the error is logged, it does not affect the correctness of the imported data.

This is a Java security policy issue: the BoneCP connection pool used by the Hive metastore tries to register a JMX MBean, and the default policy denies javax.management.MBeanTrustPermission "register". Locate the JRE directory and edit the java.policy file under /Java/jre/lib/security, adding the following entry inside the grant block:
permission javax.management.MBeanTrustPermission "register";
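For reference, the entry belongs inside the existing grant { ... } block of java.policy rather than at the end of the file. A minimal sketch of how it sits there (the surrounding permissions are whatever your stock java.policy already contains, shown only as a placeholder comment):

grant {
    // ... keep the existing permissions from the stock java.policy ...

    // allow BoneCP (used by the Hive metastore) to register its JMX MBean
    permission javax.management.MBeanTrustPermission "register";
};

After saving the file, rerun the Sqoop job; the BoneCP JMX error should no longer appear.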
posted on 2021-01-19 18:44 by RICH-ATONE