
Problems encountered while running the program


py4j.protocol.Py4JJavaError: An error occurred while calling o36.load.

org.apache.spark.SparkException: Unable to create database default as failed to create its directory /user/hive/warehouse
at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.liftedTree1$1(InMemoryCatalog.scala:115)
at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.doCreateDatabase(InMemoryCatalog.scala:109)
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.createDatabase(ExternalCatalog.scala:69)
at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:117)
at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.catalog$lzycompute(BaseSessionStateBuilder.scala:133)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.catalog(BaseSessionStateBuilder.scala:131)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:157)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:157)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:74)
at org.apache.spark.sql.SparkSession.baseRelationToDataFrame(SparkSession.scala:428)
at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:233)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=lenovo, access=WRITE, inode="/user/hive":hive:hdfs:drwxr-xr-x
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:255)
at org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer$RangerAccessControlEnforcer.checkDefaultEnforcer(RangerHdfsAuthorizer.java:589)
at org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer$RangerAccessControlEnforcer.checkPermission(RangerHdfsAuthorizer.java:350)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1857)
at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1841)
at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1800)
at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:59)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3150)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1126)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:707)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)

at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3002)
at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2970)
at org.apache.hadoop.hdfs.DistributedFileSystem$21.doCall(DistributedFileSystem.java:1047)
at org.apache.hadoop.hdfs.DistributedFileSystem$21.doCall(DistributedFileSystem.java:1043)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1061)
at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1036)
at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1881)
at org.apache.spark.sql.catalyst.catalog.InMemoryCatalog.liftedTree1$1(InMemoryCatalog.scala:112)
... 31 more
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=lenovo, access=WRITE, inode="/user/hive":hive:hdfs:drwxr-xr-x
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:255)
at org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer$RangerAccessControlEnforcer.checkDefaultEnforcer(RangerHdfsAuthorizer.java:589)
at org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer$RangerAccessControlEnforcer.checkPermission(RangerHdfsAuthorizer.java:350)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1857)
at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1841)
at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1800)
at org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.mkdirs(FSDirMkdirOp.java:59)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3150)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:1126)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:707)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)

at org.apache.hadoop.ipc.Client.call(Client.java:1475)
at org.apache.hadoop.ipc.Client.call(Client.java:1412)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
at com.sun.proxy.$Proxy23.mkdirs(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:558)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy24.mkdirs(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3000)
...
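
The key lines are the two "Caused by" entries: the job runs as the local user lenovo, which has no WRITE permission on the HDFS directory /user/hive (owner hive, group hdfs, mode drwxr-xr-x), so Spark cannot create its default warehouse directory /user/hive/warehouse. Below is a minimal PySpark sketch of two common client-side workarounds, assuming the cluster uses simple (non-Kerberos) authentication; the user name and paths are placeholders, not values from this post:

import os

# Workaround 1 (assumed simple-auth cluster): HADOOP_USER_NAME decides which
# user the NameNode sees. Set it to an account that may write under
# /user/hive, BEFORE the JVM is launched. "hive" here is a placeholder.
os.environ["HADOOP_USER_NAME"] = "hive"

from pyspark.sql import SparkSession

spark = (
    SparkSession.builder
    .appName("warehouse-permission-workaround")
    # Workaround 2: redirect the Spark SQL warehouse to a writable location,
    # so Spark never attempts mkdir on /user/hive/warehouse in the first place.
    .config("spark.sql.warehouse.dir", "file:///C:/tmp/spark-warehouse")
    .getOrCreate()
)

Since the trace shows Apache Ranger enforcing the check (RangerHdfsAuthorizer), the cleaner fix is server-side: have an administrator grant the submitting user write access to /user/hive/warehouse via a Ranger policy or hdfs dfs -chmod/-chown.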

Uninstalling and reinstalling pyspark eased the problem somewhat, but it was followed at once by a thornier error: "Java gateway process exited before sending its port number". Despite the time and effort already spent troubleshooting, there is still no satisfactory result. Past experience suggests that completely tearing down and rebuilding Python and its related environment can bring unforeseeable risks, so for now I would rather not take that step; instead I keep following related technical articles, looking for a new approach that resolves the problem for good.
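
For reference, "Java gateway process exited before sending its port number" generally means the JVM that PySpark spawns died during startup, most often because JAVA_HOME or SPARK_HOME is unset or points at a missing or incompatible installation. A minimal diagnostic sketch, with placeholder Windows paths (substitute the actual install locations):

import os

# Placeholder paths; PySpark reads these when it launches the py4j gateway.
os.environ["JAVA_HOME"] = r"C:\Program Files\Java\jdk1.8.0_201"
os.environ["SPARK_HOME"] = r"C:\spark-2.3.0-bin-hadoop2.7"

# Optional helper (pip install findspark): puts SPARK_HOME's Python bindings
# on sys.path so a pip-installed pyspark and a downloaded Spark cannot clash.
import findspark
findspark.init()

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("gateway-smoke-test").getOrCreate()
print(spark.version)  # if this prints, the gateway came up correctly

If the error persists, running the same script from a console where java -version succeeds helps separate a PATH problem from a broken pyspark installation.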
