java.lang.ClassNotFoundException: com.aliyun.oss.OSSClient - how to fix?

My command:
./bin/spark-submit --jars /opt/aliyun-sdk-oss-2.8.2.jar,/opt/aliyun-sdk-mns-1.1.8.jar,/opt/emr-mns_2.11-1.4.1.jar,/opt/emr-core-1.4.1.jar,/opt/aliyun-log-0.6.6.jar,/opt/emr-logservice_2.11-1.4.1.jar,/opt/emr-maxcompute_2.11-1.4.1.jar,/opt/emr-ons_2.11-1.4.1.jar,/opt/emr-sdk_2.11-1.3.2.jar,/opt/emr-tablestore-1.4.1.jar,/opt/kangze8.jar --class com.Fifty_millisecond_ratio /opt/kangze8.jar --master 192.168.0.72:7077
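
Note: spark-submit treats everything after the primary application JAR as arguments to your main class, so the trailing --master 192.168.0.72:7077 above never reaches spark-submit itself; a standalone master URL also needs the spark:// scheme. A corrected form of the command (a sketch, assuming a standalone master at 192.168.0.72:7077) would be:

./bin/spark-submit \
  --master spark://192.168.0.72:7077 \
  --class com.Fifty_millisecond_ratio \
  --jars /opt/aliyun-sdk-oss-2.8.2.jar,/opt/aliyun-sdk-mns-1.1.8.jar,/opt/emr-mns_2.11-1.4.1.jar,/opt/emr-core-1.4.1.jar,/opt/aliyun-log-0.6.6.jar,/opt/emr-logservice_2.11-1.4.1.jar,/opt/emr-maxcompute_2.11-1.4.1.jar,/opt/emr-ons_2.11-1.4.1.jar,/opt/emr-sdk_2.11-1.3.2.jar,/opt/emr-tablestore-1.4.1.jar \
  /opt/kangze8.jar

(The application JAR /opt/kangze8.jar does not need to be repeated in --jars.)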
Result:

java.lang.ClassNotFoundException: com.aliyun.oss.OSSClient

at com.aliyun.fs.oss.nat.NativeOssFileSystem.initialize(NativeOssFileSystem.java:138)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2669)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2703)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2685)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:373)
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:258)
at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:229)
at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:315)
at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:200)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:253)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:251)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:251)
at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:253)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:251)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:251)
at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:253)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:251)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:251)
at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:253)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:251)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:251)
at org.apache.spark.Partitioner$$anonfun$4.apply(Partitioner.scala:75)
at org.apache.spark.Partitioner$$anonfun$4.apply(Partitioner.scala:75)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.immutable.List.map(List.scala:285)
at org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:75)
at org.apache.spark.rdd.RDD$$anonfun$groupBy$1.apply(RDD.scala:691)
at org.apache.spark.rdd.RDD$$anonfun$groupBy$1.apply(RDD.scala:691)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.RDD.groupBy(RDD.scala:690)
at com.Fifty_millisecond_ratio$.main(Fifty_millisecond_ratio.scala:34)
at com.Fifty_millisecond_ratio.main(Fifty_millisecond_ratio.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:879)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:197)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:227)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:136)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

Caused by: java.lang.ClassNotFoundException: com.aliyun.oss.OSSClient

at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at com.aliyun.fs.oss.utils.OSSClientAgent.<init>(OSSClientAgent.java:86)
at com.aliyun.fs.oss.nat.JetOssNativeFileSystemStore.initialize(JetOssNativeFileSystemStore.java:140)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy17.initialize(Unknown Source)
at com.aliyun.fs.oss.nat.NativeOssFileSystem.initialize(NativeOssFileSystem.java:135)
... 55 more
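
The "Caused by" section shows com.aliyun.fs.oss.utils.OSSClientAgent loading com.aliyun.oss.OSSClient through its own URLClassLoader, so a jar passed only via --jars may not be visible to it. A common workaround (a sketch, assuming Spark 2.x with $SPARK_HOME set, run on every node) is to put the OSS SDK directly on Spark's JVM classpath:

cp /opt/aliyun-sdk-oss-2.8.2.jar $SPARK_HOME/jars/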

李权001 2018-03-28 15:09:43
3 answers
  • Hi, how did you end up solving this problem?

    2019-11-06 11:07:54
  • This error can also occur if the jar file lacks read permission. On my server all of the other jars under lib had the right permissions, but the OSS jar had been uploaded separately later without them, and that alone produced this exact error.
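
    A quick way to verify and fix this (a sketch, assuming the jars live under /opt):

    ls -l /opt/aliyun-sdk-oss-2.8.2.jar      # confirm the file is readable by the user running Spark
    chmod 644 /opt/aliyun-sdk-oss-2.8.2.jar  # grant read permission if it is missing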

    2019-08-13 10:15:26
  • Check whether any dependency jars failed to be pulled in via the -resources parameter.
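
    (spark-submit itself has no -resources option; dependencies are normally attached with --jars, so this may refer to an EMR console field. If --jars does not help, another option is to put the OSS SDK on the driver and executor classpaths explicitly; a sketch, assuming the jar exists at the same path on every node:)

    ./bin/spark-submit \
      --master spark://192.168.0.72:7077 \
      --conf spark.driver.extraClassPath=/opt/aliyun-sdk-oss-2.8.2.jar \
      --conf spark.executor.extraClassPath=/opt/aliyun-sdk-oss-2.8.2.jar \
      --class com.Fifty_millisecond_ratio \
      /opt/kangze8.jar

    (The other --jars from the original command still apply; they are omitted here for brevity.)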

    2019-07-17 22:03:33