
Spark and AWS Kinesis

I am trying to connect to Kinesis from Apache Spark 2.4.0 running in standalone mode. I provide my credentials as follows:

val cred = SparkAWSCredentials.builder.basicCredentials("{awsAccessKeyId}", "{awsSecretAccessKey}").build()
and get an error like this:

java.lang.NoSuchMethodError: org.apache.spark.internal.Logging.$init$(Lorg/apache/spark/internal/Logging;)V
    at org.apache.spark.streaming.kinesis.BasicCredentials.<init>(SparkAWSCredentials.scala:51)
    at org.apache.spark.streaming.kinesis.SparkAWSCredentials$Builder.basicCredentials(SparkAWSCredentials.scala:116)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:34)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:39)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:41)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:43)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:45)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:47)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:49)
    at $line18.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:51)
    at $line18.$read$$iw$$iw$$iw$$iw.<init>(<console>:53)
    at $line18.$read$$iw$$iw$$iw.<init>(<console>:55)
    at $line18.$read$$iw$$iw.<init>(<console>:57)
    at $line18.$read$$iw.<init>(<console>:59)
    at $line18.$read.<init>(<console>:61)
    at $line18.$read$.<init>(<console>:65)
    at $line18.$read$.<clinit>(<console>)
    at $line18.$eval$.$print$lzycompute(<console>:7)
    at $line18.$eval$.$print(<console>:6)
    at $line18.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)
    at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:644)
    at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
    at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:644)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
    at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:819)
    at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:691)
    at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:404)
    at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:425)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:285)
    at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
    at org.apache.spark.repl.Main$.doMain(Main.scala:78)
    at org.apache.spark.repl.Main$.main(Main.scala:58)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

When I instead try to connect with credentials supplied as environment variables, or by setting them like this:

sc.hadoopConfiguration.set("fs.s3n.awsAccessKeyId", "{}")
sc.hadoopConfiguration.set("fs.s3n.awsSecretAccessKey","{}")
I get an error like this:

java.lang.NoSuchMethodError: scala.Product.$init$(Lscala/Product;)V
    at org.apache.spark.streaming.kinesis.DefaultCredentials$.<init>(SparkAWSCredentials.scala:39)
    at org.apache.spark.streaming.kinesis.DefaultCredentials$.<clinit>(SparkAWSCredentials.scala)
    at org.apache.spark.streaming.kinesis.KinesisInputDStream$Builder.$anonfun$buildWithMessageHandler$6(KinesisInputDStream.scala:291)
    at scala.Option.getOrElse(Option.scala:121)
    at org.apache.spark.streaming.kinesis.KinesisInputDStream$Builder.buildWithMessageHandler(KinesisInputDStream.scala:291)
    at org.apache.spark.streaming.kinesis.KinesisInputDStream$Builder.build(KinesisInputDStream.scala:302)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:43)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:48)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:50)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:52)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:54)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:56)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:58)
    at $line24.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:60)
    at $line24.$read$$iw$$iw$$iw$$iw.<init>(<console>:62)
    at $line24.$read$$iw$$iw$$iw.<init>(<console>:64)
    at $line24.$read$$iw$$iw.<init>(<console>:66)
    at $line24.$read$$iw.<init>(<console>:68)
    at $line24.$read.<init>(<console>:70)
    at $line24.$read$.<init>(<console>:74)
    at $line24.$read$.<clinit>(<console>)
    at $line24.$eval$.$print$lzycompute(<console>:7)
    at $line24.$eval$.$print(<console>:6)
    at $line24.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)
    at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:644)
    at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
    at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:644)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
    at scala.tools.nsc.interpreter.ILoop$$anonfun$20.apply(ILoop.scala:762)
    at scala.tools.nsc.interpreter.ILoop$$anonfun$20.apply(ILoop.scala:762)
    at scala.tools.nsc.interpreter.IMain.withLabel(IMain.scala:116)
    at scala.tools.nsc.interpreter.ILoop.interpretCode$1(ILoop.scala:762)
    at scala.tools.nsc.interpreter.ILoop.pasteCommand(ILoop.scala:776)
    at scala.tools.nsc.interpreter.ILoop$$anonfun$standardCommands$9.apply(ILoop.scala:217)
    at scala.tools.nsc.interpreter.ILoop$$anonfun$standardCommands$9.apply(ILoop.scala:217)
    at scala.tools.nsc.interpreter.LoopCommands$LineCmd.apply(LoopCommands.scala:62)
    at scala.tools.nsc.interpreter.ILoop.colonCommand(ILoop.scala:698)
    at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:689)
    at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:404)
    at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:425)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:285)
    at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
    at org.apache.spark.repl.Main$.doMain(Main.scala:78)
    at org.apache.spark.repl.Main$.main(Main.scala:58)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

Asked by 社区小助手 on 2018-12-06 13:58:38
1 Answer
  • 社区小助手 is an administrator of the Spark China community. I regularly post material such as livestream recaps and article digests, and I also consolidate the Spark questions and answers raised in the DingTalk group.

    I got the same error. In my case it was because I was using spark-streaming-kinesis-asl_2.12 instead of spark-streaming-kinesis-asl_2.11: both NoSuchMethodError traces above are the classic symptom of loading an artifact built for one Scala binary version into a Spark distribution built against another, and the pre-built Spark 2.4.0 binaries are compiled against Scala 2.11. A dependency sketch follows below.
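    A minimal build.sbt sketch of a consistent setup (assuming the stock Spark 2.4.0 distribution built against Scala 2.11; the coordinates are the standard Maven ones, and the patch version 2.11.12 is an assumption matching Spark 2.4.0's default build):

    // build.sbt -- %% appends the Scala binary suffix (here _2.11) automatically,
    // so the Spark artifacts cannot drift onto different Scala versions.
    scalaVersion := "2.11.12"

    libraryDependencies ++= Seq(
      // Core and streaming ship inside the Spark distribution, hence Provided.
      "org.apache.spark" %% "spark-core"                  % "2.4.0" % Provided,
      "org.apache.spark" %% "spark-streaming"             % "2.4.0" % Provided,
      // The Kinesis connector is not bundled with Spark, so it is packaged with
      // the application (or passed to spark-submit via --packages).
      "org.apache.spark" %% "spark-streaming-kinesis-asl" % "2.4.0"
    )

    In the spark-shell, the matching artifact can be pulled in with --packages org.apache.spark:spark-streaming-kinesis-asl_2.11:2.4.0, and evaluating scala.util.Properties.versionString in the shell confirms which Scala version the running Spark build actually uses.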

    2019-07-17 23:18:31