
PyCharm with PySpark: still getting an error even though os.environ['PYSPARK_PYTHON'] is configured. Any help appreciated!


The job runs without error as long as no Python function has to be executed on the workers:

from pyspark import SparkConf, SparkContext
import os

# Point the Spark worker processes at a specific Python interpreter
os.environ['PYSPARK_PYTHON'] = "D:/python/Python312/python.exe"

conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

rdd1 = sc.parallelize([1, 2, 3])
print(rdd1.collect())

# rdd2 = rdd1.map(lambda x: x + 1)   # commented out: nothing is shipped to Python workers
# print(rdd2.collect())

sc.stop()
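
To rule out an interpreter mismatch, the versions in play can be printed with a short diagnostic like the one below. This is only a sketch I am adding for context; it assumes PySpark is importable from the same interpreter that runs the script. The pairing matters because each PySpark release supports only a specific range of Python versions.

import sys

import pyspark

# Which interpreter is driving, and which PySpark release is installed?
# Each PySpark release documents the Python versions it supports.
print(sys.executable)       # should match the PYSPARK_PYTHON path above
print(sys.version)          # driver Python version, 3.12.x here
print(pyspark.__version__)  # installed PySpark version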

But as soon as a Python function is called on the workers (the map with a lambda below), the job fails:

from pyspark import SparkConf, SparkContext
import os

# Same configuration as above
os.environ['PYSPARK_PYTHON'] = "D:/python/Python312/python.exe"

conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

rdd1 = sc.parallelize([1, 2, 3])
print(rdd1.collect())

# This map() ships a Python lambda to the workers and triggers the crash
rdd2 = rdd1.map(lambda x: x + 1)
print(rdd2.collect())

sc.stop()

The full error output:

D:\python\Python312\python.exe D:\python-learn\rddleader.py 
23/11/07 22:39:05 WARN Shell: Did not find winutils.exe: java.io.FileNotFoundException: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. -see https://wiki.apache.org/hadoop/WindowsProblems
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
23/11/07 22:39:05 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[1, 2, 3]
23/11/07 22:39:08 ERROR Executor: Exception in task 3.0 in stage 1.0 (TID 19)
org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)
    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:612)
    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:594)
    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:789)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:766)
    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:525)
    at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
    at scala.collection.Iterator.foreach(Iterator.scala:943)
    at scala.collection.Iterator.foreach$(Iterator.scala:943)
    at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)
    at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)
    at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)
    at scala.collection.TraversableOnce.to(TraversableOnce.scala:366)
    at scala.collection.TraversableOnce.to$(TraversableOnce.scala:364)
    at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)
    at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:358)
    at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:358)
    at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)
    at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:345)
    at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:339)
    at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)
    at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)
    at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
    at org.apache.spark.scheduler.Task.run(Task.scala:141)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
    at java.base/java.lang.Thread.run(Thread.java:1583)
Caused by: java.io.EOFException
    at java.base/java.io.DataInputStream.readFully(DataInputStream.java:210)
    at java.base/java.io.DataInputStream.readInt(DataInputStream.java:385)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)
    ... 32 more
23/11/07 22:39:08 WARN TaskSetManager: Lost task 3.0 in stage 1.0 (TID 19) (zBOOK executor driver): org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)
    [same "Python worker exited unexpectedly (crashed)" stack trace as in the ERROR above]
Caused by: java.io.EOFException
    at java.base/java.io.DataInputStream.readFully(DataInputStream.java:210)
    at java.base/java.io.DataInputStream.readInt(DataInputStream.java:385)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)
    ... 32 more

23/11/07 22:39:08 ERROR TaskSetManager: Task 3 in stage 1.0 failed 1 times; aborting job
Traceback (most recent call last):
  File "D:\python-learn\rddleader.py", line 12, in <module>
    print(rdd2.collect())
          ^^^^^^^^^^^^^^
  File "D:\python\Python312\Lib\site-packages\pyspark\rdd.py", line 1833, in collect
    sock_info = self.ctx._jvm.PythonRDD.collectAndServe(self._jrdd.rdd())
                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\python\Python312\Lib\site-packages\py4j\java_gateway.py", line 1322, in __call__
    return_value = get_return_value(
                   ^^^^^^^^^^^^^^^^^
  File "D:\python\Python312\Lib\site-packages\py4j\protocol.py", line 326, in get_return_value
    raise Py4JJavaError(
py4j.protocol.Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.collectAndServe.
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 3 in stage 1.0 failed 1 times, most recent failure: Lost task 3.0 in stage 1.0 (TID 19) (zBOOK executor driver): org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)
    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:612)
    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:594)
    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:789)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:766)
    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:525)
    at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
    at scala.collection.Iterator.foreach(Iterator.scala:943)
    at scala.collection.Iterator.foreach$(Iterator.scala:943)
    at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)
    at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)
    at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)
    at scala.collection.TraversableOnce.to(TraversableOnce.scala:366)
    at scala.collection.TraversableOnce.to$(TraversableOnce.scala:364)
    at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)
    at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:358)
    at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:358)
    at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)
    at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:345)
    at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:339)
    at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)
    at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)
    at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
    at org.apache.spark.scheduler.Task.run(Task.scala:141)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
    at java.base/java.lang.Thread.run(Thread.java:1583)
Caused by: java.io.EOFException
    at java.base/java.io.DataInputStream.readFully(DataInputStream.java:210)
    at java.base/java.io.DataInputStream.readInt(DataInputStream.java:385)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)
    ... 32 more

Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779)
    at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
    at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242)
    at scala.Option.foreach(Option.scala:407)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2438)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2463)
    at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1046)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:407)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:1045)
    at org.apache.spark.api.python.PythonRDD$.collectAndServe(PythonRDD.scala:195)
    at org.apache.spark.api.python.PythonRDD.collectAndServe(PythonRDD.scala)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:75)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:52)
    at java.base/java.lang.reflect.Method.invoke(Method.java:580)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
    at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
    at java.base/java.lang.Thread.run(Thread.java:1583)
Caused by: org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)
    [same "Python worker exited unexpectedly (crashed)" stack trace as above]
    ... 1 more
Caused by: java.io.EOFException
    at java.base/java.io.DataInputStream.readFully(DataInputStream.java:210)
    at java.base/java.io.DataInputStream.readInt(DataInputStream.java:385)
    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)
    ... 32 more

[Stage 1:>                                                        (0 + 15) / 16]
Process finished with exit code 1
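
For completeness, one commonly suggested variant of the configuration above pins the driver interpreter in addition to the worker interpreter. This is only a sketch of that suggestion, not a confirmed fix for this crash:

from pyspark import SparkConf, SparkContext
import os

# Set both variables before the SparkContext is created so the driver and
# the worker processes agree on the interpreter.
os.environ['PYSPARK_PYTHON'] = "D:/python/Python312/python.exe"
os.environ['PYSPARK_DRIVER_PYTHON'] = "D:/python/Python312/python.exe"

conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

rdd2 = sc.parallelize([1, 2, 3]).map(lambda x: x + 1)
print(rdd2.collect())  # [2, 3, 4] if the workers start correctly

sc.stop()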
 
