PySpark: environment variable is set, but calling a Python function throws an error. Any advice? (Solved)

A quick newbie question: I've set the environment variable, but the script errors out as soon as it runs. Where did I go wrong? Any pointers would be much appreciated, thanks!

Solved: it was a Python version problem. My Python version was too new; after I downgraded it, everything runs fine. Thanks, everyone.
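For anyone hitting the same thing: the "Python worker exited unexpectedly (crashed)" / java.io.EOFException combination usually means the worker Python is incompatible with (or different from) the installed PySpark. A minimal sketch of what to check, assuming the same venv path as in my code below (adjust the path for your own machine):

import os
import sys
import pyspark

# Pin BOTH the worker and the driver to the same interpreter
# (the path is my local venv; substitute your own).
interp = "E:/Python_code/pythonProject/.venv/Scripts/python.exe"
os.environ['PYSPARK_PYTHON'] = interp
os.environ['PYSPARK_DRIVER_PYTHON'] = interp

# Make sure this Python version is one your PySpark release supports;
# if it is too new, downgrade Python (what I did) or upgrade PySpark.
print(sys.version)          # driver-side Python version
print(pyspark.__version__)  # installed PySpark version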

The original code that triggers the error:

from pyspark import SparkConf, SparkContext
import os
os.environ['PYSPARK_PYTHON'] = "E:/Python_code/pythonProject/.venv/Scripts/python.exe"
# Create a SparkConf object
conf = SparkConf().setMaster("local[*]").setAppName("test_spark_app")
# Create a SparkContext from the SparkConf
sc = SparkContext(conf=conf)
# Print the Spark version
# print(sc.version)
rdd = sc.parallelize([1, 2, 3, 4])
print(rdd.collect())
print(rdd.map(lambda x: x * 10).collect())
# Stop the SparkContext
sc.stop()
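(When the interpreter and PySpark versions are compatible, this script should simply print [1, 2, 3, 4] followed by [10, 20, 30, 40].)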

Error output:

24/02/22 09:36:38 ERROR Executor: Exception in task 5.0 in stage 1.0 (TID 25)

org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:612)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:594)

    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:789)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:766)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:525)

    at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)

    at scala.collection.Iterator.foreach(Iterator.scala:943)

    at scala.collection.Iterator.foreach$(Iterator.scala:943)

    at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)

    at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)

    at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)

    at scala.collection.TraversableOnce.to(TraversableOnce.scala:366)

    at scala.collection.TraversableOnce.to$(TraversableOnce.scala:364)

    at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:358)

    at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:358)

    at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:345)

    at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:339)

    at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)

    at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)

    at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)

    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)

    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)

    at org.apache.spark.scheduler.Task.run(Task.scala:141)

    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)

    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)

    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)

    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

    at java.lang.Thread.run(Thread.java:748)

Caused by: java.io.EOFException

    at java.io.DataInputStream.readInt(DataInputStream.java:392)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)

    ... 32 more

24/02/22 09:36:38 WARN TaskSetManager: Lost task 5.0 in stage 1.0 (TID 25) (GYG-LZC executor driver): org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:612)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:594)

    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:789)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:766)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:525)

    at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)

    at scala.collection.Iterator.foreach(Iterator.scala:943)

    at scala.collection.Iterator.foreach$(Iterator.scala:943)

    at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)

    at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)

    at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)

    at scala.collection.TraversableOnce.to(TraversableOnce.scala:366)

    at scala.collection.TraversableOnce.to$(TraversableOnce.scala:364)

    at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:358)

    at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:358)

    at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:345)

    at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:339)

    at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)

    at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)

    at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)

    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)

    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)

    at org.apache.spark.scheduler.Task.run(Task.scala:141)

    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)

    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)

    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)

    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

    at java.lang.Thread.run(Thread.java:748)

Caused by: java.io.EOFException

    at java.io.DataInputStream.readInt(DataInputStream.java:392)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)

    ... 32 more

24/02/22 09:36:38 ERROR TaskSetManager: Task 5 in stage 1.0 failed 1 times; aborting job

Traceback (most recent call last):

  File "E:\Python_code\pythonProject\spark_test.py", line 14, in

    print(rdd.map(lambda x: x * 10).collect())

          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

  File "E:\Python_code\pythonProject\.venv\Lib\site-packages\pyspark\rdd.py", line 1833, in collect

    sock_info = self.ctx._jvm.PythonRDD.collectAndServe(self._jrdd.rdd())

                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

  File "E:\Python_code\pythonProject\.venv\Lib\site-packages\py4j\java_gateway.py", line 1322, in __call__

    return_value = get_return_value(

                   ^^^^^^^^^^^^^^^^^

  File "E:\Python_code\pythonProject\.venv\Lib\site-packages\py4j\protocol.py", line 326, in get_return_value

    raise Py4JJavaError(

py4j.protocol.Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.collectAndServe.

: org.apache.spark.SparkException: Job aborted due to stage failure: Task 5 in stage 1.0 failed 1 times, most recent failure: Lost task 5.0 in stage 1.0 (TID 25) (GYG-LZC executor driver): org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:612)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:594)

    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:789)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:766)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:525)

    at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)

    at scala.collection.Iterator.foreach(Iterator.scala:943)

    at scala.collection.Iterator.foreach$(Iterator.scala:943)

    at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)

    at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)

    at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)

    at scala.collection.TraversableOnce.to(TraversableOnce.scala:366)

    at scala.collection.TraversableOnce.to$(TraversableOnce.scala:364)

    at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:358)

    at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:358)

    at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:345)

    at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:339)

    at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)

    at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)

    at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)

    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)

    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)

    at org.apache.spark.scheduler.Task.run(Task.scala:141)

    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)

    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)

    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)

    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

    at java.lang.Thread.run(Thread.java:748)

Caused by: java.io.EOFException

    at java.io.DataInputStream.readInt(DataInputStream.java:392)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)

    ... 32 more

Driver stacktrace:

    at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844)

    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780)

    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779)

    at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)

    at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)

    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)

    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779)

    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242)

    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242)

    at scala.Option.foreach(Option.scala:407)

    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242)

    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048)

    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982)

    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971)

    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)

    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984)

    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)

    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)

    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2438)

    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2463)

    at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1046)

    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)

    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)

    at org.apache.spark.rdd.RDD.withScope(RDD.scala:407)

    at org.apache.spark.rdd.RDD.collect(RDD.scala:1045)

    at org.apache.spark.api.python.PythonRDD$.collectAndServe(PythonRDD.scala:195)

    at org.apache.spark.api.python.PythonRDD.collectAndServe(PythonRDD.scala)

    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

    at java.lang.reflect.Method.invoke(Method.java:498)

    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)

    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)

    at py4j.Gateway.invoke(Gateway.java:282)

    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)

    at py4j.commands.CallCommand.execute(CallCommand.java:79)

    at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)

    at py4j.ClientServerConnection.run(ClientServerConnection.java:106)

    at java.lang.Thread.run(Thread.java:748)

Caused by: org.apache.spark.SparkException: Python worker exited unexpectedly (crashed)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:612)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator$$anonfun$1.applyOrElse(PythonRunner.scala:594)

    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:789)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:766)

    at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:525)

    at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)

    at scala.collection.Iterator.foreach(Iterator.scala:943)

    at scala.collection.Iterator.foreach$(Iterator.scala:943)

    at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)

    at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)

    at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)

    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)

    at scala.collection.TraversableOnce.to(TraversableOnce.scala:366)

    at scala.collection.TraversableOnce.to$(TraversableOnce.scala:364)

    at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:358)

    at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:358)

    at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)

    at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:345)

    at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:339)

    at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)

    at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)

    at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)

    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)

    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)

    at org.apache.spark.scheduler.Task.run(Task.scala:141)

    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)

    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)

    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)

    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)

    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

    ... 1 more

Caused by: java.io.EOFException

    at java.io.DataInputStream.readInt(DataInputStream.java:392)

    at org.apache.spark.api.python.PythonRunner$$anon$3.read(PythonRunner.scala:774)

    ... 32 more
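Follow-up in case it helps someone debug: once the worker process can start at all, you can confirm which interpreter the executors are actually using by shipping sys.version back from the tasks. A small sketch, reusing the sc from the script above:

import sys
# Each task reports the Python version of the worker process it runs in;
# if it does not match the driver's sys.version, that mismatch is the problem.
print(sc.parallelize([1, 2], 2).map(lambda _: sys.version).collect())
print(sys.version)  # driver-side version, for comparison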