
Running your code in Google Colaboratory

farhad-abdi opened this issue 4 years ago · 0 comments

Hi, have you tried running your code in Colab? Could you specify an environment in which I can run the code? While running it I got the following error:

```
Py4JJavaError                             Traceback (most recent call last)
<ipython-input> in <module>()
     35
     36 weights_mean_rdd = out.reduceByKey(mean_weights)
---> 37 wm = weights_mean_rdd.collect()
     38 # Do something with wm
     39

/usr/local/lib/python3.6/dist-packages/pyspark/rdd.py in collect(self)
    814         """
    815         with SCCallSiteSync(self.context) as css:
--> 816             sock_info = self.ctx._jvm.PythonRDD.collectAndServe(self._jrdd.rdd())
    817         return list(_load_from_socket(sock_info, self._jrdd_deserializer))
    818

/usr/local/lib/python3.6/dist-packages/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258
   1259         for temp_arg in temp_args:

/usr/local/lib/python3.6/dist-packages/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
--> 328                     format(target_id, ".", name), value)
    329             else:
    330                 raise Py4JError(

Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.collectAndServe.
: java.lang.IllegalArgumentException: Unsupported class file major version 55
	at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:166)
	at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:148)
	at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:136)
	at org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:237)
	at org.apache.spark.util.ClosureCleaner$.getClassReader(ClosureCleaner.scala:50)
	at org.apache.spark.util.FieldAccessFinder$$anon$4$$anonfun$visitMethodInsn$7.apply(ClosureCleaner.scala:845)
	at org.apache.spark.util.FieldAccessFinder$$anon$4$$anonfun$visitMethodInsn$7.apply(ClosureCleaner.scala:828)
	at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
	at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
	at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
	at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:236)
	at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
	at scala.collection.mutable.HashMap$$anon$1.foreach(HashMap.scala:134)
	at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
	at org.apache.spark.util.FieldAccessFinder$$anon$4.visitMethodInsn(ClosureCleaner.scala:828)
	at org.apache.xbean.asm6.ClassReader.readCode(ClassReader.java:2175)
	at org.apache.xbean.asm6.ClassReader.readMethod(ClassReader.java:1238)
	at org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:631)
	at org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:355)
	at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:272)
	at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:271)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:271)
	at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:163)
	at org.apache.spark.SparkContext.clean(SparkContext.scala:2326)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2100)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:990)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:385)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:989)
	at org.apache.spark.api.python.PythonRDD$.collectAndServe(PythonRDD.scala:166)
	at org.apache.spark.api.python.PythonRDD.collectAndServe(PythonRDD.scala)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:238)
	at java.base/java.lang.Thread.run(Thread.java:834)
```

farhad-abdi · Jun 09 '20 12:06
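For reference, "Unsupported class file major version 55" means the JVM running Spark is Java 11 (whose class files carry major version 55), while the `xbean.asm6` frames in the trace point to Spark 2.4.x, whose closure cleaner cannot parse Java 11 bytecode; Spark 2.4 officially supports only Java 8. Below is a minimal sketch of the common Colab workaround, installing Java 8 and pointing `JAVA_HOME` at it before creating a `SparkContext`. The package name and install path are the standard Ubuntu/Colab defaults and a pip-installed `pyspark` is assumed; none of this is taken from the repository itself.

```python
# Sketch of a common Colab workaround (assumed Ubuntu package and path,
# not taken from this repo): run Spark 2.4 on Java 8 instead of Java 11.
import os

# Install Java 8 alongside Colab's default JDK
# (the "!" prefix runs a shell command in a notebook cell).
!apt-get install -y openjdk-8-jdk-headless -qq > /dev/null

# Make py4j launch the Spark JVM with Java 8; this must happen
# before the first SparkContext is created.
os.environ["JAVA_HOME"] = "/usr/lib/jvm/java-8-openjdk-amd64"

from pyspark import SparkContext

sc = SparkContext.getOrCreate()
# Any action previously failed with "Unsupported class file major version 55";
# on Java 8 this should print [1, 2, 3].
print(sc.parallelize([1, 2, 3]).collect())
```

Alternatively, upgrading to Spark 3.x, which supports Java 11, avoids downgrading Java at all.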