nebula-up
Is something wrong with spark/AI_suite_demo.ipynb?
When I run the following cell:

```python
from ng_ai import NebulaReader
from ng_ai.config import NebulaGraphConfig

# read data with the spark engine, scan mode
# ensure the "demo_basketballplayer" space was created in NebulaGraph Studio
config = NebulaGraphConfig(space="demo_basketballplayer")
reader = NebulaReader(engine="spark", config=config)
reader.scan(edge="follow", props="degree")
df = reader.read()
df.show(2)
```
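Before the scan, it is worth confirming that the space really exists. This is a minimal check with nebula3-python, assuming the default local graphd address and credentials of a nebula-up deployment (127.0.0.1:9669, root / nebula); adjust them if your setup differs:

```python
# Sanity check that the "demo_basketballplayer" space exists.
# Host/port and credentials are assumptions (nebula-up defaults).
from nebula3.Config import Config
from nebula3.gclient.net import ConnectionPool

pool = ConnectionPool()
pool.init([("127.0.0.1", 9669)], Config())
with pool.session_context("root", "nebula") as session:
    result = session.execute("SHOW SPACES;")
    print(result)  # the output should list demo_basketballplayer
pool.close()
```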
The reader.read() call then fails with this traceback:

```
Py4JJavaError                             Traceback (most recent call last)
/tmp/ipykernel_907/3158679048.py in <module>

/usr/lib/python3.7/site-packages/ng_ai/nebula_reader.py in read(self, **kwargs)
    221         if self.raw_df_reader is None:
    222             raise Exception("No data loaded, please use scan or query first")
--> 223         self.raw_df = self.raw_df_reader.load()
    224         self.df = NebulaDataFrameObject(engine=self.engine, data=self.raw_df)
    225         return self.df

/spark/python/pyspark/sql/readwriter.py in load(self, path, format, schema, **options)
    170             return self._df(self._jreader.load(self._spark._sc._jvm.PythonUtils.toSeq(path)))
    171         else:
--> 172             return self._df(self._jreader.load())
    173
    174     @since(1.4)

/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258
   1259         for temp_arg in temp_args:

/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
     61     def deco(*a, **kw):
     62         try:
---> 63             return f(*a, **kw)
     64         except py4j.protocol.Py4JJavaError as e:
     65             s = e.java_exception.toString()

/spark/python/lib/py4j-0.10.7-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
--> 328                     format(target_id, ".", name), value)
    329             else:
    330                 raise Py4JError(

Py4JJavaError: An error occurred while calling o55.load.
: java.util.NoSuchElementException: key not found: operateType
	at scala.collection.MapLike$class.default(MapLike.scala:228)
	at org.apache.spark.sql.catalyst.util.CaseInsensitiveMap.default(CaseInsensitiveMap.scala:28)
	at scala.collection.MapLike$class.apply(MapLike.scala:141)
	at org.apache.spark.sql.catalyst.util.CaseInsensitiveMap.apply(CaseInsensitiveMap.scala:28)
	at com.vesoft.nebula.connector.NebulaOptions.
```
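The Java-side exception is raised while com.vesoft.nebula.connector.NebulaOptions reads its option map, so the exact PySpark build and nebula-spark-connector jar in use are relevant details when reproducing this. A small sketch to record them; the /spark/jars path is an assumption based on the paths shown in the traceback above:

```python
# Record the environment, not a fix: "key not found: operateType" comes from
# the connector's option parsing, so note which PySpark build and which
# nebula-spark-connector jar are on the classpath.
import glob

import pyspark

print("PySpark version:", pyspark.__version__)
# Jar location is an assumption; adjust if Spark lives elsewhere.
print("NebulaGraph connector jars:", glob.glob("/spark/jars/*nebula*"))
```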