extraction-framework
Check whether the core module really needs the Spark and Guava dependencies.
Most of these usages appear to be newly introduced; see the grep results below for every Spark reference currently in core.
grep -R spark *
main/scala/org/dbpedia/databus/mod/EvalMod.scala:import org.apache.spark.sql.{SQLContext, SparkSession}
main/scala/org/dbpedia/databus/mod/EvalMod.scala: val sparkSession = SparkSession.builder()
main/scala/org/dbpedia/databus/mod/EvalMod.scala: .config("spark.local.dir", "./.spark")
main/scala/org/dbpedia/databus/mod/EvalMod.scala: sparkSession.sparkContext.setLogLevel("WARN")
main/scala/org/dbpedia/databus/mod/EvalMod.scala: val sqlContext: SQLContext = sparkSession.sqlContext
main/scala/org/dbpedia/validation/ValidationLauncher.scala:import org.apache.spark.sql.{SQLContext, SparkSession}
main/scala/org/dbpedia/validation/ValidationLauncher.scala: val sparkSession = SparkSession.builder()
main/scala/org/dbpedia/validation/ValidationLauncher.scala: .config("spark.local.dir", "./.spark")
main/scala/org/dbpedia/validation/ValidationLauncher.scala: sparkSession.sparkContext.setLogLevel("WARN")
main/scala/org/dbpedia/validation/ValidationLauncher.scala: val sqlContext: SQLContext = sparkSession.sqlContext
main/scala/org/dbpedia/validation/ValidationExecutor.scala:import org.apache.spark.broadcast.Broadcast
main/scala/org/dbpedia/validation/ValidationExecutor.scala:import org.apache.spark.sql.SQLContext
main/scala/org/dbpedia/validation/ValidationExecutor.scala: val brdcstTestSuit: Broadcast[TestSuite] = sqlContext.sparkSession.sparkContext.broadcast(testSuite)
main/scala/org/dbpedia/extraction/config/Config.scala: lazy val sparkMaster: String = Option(getString(this, "spark-master")).getOrElse("local[*]")
main/scala/org/dbpedia/extraction/config/Config.scala: lazy val sparkLocalDir: String = Option(getString(this, "spark-local-dir")).getOrElse("")
main/scala/org/dbpedia/extraction/mappings/LabelExtractor.scala:import org.apache.spark.SparkContext
main/resources/universal.properties:spark-master=local[4]
main/resources/universal.properties:spark-local-dir=/home/extractor/data/
test/scala/org/dbpedia/iri/IRI_Test_Suite.scala:import org.apache.spark.sql.SparkSession
test/scala/org/dbpedia/iri/IRI_Test_Suite.scala: System.setProperty("log4j.logger.org.apache.spark.SparkContext", "WARN")
test/scala/org/dbpedia/iri/IRI_Test_Suite.scala: val sparkSession = SparkSession.builder().config("hadoop.home.dir", "./hadoop")
test/scala/org/dbpedia/iri/IRI_Test_Suite.scala: // sparkSession.sparkContext.setLogLevel("WARN")
test/scala/org/dbpedia/iri/IRI_Test_Suite.scala: val sqlContext = sparkSession.sqlContext
test/scala/org/dbpedia/validation/TestSuiteTests.scala:import org.apache.spark.sql.{SQLContext, SparkSession}
test/scala/org/dbpedia/validation/TestSuiteTests.scala: val sparkSession = SparkSession.builder()
test/scala/org/dbpedia/validation/TestSuiteTests.scala: .config("spark.local.dir", "./.spark")
test/scala/org/dbpedia/validation/TestSuiteTests.scala: sparkSession.sparkContext.setLogLevel("WARN")
test/scala/org/dbpedia/validation/TestSuiteTests.scala: val sqlContext: SQLContext = sparkSession.sqlContext