Sparklify cannot read the example rdf.nt file
earthquakesan opened this issue · 2 comments
earthquakesan commented
sparklify_1 | 17/05/11 13:19:36 WARN function.FunctionRegistry: Class org.aksw.sparqlify.core.RdfTerm is not a Function
sparklify_1 | 17/05/11 13:19:36 WARN cast.TypeSystemImpl: Skipping: date, date
sparklify_1 | 17/05/11 13:19:36 WARN cast.TypeSystemImpl: Skipping: integer, integer
sparklify_1 | 17/05/11 13:19:36 WARN cast.TypeSystemImpl: Skipping: float, float
sparklify_1 | 17/05/11 13:19:36 WARN cast.TypeSystemImpl: Skipping: geography, geography
sparklify_1 | 17/05/11 13:19:36 WARN cast.TypeSystemImpl: Skipping: geometry, geometry
sparklify_1 | 17/05/11 13:19:36 WARN cast.TypeSystemImpl: Skipping: timestamp, timestamp
sparklify_1 | 17/05/11 13:19:36 INFO storage.BlockManagerInfo: Removed broadcast_1_piece0 on 172.28.0.8:34731 in memory (size: 3.0 KB, free: 366.3 MB)
sparklify_1 | 17/05/11 13:19:36 INFO storage.BlockManagerInfo: Removed broadcast_2_piece0 on 172.28.0.8:34731 in memory (size: 2.4 KB, free: 366.3 MB)
sparklify_1 | 17/05/11 13:19:36 INFO spark.ContextCleaner: Cleaned shuffle 0
sparklify_1 | Processing: RdfPartitionDefault(1,http://commons.dbpedia.org/property/source,2,http://www.w3.org/2001/XMLSchema#string,true)
sparklify_1 | 17/05/11 13:19:40 INFO execution.SparkSqlParser: Parsing command: source
sparklify_1 | Processing: RdfPartitionDefault(1,http://commons.dbpedia.org/property/otherVersions,2,http://www.w3.org/2001/XMLSchema#string,true)
sparklify_1 | 17/05/11 13:19:40 INFO execution.SparkSqlParser: Parsing command: otherVersions
sparklify_1 | Processing: RdfPartitionDefault(1,http://commons.dbpedia.org/property/eo,2,http://www.w3.org/2001/XMLSchema#string,true)
sparklify_1 | 17/05/11 13:19:40 INFO execution.SparkSqlParser: Parsing command: eo
sparklify_1 | Processing: RdfPartitionDefault(1,http://commons.dbpedia.org/property/width,2,http://dbpedia.org/datatype/perCent,false)
sparklify_1 | Exception in thread "main" java.lang.RuntimeException: Unsupported object type: http://dbpedia.org/datatype/perCent
sparklify_1 | at net.sansa_stack.rdf.partition.core.RdfPartitionerDefault$.determineLayoutDatatype(RdfPartitionerDefault.scala:103)
sparklify_1 | at net.sansa_stack.rdf.partition.core.RdfPartitionerDefault$.determineLayout(RdfPartitionerDefault.scala:84)
sparklify_1 | at net.sansa_stack.rdf.partition.core.RdfPartitionDefault.layout(RdfPartitionDefault.scala:12)
sparklify_1 | at net.sansa_stack.query.spark.server.SparqlifyUtils3$$anonfun$1.apply(SparqlifyUtils3.scala:53)
sparklify_1 | at net.sansa_stack.query.spark.server.SparqlifyUtils3$$anonfun$1.apply(SparqlifyUtils3.scala:42)
sparklify_1 | at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
sparklify_1 | at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
sparklify_1 | at scala.collection.immutable.HashMap$HashMap1.foreach(HashMap.scala:221)
sparklify_1 | at scala.collection.immutable.HashMap$HashTrieMap.foreach(HashMap.scala:428)
sparklify_1 | at scala.collection.immutable.HashMap$HashTrieMap.foreach(HashMap.scala:428)
sparklify_1 | at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
sparklify_1 | at scala.collection.AbstractTraversable.map(Traversable.scala:104)
sparklify_1 | at net.sansa_stack.query.spark.server.SparqlifyUtils3$.createSparqlSqlRewriter(SparqlifyUtils3.scala:42)
sparklify_1 | at net.sansa_stack.examples.spark.query.Sparklify$.main(Sparklify.scala:53)
sparklify_1 | at net.sansa_stack.examples.spark.query.Sparklify.main(Sparklify.scala)
sparklify_1 | at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
sparklify_1 | at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
sparklify_1 | at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
sparklify_1 | at java.lang.reflect.Method.invoke(Method.java:498)
sparklify_1 | at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:743)
sparklify_1 | at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
sparklify_1 | at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
sparklify_1 | at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
sparklify_1 | at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
LorenzBuehmann commented