This repository has been archived by the owner on Oct 8, 2020. It is now read-only.

Merge branch 'develop' into feature/flink-jena
GezimSejdiu committed Jun 28, 2019
2 parents b5e03b2 + 0f9741f commit 50260fe
Showing 9 changed files with 285 additions and 172 deletions.
13 changes: 13 additions & 0 deletions sansa-inference-common/pom.xml
@@ -130,6 +130,19 @@
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<archive>
<manifest>
<mainClass>fully.qualified.MainClass</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
</plugins>
</build>
</project>
66 changes: 66 additions & 0 deletions sansa-inference-spark/pom.xml
@@ -29,6 +29,10 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
<!-- <dependency>-->
<!-- <groupId>${project.groupId}</groupId>-->
<!-- <artifactId>sansa-inference-spark_${scala.binary.version}</artifactId>-->
<!-- </dependency>-->

<!-- SANSA OWL -->
<dependency>
@@ -338,8 +342,70 @@
<overWriteSnapshots>true</overWriteSnapshots>
</configuration>
</execution>
<execution>
<id>copy</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>
${project.build.directory}/lib
</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
<executions>
<!-- Run shade goal on package phase -->
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>jar-with-dependencies</shadedClassifierName>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>net.sansa_stack.inference.spark.forwardchaining.axioms.ForwardRuleReasonerOWLHorst</mainClass>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>${project.build.directory}/lib</classpathPrefix>
<mainClass>net.sansa_stack.inference.spark.forwardchaining.axioms.ForwardRuleReasonerOWLHorst</mainClass>
</manifest>
</archive>
</configuration>
</plugin>

</plugins>
<!--New resource locations-->
<resources>
4 changes: 2 additions & 2 deletions sansa-inference-spark/src/main/resources/ont_functional.owl
@@ -8,7 +8,7 @@ Ontology(<http://ex.com/ont/sample1.owl>
#Import(<http://www.example.com/my/2.0>)
# annotations
Annotation(foo:hasName "Name")
Annotation(bar:hasTitle "Title")
Annotation(:description "A longer
description running over
several lines")
@@ -35,7 +35,6 @@ Declaration(NamedIndividual(foo:indivB))
Declaration(NamedIndividual(foo:indivC))

## annotation axioms
AnnotationAssertion(bar:label bar:Cls1 "Class 1")
SubAnnotationPropertyOf(bar:annProp1 bar:annProp2)
AnnotationPropertyDomain(bar:annProp1 bar:Cls1)
AnnotationPropertyRange(bar:annProp1 bar:Cls2)
@@ -98,6 +97,7 @@ HasKey(bar:Cls1 () (bar:dataProp1))

## assertions -- 63
AnnotationAssertion(bar:annProp1 foo:indivB "ZYXW")
AnnotationAssertion(bar:label bar:Cls1 "Class 1")
SameIndividual(foo:sameAsIndivA foo:indivA)
SameIndividual(foo:indivC foo:sameAsIndivC)
DifferentIndividuals(foo:indivA foo:indivB)
sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/axioms/ForwardRuleReasonerOWLHorst.scala
@@ -6,6 +6,7 @@ import org.apache.spark.SparkContext
import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.sql.SparkSession
import org.semanticweb.owlapi.apibinding.OWLManager
import org.semanticweb.owlapi.model.AxiomType
import org.semanticweb.owlapi.model._

import scala.collection.JavaConverters._
@@ -22,24 +23,24 @@ import scala.collection.JavaConverters._
* @param parallelism The degree of parallelism
* @author Heba Mohamed
*/
class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) extends TransitiveReasoner{
class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 30) extends TransitiveReasoner{

def this(sc: SparkContext) = this(sc, sc.defaultParallelism)

def apply(axioms: RDD[OWLAxiom]): RDD[OWLAxiom] = {

val startTime = System.currentTimeMillis()
val axiomsRDD = axioms.cache() // cache this RDD because it will be used quite often

val manager = OWLManager.createOWLOntologyManager()
val dataFactory = manager.getOWLDataFactory

val axiomsRDD = axioms.cache() // cache this RDD because it will be used quite often
val startTime = System.currentTimeMillis()

// ------------ extract the schema elements -------------------
val classes: RDD[OWLClass] = axiomsRDD.flatMap {
case axiom: HasClassesInSignature => axiom.classesInSignature().iterator().asScala
case _ => null
}.filter(_ != null).distinct()
// val classes: RDD[OWLClass] = axiomsRDD.flatMap {
// case axiom: HasClassesInSignature => axiom.classesInSignature().iterator().asScala
// case _ => null
// }.filter(_ != null).distinct()

var subClassof = extractAxiom(axiomsRDD, AxiomType.SUBCLASS_OF)
var subDataProperty = extractAxiom(axiomsRDD, AxiomType.SUB_DATA_PROPERTY)
@@ -84,15 +85,16 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten
subClassof = sc.union(subClassof,
subC1.asInstanceOf[RDD[OWLAxiom]],
subC2.asInstanceOf[RDD[OWLAxiom]])
.distinct(parallelism)

// 2. we compute the transitive closure of rdfs:subPropertyOf and rdfs:subClassOf
// R1: (x rdfs:subClassOf y), (y rdfs:subClassOf z) -> (x rdfs:subClassOf z)

val tr = new TransitiveReasoner()

val subClassOfAxiomsTrans = tr.computeTransitiveClosure(subClassof, AxiomType.SUBCLASS_OF)
.asInstanceOf[RDD[OWLSubClassOfAxiom]]
.filter(a => a.getSubClass != a.getSuperClass) // to exclude axioms with (C owl:subClassOf C)
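// computeTransitiveClosure is defined in TransitiveReasoner and is not part of
// this diff; a minimal, hypothetical sketch of the fixed-point self-join such a
// step performs, on plain (sub, super) pairs (illustration only, not the commit's code):
//
// def closure(edges: RDD[(OWLClassExpression, OWLClassExpression)])
//     : RDD[(OWLClassExpression, OWLClassExpression)] = {
//   var tc = edges.distinct().cache()
//   var size = tc.count()
//   var grown = true
//   while (grown) {
//     // join (x, y) with (y, z) on the shared middle element y to derive (x, z)
//     val derived = tc.map(_.swap).join(tc).map { case (_, (x, z)) => (x, z) }
//     tc = tc.union(derived).distinct().cache()
//     val newSize = tc.count()
//     grown = newSize > size
//     size = newSize
//   }
//   tc
// }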

// Convert all RDDs into maps which should be more efficient later on
val subClassMap = CollectionUtils
@@ -117,9 +119,6 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten
val equClassMap = equClass_Pairs
.map(a => (a.getOperandsAsList.get(0), a.getOperandsAsList.get(1))).collect.toMap

val equClassSwapMap = equClass_Pairs
.map(a => (a.getOperandsAsList.get(1), a.getOperandsAsList.get(0))).collect.toMap

// distribute the schema data structures by means of shared variables
// Assume that the schema data is less than the instance data

@@ -130,7 +129,7 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten
val dataRangeMapBC = sc.broadcast(dataRangeMap)
val objRangeMapBC = sc.broadcast(objRangeMap)
val equClassMapBC = sc.broadcast(equClassMap)
val equClassSwapMapBC = sc.broadcast(equClassSwapMap)
// val equClassSwapMapBC = sc.broadcast(equClassSwapMap)

// Compute the equivalence of classes and properties
// O11c: (C rdfs:subClassOf D ), (D rdfs:subClassOf C ) -> (C owl:equivalentClass D)
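// The code implementing O11c is collapsed in this view; one way the rule can
// be realized over (sub, super) pairs (hypothetical sketch, not the commit's code):
//
// val subPairs = subClassof.asInstanceOf[RDD[OWLSubClassOfAxiom]]
//   .map(a => (a.getSubClass, a.getSuperClass))       // (C, D) for C rdfs:subClassOf D
// val o11c = subPairs
//   .join(subPairs.map(_.swap))                       // (C, (D, D')) where D' rdfs:subClassOf C
//   .filter { case (_, (d, dPrime)) => d == dPrime }  // keep mutual subclass pairs only
//   .map { case (c, (d, _)) => dataFactory.getOWLEquivalentClassesAxiom(c, d) }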
@@ -176,16 +175,10 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten
.asInstanceOf[RDD[OWLSubDataPropertyOfAxiom]]
.filter(a => a.getSubProperty != a.getSuperProperty) // to exclude axioms with (C owl:subDataPropertyOf C)

// println("\n Transitive subDataPropOfAxiom closures: \n----------------\n")
// subDataPropOfAxiomsTrans.collect().foreach(println)

val subObjPropOfAxiomsTrans = tr.computeTransitiveClosure(subObjProperty, AxiomType.SUB_OBJECT_PROPERTY)
.asInstanceOf[RDD[OWLSubObjectPropertyOfAxiom]]
.filter(a => a.getSubProperty != a.getSuperProperty) // to exclude axioms with (C owl:subObjectPropertyOf C)

// println("\n Transitive subObjPropOfAxiom closures: \n----------------\n")
// subObjPropOfAxiomsTrans.collect().foreach(println)

val subAnnPropOfAxiomsTrans = tr.computeTransitiveClosure(subAnnProp, AxiomType.SUB_ANNOTATION_PROPERTY_OF)
.asInstanceOf[RDD[OWLSubAnnotationPropertyOfAxiom]]
.filter(a => a.getSubProperty != a.getSuperProperty) // to exclude axioms with (C owl:subAnnotationPropertyOf C)
@@ -212,10 +205,6 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten
.filter(a => subDataPropertyBC.value.getOrElse(a.getSuperProperty, Set.empty).contains(a.getSubProperty))
.map(a => dataFactory.getOWLEquivalentDataPropertiesAxiom(a.getSubProperty, a.getSuperProperty))

// val equivDP = equDataProp.union(eqDP.asInstanceOf[RDD[OWLAxiom]]).distinct(parallelism)
// println("\n O12c : \n----------------\n")
// equivDP.collect().foreach(println)

val eqOP = subObjPropOfAxiomsTrans
.filter(a => subObjectPropertyBC.value.getOrElse(a.getSuperProperty, Set.empty).contains(a.getSubProperty))
.map(a => dataFactory.getOWLEquivalentObjectPropertiesAxiom(a.getSubProperty, a.getSuperProperty))
@@ -429,12 +418,13 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten
val subOperands = subClassof.asInstanceOf[RDD[OWLSubClassOfAxiom]]
.map(a => (a.getSubClass, a.getSuperClass))

val sd = subOperands.filter(s => s._2.isInstanceOf[OWLDataHasValue])
.map(s => (s._1, s._2.asInstanceOf[OWLDataHasValue]))
val sd: RDD[(OWLClassExpression, OWLDataHasValue)] = subOperands.filter(s => s._2.isInstanceOf[OWLDataHasValue])
.map(s => (s._1, s._2.asInstanceOf[OWLDataHasValue])) // (S, OWLDataHasValue(P, w))

val O14_data_b = typeAxioms.filter(a => subClassOfBC.value.contains(a.getClassExpression))
.map(a => (a.getClassExpression, a.getIndividual))
.join(sd).filter(x => x._2._2.isInstanceOf[OWLDataHasValue])
.map(a => (a.getClassExpression, a.getIndividual)) // (S, i)
.join(sd) // (S, (i, (OWLDataHasValue(P, w))))
.filter(x => x._2._2.isInstanceOf[OWLDataHasValue])
.map(a => dataFactory.getOWLDataPropertyAssertionAxiom(a._2._2.getProperty, a._2._1, a._2._2.getFiller))
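// A concrete firing of this rule, using hypothetical axioms:
//   SubClassOf(:Employee DataHasValue(:status "active"))   -- the (S, OWLDataHasValue(P, w)) shape in sd
//   ClassAssertion(:Employee :alice)                        -- contributes the (S, i) pair
// the join above then derives
//   DataPropertyAssertion(:status :alice "active")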

// case c: OWLEquivalentClasses(E, OWLDataHasValue(P, w)), OWLClassAssertion(E, i)
@@ -487,16 +477,16 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten
// O13: (R owl:hasValue V), (R owl:onProperty P), (U P V) -> (U rdf:type R)
// case a: OWLEquivalentClasses(E, OWLDataHasValue(P, w)), OWLDataPropertyAssertion(P, i, w) --> OWLClassAssertion(E, i)

val e_swap = e.map(a => (a._2.getProperty, a._1))
val e_swap = e.map(a => (a._2.getProperty, a._1)) // (P, E)
val O13_data_a = dataPropAssertion.filter(a => dataHasValBC.value.contains(a.getProperty))
.map(a => (a.getProperty, a.getSubject))
.join(e_swap)
.map(a => (a.getProperty, a.getSubject)) // (P, i)
.join(e_swap) // (P, (E, i))
.map(a => dataFactory.getOWLClassAssertionAxiom(a._2._2, a._2._1))

// case b: OWLSubClassOf(S, OWLDataHasValue(P, w)) , OWLClassAssertion(S, i)
val s_swap = sd.map(a => (a._2.getProperty, a._1))
val O13_data_b = dataPropAssertion.map(a => (a.getProperty, a.getSubject))
.join(s_swap)
val s_swap = sd.map(a => (a._2.getProperty, a._1)) // (P, S)
val O13_data_b = dataPropAssertion.map(a => (a.getProperty, a.getSubject)) // (P, U)
.join(s_swap) // (P, (S, U))
.map(a => dataFactory.getOWLClassAssertionAxiom(a._2._2, a._2._1))
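// A concrete firing of case b, using hypothetical axioms:
//   SubClassOf(:Retiree DataHasValue(:status "retired"))   -- gives (P, S) in s_swap
//   DataPropertyAssertion(:status :bob "retired")          -- gives (P, U)
// the join on P then derives
//   ClassAssertion(:Retiree :bob)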

// case a: OWLEquivalentClasses(E, OWLObjectHasValue(P, w)), OWLObjectPropertyAssertion(P, i, w) --> OWLClassAssertion(E, i)
@@ -603,47 +593,51 @@ class ForwardRuleReasonerOWLHorst (sc: SparkContext, parallelism: Int = 2) exten

val time = System.currentTimeMillis() - startTime

val inferedAxioms = sc.union(typeAxioms.asInstanceOf[RDD[OWLAxiom]], sameAsAxioms.asInstanceOf[RDD[OWLAxiom]], SPOAxioms)
val inferredAxioms = sc.union(typeAxioms.asInstanceOf[RDD[OWLAxiom]], sameAsAxioms.asInstanceOf[RDD[OWLAxiom]], SPOAxioms)
.subtract(axioms)
.distinct(parallelism)

// println("\n Finish with " + inferedAxioms.count + " Inferred Axioms after adding SameAs rules")
println("\n Finish with " + inferredAxioms.count + " Inferred Axioms after adding SameAs rules")
println("\n...finished materialization in " + (time/1000) + " sec.")

inferedAxioms
val inferredGraph = inferredAxioms.union(axioms).distinct(parallelism)

// inferredAxioms
inferredGraph
}

def extractAxiom(axiom: RDD[OWLAxiom], T: AxiomType[_]): RDD[OWLAxiom] = {
axiom.filter(a => a.getAxiomType.equals(T))
}
}
}

object ForwardRuleReasonerOWLHorst {

def main(args: Array[String]): Unit = {

val input = getClass.getResource("/ont_functional.owl").getPath

println("=====================================")
println("| OWLAxioms Forward Rule Reasoner |")
println("=====================================")

val sparkSession = SparkSession.builder
// .master("spark://172.18.160.16:3090")
.master("local[*]")
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
// .config("spark.kryo.registrator", "net.sansa_stack.inference.spark.forwardchaining.axioms.Registrator")
.appName("OWL Axiom Forward Chaining Rule Reasoner")
.appName("OWLAxiom Forward Chaining Rule Reasoner")
.getOrCreate()

val sc: SparkContext = sparkSession.sparkContext
// val sparkConf = new SparkConf().setMaster("spark://172.18.160.16:3077")

val input = getClass.getResource("/ont_functional.owl").getPath
// val input = args(0)

println("=====================================")
println("| OWLAxioms Forward Rule Reasoner |")
println("=====================================")

// Call the functional syntax OWLAxiom builder
val owlAxiomsRDD: OWLAxiomsRDD = FunctionalSyntaxOWLAxiomsRDDBuilder.build(sparkSession, input)
// OWLAxiomsRDD.collect().foreach(println)

val ruleReasoner = new ForwardRuleReasonerOWLHorst(sc, 2)
val res: RDD[OWLAxiom] = ruleReasoner(owlAxiomsRDD)
// res.collect().foreach(println)

sparkSession.stop
}
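// With the maven-shade-plugin configuration added to sansa-inference-spark/pom.xml
// in this commit (which sets this class as the manifest main class), the reasoner
// could presumably also be launched from the shaded jar; the jar name, path, and
// master URL below are placeholders:
//
//   spark-submit \
//     --class net.sansa_stack.inference.spark.forwardchaining.axioms.ForwardRuleReasonerOWLHorst \
//     --master spark://host:7077 \
//     target/sansa-inference-spark_<scala.binary.version>-<version>-jar-with-dependencies.jar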
sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/axioms/ForwardRuleReasonerRDFS.scala
@@ -96,6 +96,23 @@ class ForwardRuleReasonerRDFS(sc: SparkContext, parallelism: Int = 2) extends Lo
* y rdfs:subClassOf z . x rdfs:subClassOf z .
*/

// val subClassOf = subClassofAxiom.asInstanceOf[RDD[OWLSubClassOfAxiom]]
// val op1 = subClassOf.map { a => (a.getSubClass, a.getSuperClass) }
// val op2 = subClassOf.map { a => (a.getSuperClass, a.getSubClass) }
// val subClass_ExtVP_OS = op2.join(op1).map(a => (a._1, a._2._1)).distinct()
// val subClass_ExtVP_SO = op1.join(op2).map(a => (a._1, a._2._1)).distinct()
//
// var startTime = System.currentTimeMillis()
// val Trans = subClass_ExtVP_OS.join(subClass_ExtVP_SO).map(a => (a._2._1, a._2._2))
// var endTime = System.currentTimeMillis() - startTime
// println("\n...first --- " + (endTime) + " milisec.")
//
// val res = Trans.map(a => dataFactory.getOWLSubClassOfAxiom(a._1, a._2)).distinct
//
// println("\n ---- Trans ----\n")
// Trans.collect.foreach(println(_))
// startTime = System.currentTimeMillis()

val tr = new TransitiveReasoner()
val subClassOfAxiomsTrans = tr.computeTransitiveClosure(subClassofAxiom, AxiomType.SUBCLASS_OF)
.setName("rdfs11")
@@ -272,12 +289,13 @@ class ForwardRuleReasonerRDFS(sc: SparkContext, parallelism: Int = 2) extends Lo
.distinct(parallelism)
.setName("typeAxioms + sameAsAxioms + SPOAxioms")

val infered = allAxioms.subtract(axioms)
val inferedCount = infered.count()
val inferred = allAxioms.subtract(axioms)
val inferredCount = inferred.count()

println("Finished with " + inferedCount + " inferred axioms")
infered
log.info(s"Finished with $inferredCount inferred axioms")

// inferred
allAxioms
}

def extractAxiom(axiom: RDD[OWLAxiom], T: AxiomType[_]): RDD[OWLAxiom] = {