package fr.emse.gitlab.saref.utils;

import java.util.HashMap;
import java.util.Map;

import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.semanticweb.owlapi.formats.TurtleDocumentFormatFactory;
import org.semanticweb.owlapi.io.AbstractOWLParser;
import org.semanticweb.owlapi.io.OWLOntologyDocumentSource;
import org.semanticweb.owlapi.io.OWLParserException;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.NodeID;
import org.semanticweb.owlapi.model.OWLDocumentFormat;
import org.semanticweb.owlapi.model.OWLDocumentFormatFactory;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyLoaderConfiguration;
import org.semanticweb.owlapi.rdf.turtle.parser.OWLRDFConsumerAdapter;

/**
 * An OWL API parser that reads an ontology directly from a named graph of a
 * Jena {@link Dataset} instead of parsing a serialized document. The graph
 * whose name equals the document IRI of the source is streamed, triple by
 * triple, into an {@link OWLRDFConsumerAdapter}.
 */
public class JenaModelOntologyParser extends AbstractOWLParser {

	/** Maps Jena blank node labels to the anonymous IRIs minted for them. */
	private final Map<String, IRI> string2IRI = new HashMap<>();

	private final Dataset dataset;

	public JenaModelOntologyParser(Dataset dataset) {
		this.dataset = dataset;
	}

	@Override
	public OWLDocumentFormatFactory getSupportedFormat() {
		return new TurtleDocumentFormatFactory();
	}

	@Override
	public OWLDocumentFormat parse(OWLOntologyDocumentSource source, OWLOntology ontology,
			OWLOntologyLoaderConfiguration config) {
		String iri = source.getDocumentIRI().toString();
		boolean ownTransaction = false;
		if (dataset.transactionMode() == ReadWrite.WRITE) {
			throw new OWLParserException("already in a write transaction");
		}
		// Open a read transaction if the caller has not already started one.
		if (dataset.transactionMode() == null) {
			ownTransaction = true;
			dataset.begin(ReadWrite.READ);
		}
		if (!dataset.containsNamedModel(iri)) {
			if (ownTransaction) {
				dataset.end();
			}
			throw new OWLParserException("no graph named " + iri + " in the dataset.");
		}
		Model model = dataset.getNamedModel(iri);

		OWLRDFConsumerAdapter consumer = new OWLRDFConsumerAdapter(ontology, config);
		JenaModelFormat format = new JenaModelFormat();
		consumer.setOntologyFormat(format);
		consumer.startModel(source.getDocumentIRI());

		// Forward the prefix declarations of the Jena model to the consumer.
		model.getNsPrefixMap().forEach(consumer::handlePrefixDirective);

		// Stream every statement of the named graph into the consumer.
		model.listStatements().forEachRemaining(stmt -> {
			Resource s = stmt.getSubject();
			Property p = stmt.getPredicate();
			RDFNode o = stmt.getObject();
			IRI s2 = getIRI(s);
			IRI p2 = getIRI(p);
			if (o.isResource()) {
				IRI o2 = getIRI(o.asResource());
				consumer.handleTriple(s2, p2, o2);
			} else {
				Literal l = o.asLiteral();
				String object = l.getLexicalForm();
				String dt = l.getDatatypeURI();
				String language = l.getLanguage();
				if (language != null && !language.isEmpty()) {
					consumer.handleTriple(s2, p2, object, language);
				} else if (dt != null) {
					IRI dt2 = IRI.create(dt);
					consumer.handleTriple(s2, p2, object, dt2);
				} else {
					consumer.handleTriple(s2, p2, object);
				}
			}
		});
		consumer.handleEnd();
		if (ownTransaction) {
			dataset.end();
		}
		return format;
	}

	/**
	 * Converts a Jena resource into an OWL API IRI, minting a stable anonymous
	 * IRI when the resource is a blank node.
	 */
	private IRI getIRI(Resource s) {
		if (s.isAnon()) {
			return getBlankNode(s.getId().getLabelString());
		}
		return IRI.create(s.getURI());
	}

	/**
	 * Gets the anonymous IRI associated with a blank node label, creating it on
	 * first use.
	 *
	 * @param id the blank node label
	 * @return the anonymous IRI for that label
	 */
	protected IRI getBlankNode(String id) {
		IRI iri = string2IRI.get(id);
		if (iri == null) {
			String string = NodeID.nextAnonymousIRI();
			iri = IRI.create(string);
			string2IRI.put(id, iri);
		}
		return iri;
	}
}
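
/*
 * A minimal usage sketch, not part of the original class: the graph name
 * "https://example.org/saref" and the file "saref.ttl" are placeholders chosen
 * for illustration. It assumes the standard Jena and OWL API entry points
 * org.apache.jena.query.DatasetFactory, org.apache.jena.riot.RDFDataMgr,
 * org.semanticweb.owlapi.apibinding.OWLManager and
 * org.semanticweb.owlapi.io.IRIDocumentSource, and that
 * OWLOntologyCreationException is handled by the caller.
 *
 *   // Load the ontology file into a named graph of a transactional dataset.
 *   Dataset dataset = DatasetFactory.createTxnMem();
 *   dataset.begin(ReadWrite.WRITE);
 *   RDFDataMgr.read(dataset.getNamedModel("https://example.org/saref"), "saref.ttl");
 *   dataset.commit();
 *   dataset.end();
 *
 *   // Create an empty ontology and fill it from the named graph.
 *   OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
 *   OWLOntology ontology = manager.createOntology();
 *   JenaModelOntologyParser parser = new JenaModelOntologyParser(dataset);
 *   parser.parse(new IRIDocumentSource(IRI.create("https://example.org/saref")),
 *           ontology, new OWLOntologyLoaderConfiguration());
 */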