Commit b8d673f1 authored by David Gnabasik

version 2024-04-05

parent 4d780dd4
Showing 533 additions and 100 deletions
......@@ -9,3 +9,9 @@ dependency-reduced-pom.xml
lib
*.env
.~lock.saref.pipeline.changes.ods#
patterns
saref-pipeline.env
saref-pipeline.iml
saref-pipeline.ipr
saref-pipeline.iws
......@@ -281,7 +281,15 @@
<cleanupDaemonThreads>false</cleanupDaemonThreads>
</configuration>
</plugin>
</plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<!--This plugin's configuration is used to store Eclipse m2e settings
......
......@@ -85,11 +85,9 @@ public abstract class AbstractClauseChecker extends AbstractChecker {
* If the directory does not exist, create it. directoryPath can be relative or absolute.
* If the file does not exist, create it with the default contents. Creates a file of length=0 if fileContents is empty.
* @param directoryPath
* @param fileName
* @param fileContents
* @throws IOException
*/
public final void CreateFileInDirectoryWithContents(String directoryPath, String fileName, String[] fileContents) throws IOException {
public final void CreateDirectory(String directoryPath) throws IOException {
File theDir = new File(directoryPath);
if (!theDir.exists()) {
try {
......@@ -98,6 +96,22 @@ public abstract class AbstractClauseChecker extends AbstractChecker {
throw new IOException("Unable to create directory " + directoryPath);
}
}
}
/**
* If the directory does not exist, create it. directoryPath can be relative or absolute.
* If the file does not exist, create it with the default contents. Creates a file of length=0 if fileContents is empty.
* @param directoryPath
* @param fileName
* @param fileContents
* @throws IOException
*/
public final void CreateFileInDirectoryWithContents(String directoryPath, String fileName, String[] fileContents) throws IOException {
// Let CreateDirectory's IOException propagate instead of silently skipping the file write.
CreateDirectory(directoryPath);
String fName = directoryPath + "/" + fileName;
File fout = new File(fName); // bw is closed implicitly by the try-with-resources below.
try (FileOutputStream fos = new FileOutputStream(fout); BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fos));) {
......
......@@ -27,8 +27,10 @@ package fr.mines_stetienne.ci.saref.checkers;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import fr.mines_stetienne.ci.saref.SAREFPipelineException;
import fr.mines_stetienne.ci.saref.managers.RepositoryManager;
import fr.mines_stetienne.ci.saref.utils.Languages;
/**
* Checks TS 103 673 Clause 10.1: Reference ontology pattern documentation and specification.
......@@ -38,7 +40,7 @@ public class Clause_10_1_Checker extends AbstractClauseChecker {
private static final String FIRST_LINE = "This file describes the pattern files found in this directory.";
private enum MESSAGE {
missing, ioexception, line
ioexception, missing, name, one;
}
public Clause_10_1_Checker(RepositoryManager repositoryManager) {
......@@ -57,15 +59,10 @@ public class Clause_10_1_Checker extends AbstractClauseChecker {
}
}
try {
File file = new File(dir, "pattern.ttl");
if (!file.exists()) {
logError(getMessage(Clause_10_1_Checker.MESSAGE.missing));
return;
if (Files.walk(dir.toPath(), 1).filter(p -> !p.toFile().isFile() && !p.toFile().getName().startsWith("."))
.count() != 1) {
logError(getMessage(Clause_10_1_Checker.MESSAGE.one, repository.getProject().getOntologyFileName(Languages.TEXT_TURTLE)));
}
/*List<String> lines = FileUtils.readLines(file, StandardCharsets.UTF_8);
if (lines.isEmpty() || !lines.get(0).equals(FIRST_LINE)) {
logError(getMessage(Clause_10_1_Checker.MESSAGE.line));//<<<
}*/
} catch (Exception ex) {
logError(getMessage(Clause_10_1_Checker.MESSAGE.ioexception), ex);
}
......
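For context, the Files.walk test added above counts the `patterns` directory itself plus any immediate, non-hidden subdirectories, so a count other than 1 flags nested subdirectories. A minimal standalone sketch of that idiom (the directory name is only an assumption):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class DirectoryWalkDemo {
    public static void main(String[] args) throws IOException {
        Path dir = Path.of("patterns"); // assumed directory name
        long nonFileEntries;
        // Files.walk(dir, 1) yields the start directory itself plus its immediate children;
        // filtering out regular files and hidden entries leaves only directories.
        try (var entries = Files.walk(dir, 1)) {
            nonFileEntries = entries
                    .filter(p -> !p.toFile().isFile() && !p.toFile().getName().startsWith("."))
                    .count();
        }
        // 1 means only the patterns directory itself; anything else means nested subdirectories.
        System.out.println(nonFileEntries == 1 ? "no subdirectories" : "unexpected subdirectories");
    }
}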
......@@ -55,7 +55,7 @@ public class Clause_10_2_Checker extends AbstractClauseChecker {
logWarning(getMessage(Clause_10_2_Checker.MESSAGE.missing));
}
try {
String[] lines = {}; // create empty file.
String[] lines = {"# empty"}; // create empty file.
CreateFileInDirectoryWithContents(patternDir, "pattern.ttl", lines);
} catch (IOException ex) {
logError(getMessage(Clause_10_2_Checker.MESSAGE.ioexception));
......
/*
* Copyright 2024 ETSI
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package fr.mines_stetienne.ci.saref.checkers;
import java.io.IOException;
import java.net.http.HttpResponse;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.vocabulary.DCTypes;
import org.apache.jena.vocabulary.RDF;
import fr.mines_stetienne.ci.saref.SAREF;
import fr.mines_stetienne.ci.saref.SAREFPipelineException;
import fr.mines_stetienne.ci.saref.entities.SAREFExample;
//import fr.mines_stetienne.ci.saref.entities.SAREFVersion;
import fr.mines_stetienne.ci.saref.entities.SAREFVersionName;
import fr.mines_stetienne.ci.saref.managers.RepositoryManager;
import fr.mines_stetienne.ci.saref.vocabs.SHACL;
import fr.mines_stetienne.ci.saref.managers.ShaclValidationManager;
/**
* Checks TS 103 673 Clause 9.4.6: Conformance to reference ontology patterns.
* The ontology in the ontology document of a SAREF project version shall conform to the SHACL specification of each
* reference ontology pattern defined in this SAREF project version.
*/
public class Clause_9_4_6_Checker extends AbstractShaclChecker {
private enum MESSAGE implements MessageResource {
dataset, conformsTo1, conformsToNot, conformsToNot2, conformsTo2, conformsTo3,
title1, title2, title3, abstract1, description1, description2,
license, ioexception
}
private final SAREFExample example = null;
public Clause_9_4_6_Checker(RepositoryManager repositoryManager) //, SAREFExample shacl
throws SAREFPipelineException {
super(repositoryManager, Clause_9_4_6_Checker.class); // , "example " + example.getName());
//this.example = shacl;
}
@Override
protected Model getModel() {
Model model = example.getModel();
final String defaultVersion = "v0.0.1";
final String regex = "^" + example.getIRI().replace(defaultVersion, SAREF.REGEX_VERSION_NUMBER) + "$";
int onto = 0;
boolean found = false;
for (ResIterator it = model.listSubjectsWithProperty(RDF.type, DCTypes.Dataset); it.hasNext();) {
Resource r = it.next();
if(versionName.equals(SAREFVersionName.DEFAULT)) {
if(r.getURI().matches(regex)) {
found = true;
}
} else {
if(r.getURI().equals( example.getIRI() )) {
found = true;
}
}
onto++;
}
if (onto != 1 || !found) {
final String msg;
if(versionName.equals(SAREFVersionName.DEFAULT)) {
msg = getMessage(MESSAGE.dataset, example.getIRI()).replace("v0.0.1", "<<some SAREF version name>>");
} else {
msg = getMessage(MESSAGE.dataset, example.getIRI());
}
logError(msg);
}
return model;
}
protected final void updateShapeModel() {
if(!versionName.equals(SAREFVersionName.DEFAULT)) {
shapeModel.add(MESSAGE.conformsToNot.asResource(), SHACL.pattern, exactly(version.getIRI()));
add(MESSAGE.conformsTo1, version.getIRI());
}
add(MESSAGE.conformsTo2);
add(MESSAGE.conformsTo3);
add(MESSAGE.title1);
add(MESSAGE.title2);
add(MESSAGE.title3);
add(MESSAGE.abstract1);
add(MESSAGE.description1);
add(MESSAGE.description2);
add(exactly(SAREF.LICENSE), MESSAGE.license);
if(versionName.equals(SAREFVersionName.DEFAULT)) {
remove(MESSAGE.conformsTo1);
}
}
/**
The ontology in the ontology document of a SAREF project version shall conform to the SHACL specification of each
reference ontology pattern defined in this SAREF project version.
If the ontology in the ontology document of a SAREF project version imports an {Ontology | Vocabulary | Pattern} Version IRI defined in
another SAREF project version, either directly or transitively, then it shall conform to the SHACL specification of each
reference ontology pattern defined in this other SAREF project version.
This method calls the configured docker image that runs the common shacl files.
* */
protected final void validateOntologyWithShacl(String ontologyToValidate) {
try {
HttpResponse<String> response = new ShaclValidationManager(ontologyToValidate).validateOntologyWithShacl();
} catch (IOException | InterruptedException e) {
logError(getMessage(Clause_9_4_6_Checker.MESSAGE.ioexception));
}
}
// This method calls the generic docker image that runs a specific SHACL file.
protected final void validateOntologyPerShacl(String ontologyToValidate, String shaclFile) {
try {
HttpResponse<String> response = new ShaclValidationManager(ontologyToValidate).validateOntologyWithShacl();
} catch (IOException | InterruptedException e) {
logError(getMessage(Clause_9_4_6_Checker.MESSAGE.ioexception));
}
}
}
\ No newline at end of file
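As an aside on what "conform to the SHACL specification" means in practice: this checker delegates validation to the dockerised validator through ShaclValidationManager, but the same conformance question can be asked in-process with Apache Jena's SHACL module. The following is only an illustrative sketch; the file paths are assumptions and it requires the separate jena-shacl artifact on the classpath:

import org.apache.jena.graph.Graph;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.shacl.ShaclValidator;
import org.apache.jena.shacl.Shapes;
import org.apache.jena.shacl.ValidationReport;
import org.apache.jena.shacl.lib.ShLib;

public class LocalShaclConformanceDemo {
    public static void main(String[] args) {
        // Hypothetical paths: a pattern's SHACL shapes and the ontology document under test.
        Graph shapesGraph = RDFDataMgr.loadGraph("patterns/pattern.ttl");
        Graph dataGraph = RDFDataMgr.loadGraph("ontology/saref4abcd.ttl");
        Shapes shapes = Shapes.parse(shapesGraph);
        ValidationReport report = ShaclValidator.get().validate(shapes, dataGraph);
        ShLib.printReport(report); // lists each violated shape, if any
        System.out.println(report.conforms() ? "conforms" : "does not conform");
    }
}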
......@@ -56,6 +56,7 @@ public class Clause_9_4_Checker extends AbstractClauseChecker {
new Clause_9_4_3_Checker(repositoryManager).check();
new Clause_9_4_4_Checker(repositoryManager).check();
new Clause_9_4_5_Checker(repositoryManager).check();
new Clause_9_4_6_Checker(repositoryManager).check();
} catch (SAREFPipelineException ex) {
logError(getMessage(MESSAGE.error), ex);
}
......
......@@ -130,7 +130,7 @@ public class Clause_9_5_Checker extends AbstractClauseChecker {
csvReader.forEach(row -> {
String id = row[0]; // WATR-TEST-17
Resource testResource = requirements.getResource(String.format("%stests#%s", version.getIRI(), id));
/*<<< String requirementId = row[1];
/* String requirementId = row[1];
Resource requirementResource = requirements
.getResource(String.format("%srequirements#", version.getIRI(), requirementId));
String category = row[2]; // Water meter
......
......@@ -39,7 +39,7 @@ import fr.mines_stetienne.ci.saref.managers.RepositoryManager;
public class Clause_9_6_Checker extends AbstractClauseChecker {
private static enum MESSAGE {
directories
error, missing;
};
public Clause_9_6_Checker(RepositoryManager repositoryManager) {
......@@ -48,39 +48,21 @@ public class Clause_9_6_Checker extends AbstractClauseChecker {
@Override
public void checkClause() throws SAREFPipelineException {
File dir = new File(repository.getDirectory(), "examples");
final String directoryName = "examples";
File dir = new File(repository.getDirectory(), directoryName);
if (!dir.isDirectory()) {
//return;
try {
String[] lines = { // saref4abcd template example1.ttl
"@prefix dcterms: <http://purl.org/dc/terms/> .",
"@prefix dctype: <http://purl.org/dc/dcmitype/> .",
"@prefix geo: <http://www.w3.org/2003/01/geo/wgs84_pos#> .",
"@prefix geosp: <http://www.opengis.net/ont/geosparql#> .",
"@prefix owl: <http://www.w3.org/2002/07/owl#> .",
"@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .",
"@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .",
"@prefix saref: <https://saref.etsi.org/core/> .",
"@prefix time: <http://www.w3.org/2006/time#> .",
"@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .",
"### CHANGE abcd !!! ",
"@prefix s4abcd: <https://saref.etsi.org/saref4abcd/> .",
"@prefix ex: <https://saref.etsi.org/saref4abcd/v1.1.1/example/example1/> .",
"",
"<https://saref.etsi.org/saref4abcd/v1.1.1/example/example1#> a dctype:Dataset ;",
"dcterms:license <https://forge.etsi.org/etsi-software-license> ;",
"dcterms:conformsTo <https://saref.etsi.org/core/v3.2.1/> ;",
"dcterms:conformsTo <https://saref.etsi.org/saref4abcd/v1.1.1/> ;",
"dcterms:title \"SAREF4abcd example number 1.\"@en ;",
"dcterms:description \"SAREF4abcd example number 1.\"@en .",
"",
};
CreateFileInDirectoryWithContents("examples", "example1.ttl", lines);
CreateDirectory(directoryName);
} catch (IOException ex) {
logError(getMessage(Clause_9_6_Checker.MESSAGE.directories));
logError(getMessage(Clause_9_6_Checker.MESSAGE.error));
}
}
if (dir.list().length < 1) {
log(getMessage(Clause_9_6_Checker.MESSAGE.missing));
return;
}
new Clause_9_6_1_Checker(repositoryManager).check();
for(SAREFExample example: version.getExamples().values()) {
......
......@@ -52,17 +52,6 @@ public class Clause_9_8_1_1_Checker extends AbstractClauseChecker {
public void checkClause() throws SAREFPipelineException {
try {
File dir = new File(repository.getDirectory(), "vocabularies");
/*String directories = Files.walk(dir.toPath()).filter(p -> {
try {
return p.toFile().isDirectory() && !Files.isSameFile(dir.toPath(), p);
} catch(IOException ex) {
return false;
}
}).map(p -> p.toString()).collect(Collectors.joining(", "));
if(directories.isEmpty()) {
logError(getMessage(MESSAGE.directories, directories));
}*/
String nonTtl = Files.walk(dir.toPath(), 1).filter(p -> {
try {
return p.toFile().isFile() && !SAREF.TTL_MATCHER.matches(p) && !p.toFile().getName().startsWith(".");
......
......@@ -25,11 +25,17 @@
*/
package fr.mines_stetienne.ci.saref.checkers;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.io.*;
import java.nio.file.Files;
import java.util.Set;
import java.util.ArrayList;
import java.util.Iterator;
import org.semanticweb.owlapi.model.OWLImportsDeclaration;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.IRI;
import fr.mines_stetienne.ci.saref.entities.SAREFExtension;
import fr.mines_stetienne.ci.saref.SAREFPipelineException;
import fr.mines_stetienne.ci.saref.managers.RepositoryManager;
import fr.mines_stetienne.ci.saref.utils.Languages;
......@@ -40,22 +46,27 @@ import fr.mines_stetienne.ci.saref.utils.Languages;
*/
public class Clause_9_8_1_Checker extends AbstractClauseChecker {
private Set<OWLImportsDeclaration> importDeclarations;
private static enum MESSAGE {
error, missing, ioexception, line, one, name, turtle;
}
public Clause_9_8_1_Checker(RepositoryManager repositoryManager) {
public Clause_9_8_1_Checker(RepositoryManager repositoryManager, Set<OWLImportsDeclaration> importDeclarations) {
super(repositoryManager, Clause_9_8_1_Checker.class);
this.importDeclarations = importDeclarations;
}
@Override
public void checkClause() throws SAREFPipelineException {
File dir = new File(repository.getDirectory(), "vocabularies");
final String directoryName = "vocabularies";
File dir = new File(repository.getDirectory(), directoryName);
if (!dir.isDirectory()) {
return;
}
File[] fileList = null;
// Step 4. Fetch the list of turtle files in the vocabularies directory.
File[] fileList = null;
try {
if (Files.walk(dir.toPath(), 1).filter(p -> !p.toFile().isFile() && !p.toFile().getName().startsWith("."))
.count() != 1) {
......@@ -72,27 +83,40 @@ public class Clause_9_8_1_Checker extends AbstractClauseChecker {
} catch (IOException e) {
logError(getMessage(Clause_9_8_1_Checker.MESSAGE.ioexception), e);
}
// Step 5. If the list is empty, return an error.
if (fileList.length < 1) {
log(getMessage(Clause_9_8_1_Checker.MESSAGE.missing));
return;
}
/*File file = new File(dir, repository.getProject().getOntologyFileName(Languages.TEXT_TURTLE));
if (!file.isFile()) {
String msg = getMessage(MESSAGE.name, repository.getOntologyFileName(Languages.TEXT_TURTLE));
logError(msg);
throw new SAREFPipelineException(msg);
// <<< Step 6. Match each imports statements to the presence of each file using the last tag (e.g. "properties" +".ttl")
// Ignore turtle files which do not have a corresponding imports statements.
/*Iterator<OWLImportsDeclaration> itr = this.importDeclarations.iterator();
while(itr.hasNext()) { // <https://saref.etsi.org/saref4auto/v2.1.1/properties/>
IRI iri = itr.next().getIRI();
}*/
for (File child : fileList) {
System.out.println("!!! "+child.getName()+" !!!");//<<<
}
//<<< Check if saref core imports each vocabulary file. Iterate over each element in fileList and call checks().
try {
new Clause_9_8_1_1_Checker(repositoryManager).check();
new Clause_9_8_1_2_Checker(repositoryManager).check();
new Clause_9_8_1_3_Checker(repositoryManager).check();
new Clause_9_8_1_4_Checker(repositoryManager).check();
new Clause_9_8_1_5_Checker(repositoryManager).check();
} catch (SAREFPipelineException ex) {
logError(getMessage(Clause_9_8_1_Checker.MESSAGE.error), ex);
}
// Step 7. Read each vocabulary file and check its syntax.
for(int i = 0; i < fileList.length; i++) {
String fileName = "file://" + fileList[i].getAbsolutePath();
// SAREFExtension(String acronym, name, path, prefix, namespace, ontologyFileName) use dummy values
SAREFExtension vocabulary = new SAREFExtension("abcd", "abcd", "owl", "owl", "abcd", fileName);
IRI iri = IRI.create(fileName);
// Ignore ==> ERROR in org.semanticweb.owlapi.rdf.rdfxml.parser.OWLRDFConsumer: Entity not properly recognized...
final OWLOntology ontology = pipeline.getOntologyManager().loadOntology(vocabulary, errorLogger, iri);
if (ontology == null) {
return;
}
try {
new Clause_9_8_1_1_Checker(repositoryManager).check();
new Clause_9_8_1_2_Checker(repositoryManager).check();
new Clause_9_8_1_3_Checker(repositoryManager).check();
new Clause_9_8_1_4_Checker(repositoryManager).check();
new Clause_9_8_1_5_Checker(repositoryManager).check();
} catch (SAREFPipelineException ex) {
logError(getMessage(Clause_9_8_1_Checker.MESSAGE.error), ex);
}
} // for loop
}
}
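The Step 6 matching that is still commented out above (pairing each owl:imports IRI with a vocabulary file by its last path segment) could look roughly like the following sketch; the class and method names are assumptions, not part of the pipeline:

import java.io.File;
import java.util.Set;
import org.semanticweb.owlapi.model.OWLImportsDeclaration;

class VocabularyImportMatcher {
    // True if some owl:imports IRI ends with the file's base name,
    // e.g. <https://saref.etsi.org/saref4auto/v2.1.1/properties/> matches properties.ttl.
    static boolean isImported(File vocabularyFile, Set<OWLImportsDeclaration> imports) {
        String stem = vocabularyFile.getName().replaceFirst("\\.ttl$", "");
        return imports.stream()
                .map(decl -> decl.getIRI().toString())
                .map(iri -> iri.endsWith("/") ? iri.substring(0, iri.length() - 1) : iri)
                .anyMatch(iri -> iri.endsWith("/" + stem));
    }
}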
......@@ -25,34 +25,63 @@
*/
package fr.mines_stetienne.ci.saref.checkers;
import java.util.Set;
import java.io.File;
import java.io.IOException;
import fr.mines_stetienne.ci.saref.SAREFPipelineException;
import fr.mines_stetienne.ci.saref.managers.RepositoryManager;
import org.semanticweb.owlapi.model.OWLImportsDeclaration;
import org.semanticweb.owlapi.model.OWLOntology;
/**
* Checks TS 103 673 Clause 9.8: Vocabularies
* Checks TS 103 673 Clause 9.8: Vocabularies. Verifies that the SAREF core ontology imports each vocabulary file.
*
*/
public class Clause_9_8_Checker extends AbstractClauseChecker {
private enum MESSAGE {
error
error, missing;
}
public Clause_9_8_Checker(RepositoryManager repositoryManager) {
super(repositoryManager, Clause_9_8_Checker.class);
}
// Step 1. Open the vocabularies directory. If it does not exist, create it.
// Step 2. If there is not at least one file, return an error.
// Step 3. Get the list of imports statements from the saref core ontology. If none, return.
// owl:imports <https://saref.etsi.org/core/v4.1.1/vocabularies/properties/>
// Clause_9_8_1_Checker::
// Step 4. Fetch the list of turtle files in the vocabularies directory.
// Step 5. If the list is empty, return an error.
// Step 6. Match each imports statements to the presence of each file using the last tag (e.g. properties.ttl)
// Ignore turtle files which do not have a corresponding imports statements.
// Step 7. Check the syntax of each vocabulary file.
public void checkClause() throws SAREFPipelineException {
File dir = new File(repository.getDirectory(), "vocabularies");
final String directoryName = "vocabularies";
// Step 1:
File dir = new File(repository.getDirectory(), directoryName);
if (!dir.isDirectory()) {
return;
try {
CreateDirectory(directoryName);
} catch (IOException ex) {
logError(getMessage(Clause_9_8_Checker.MESSAGE.error));
}
}
try {
new Clause_9_8_1_Checker(repositoryManager).check();
} catch (SAREFPipelineException ex) {
logError(getMessage(Clause_9_8_Checker.MESSAGE.error), ex);
// Step 2:
if (dir.list().length < 1) {
log(getMessage(Clause_9_8_Checker.MESSAGE.missing));
return;
}
// Step 3: get list of import declarations from SAREF4AUTO.ttl and pass to Clause_9_8_1_Checker.
final OWLOntology coreOntology = pipeline.getOntologyManager().loadOntology(version, errorLogger);
// importDeclarations contains ALL the imports statements, not just the files in the vocabularies directory.
Set<OWLImportsDeclaration> importDeclarations = coreOntology.getImportsDeclarations();
if (importDeclarations.isEmpty()) return;
new Clause_9_8_1_Checker(repositoryManager, importDeclarations).check();
}
}
......@@ -70,6 +70,7 @@ public class TermsChecker extends SAREFRepositoryErrorLogger {
}
}
// This warning can be safely ignored.
private void checkDefinedNotExemplified(SAREFVersion version) {
String msg = version.getDefinedTerms().stream().filter(
term -> !term.getIsExemplifiedBy().stream().anyMatch(example -> example.getVersion().equals(version)))
......@@ -78,5 +79,4 @@ public class TermsChecker extends SAREFRepositoryErrorLogger {
logWarning(getMessage(MESSAGE.defined_not_exemplified, project.getName(), version.getVersionName(), msg));
}
}
}
......@@ -51,6 +51,24 @@ public class SAREFExtension implements SAREFProject {
this.resource = ResourceFactory.createResource(getNamespace());
}
// Specifically for vocabulary files.
public SAREFExtension(
String acronym,
String name,
String path,
String prefix,
String namespace,
String ontologyFileName) {
this.acronym = new SAREFAcronym(acronym);
this.name = name.toUpperCase();
this.path = path;
this.prefix = prefix;
this.namespace = namespace;
this.ontologyFileName = ontologyFileName;
this.resource = ResourceFactory.createResource(namespace);
}
public final String getAcronym() {
return acronym.getAcronym();
}
......@@ -65,6 +83,7 @@ public class SAREFExtension implements SAREFProject {
return path;
}
@Override
public String getPrefix() {
return prefix;
}
......
......@@ -65,6 +65,7 @@ import fr.mines_stetienne.ci.saref.SAREFPipeline;
import fr.mines_stetienne.ci.saref.SAREFPipeline.Mode;
import fr.mines_stetienne.ci.saref.entities.SAREFExample;
import fr.mines_stetienne.ci.saref.entities.SAREFVersion;
import fr.mines_stetienne.ci.saref.entities.SAREFExtension;
import fr.mines_stetienne.ci.saref.managers.parsers.JenaModelDocumentFormatFactory;
import fr.mines_stetienne.ci.saref.managers.parsers.SourceOntologyParser;
......@@ -141,6 +142,16 @@ public class OntologyManager extends SAREFErrorLogger {
}
}
// Reads a vocabulary file from the file system via the given IRI; uses the new SAREFExtension constructor because SAREFExtension has no getIRI() method.
public OWLOntology loadOntology(SAREFExtension vocabulary, Logger logger, IRI iri) {
try {
return ONTOLOGY_MANAGER.loadOntologyFromOntologyDocument(iri);
} catch (OWLOntologyCreationException ex) {
log(logger, getMessage(MESSAGE.loading_error), ex, Mode.DEVELOP, Mode.RELEASE, Mode.PRERELEASE_PORTAL, Mode.RELEASE_PORTAL);
return null;
}
}
public Set<Explanation<OWLAxiom>> getInconsistenceExplanations(OWLOntology ontology, Logger logger) {
final ExplanationGenerator<OWLAxiom> gen = inconsistentOntologyExplanationGeneratorFactory
.createExplanationGenerator(ontology);
......
/*
* Copyright 2024 ETSI
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package fr.mines_stetienne.ci.saref.managers;
import java.io.IOException;
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.*;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
//import fr.mines_stetienne.ci.saref.SAREFErrorLogger;
public class ShaclValidationManager {
private final static String BASE_URL = "http://localhost:8080/shacl/"; // <<<
private String ontologyToValidate;
private String validationType = "Ontology"; // assumption: the validator domain used in the request URL; see getShaclDomainInfo() for available domains.
private final HttpClient client;
private final ObjectMapper objectMapper;
private static String encode(Object obj) {
return URLEncoder.encode(obj.toString(), StandardCharsets.UTF_8);
}
private enum MESSAGE {
http_exception, ioexception
}
// The full list of available tags: https://www.itb.ec.europa.eu/docs/guides/latest/validatingRDF/
// Replace default values for {contentToValidate, validationType}
private String shaclRequestBody = "{ "+ // generic validator option; standard JSON quoting so ObjectMapper.readValue() can parse it
"\"contentToValidate\": \"string\", "+ // entire content of ontology file (or URI)
"\"embeddingMethod\": \"STRING\", "+ // {STRING,URL,BASE64}
"\"contentSyntax\": \"text/turtle\", "+
"\"validationType\": \"string\", "+ // Clause_9_4_3_1-3.ttl, Clause_9_4_4_2.ttl, Clause_9_6_3.ttl (for configured validator)
"\"reportSyntax\": \"text/turtle\", "+
"\"locale\": \"en\", "+
"\"rdfReportSyntax\": \"application/json\" "+
"}";
JsonObject jsonObject = new JsonParser().parse(shaclRequestBody).getAsJsonObject();
/**
* Open a REST interface to the SHACL validator.
*/
public ShaclValidationManager(String ontologyToValidate) throws IOException {
client = HttpClient.newHttpClient();
objectMapper = new ObjectMapper();
this.ontologyToValidate = ontologyToValidate; //<<< test for file presence.
}
public static HttpRequest.BodyPublisher ofForm(Map<String, String> data) {
StringBuilder body = new StringBuilder();
for (String dataKey : data.keySet()) {
if (body.length() > 0) {
body.append("&");
}
body.append(encode(dataKey))
.append("=")
.append(encode(data.get(dataKey)));
}
return HttpRequest.BodyPublishers.ofString(body.toString());
}
/**
* POST request to Validate a single RDF instance: {domain}/api/validate/{requestBody}
<<< This method uses the validator configured to read the Clause_9_4_3_1-3.ttl, Clause_9_4_4_2.ttl, Clause_9_6_3.ttl shacl files.
* Response: HTTP code 200 for successful validation as JSON.
*/
public HttpResponse<String> validateOntologyWithShacl()
throws IOException, InterruptedException {
Map<String, String> data = new HashMap<>();
data = objectMapper.readValue(shaclRequestBody, HashMap.class);
data.put("contentToValidate", ontologyToValidate); // file:///home/davidgnabasik/dev/real_saref4auto/ontology/saref4auto.ttl or URL
data.put("validationType", validationType);
System.out.println("Map is: "+data);//<<<
/* "contentToValidate": "RDF_CONTENT_AS_BASE64_ENCODED_STRING",
"contentSyntax": "text/turtle",
"embeddingMethod": "STRING",
"validationType": "Clause_9_4_3_1",
"reportSyntax": "application/json",
"locale": "en" */
HttpRequest request = HttpRequest.newBuilder()
.header("Content-Type", "text/turtle") // application/x-www-form-urlencoded
.uri(URI.create(BASE_URL + validationType + "/api/validate" ))
.POST(ofForm(data))
.build();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
System.out.println("Status code: " + response.statusCode());
System.out.println("\n Body: " + response.body());
return response;
}
/**
* If shaclDomain is empty, return info for all defined domains.
* @throws IOException
* @throws InterruptedException
*/
public HttpResponse<String> getShaclDomainInfo(String shaclDomain) throws IOException, InterruptedException {
Map<String, String> data = new HashMap<>();
data = objectMapper.readValue(shaclRequestBody, HashMap.class);
data.put("validationType", shaclDomain); // {Ontology,Commodity}}
String url = BASE_URL + "api/info"; // BASE_URL already ends with "/shacl/"
if (shaclDomain.length() > 0) url = BASE_URL + shaclDomain + "/api/info";
HttpRequest request = HttpRequest.newBuilder()
.header("Accept", "text/turtle")
.header("Content-Type", "text/turtle")
.uri(URI.create(url))
.GET()
.build();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
System.out.println("Status code: " + response.statusCode());
System.out.println("\n Body: " + response.body());
return response;
}
}
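A minimal, hypothetical caller for the manager above (the ontology path is an assumption, and the ITB validator is assumed to be running at the BASE_URL configured above):

import fr.mines_stetienne.ci.saref.managers.ShaclValidationManager;

public class ShaclValidationDemo {
    public static void main(String[] args) throws Exception {
        // Ontology document to validate, given as a file URI or URL (assumed path).
        ShaclValidationManager manager = new ShaclValidationManager("file:///tmp/saref4auto.ttl");
        var response = manager.validateOntologyWithShacl(); // POSTs to {BASE_URL}{validationType}/api/validate
        System.out.println("HTTP " + response.statusCode()); // 200 on successful validation
        System.out.println(response.body());                 // JSON report, in the format shown below
    }
}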
/*
[
{
"domain": "Commodity",
"validationTypes": [
{
"type": "Commodity",
"description": "Commodity"
}
]
},
{
"domain": "Ontology",
"validationTypes": [
{
"type": "Clause_9_4_3_1",
"description": "Clause_9_4_3_1"
},
{
"type": "Clause_9_4_3_2",
"description": "Clause_9_4_3_2"
},
{
"type": "Clause_9_4_3_3",
"description": "Clause_9_4_3_3"
},
{
"type": "Clause_9_4_3_4",
"description": "Clause_9_4_3_4"
},
{
"type": "Clause_9_6_3",
"description": "Clause_9_6_3"
},
{
"type": "ontologyPerson",
"description": "ontologyPerson"
}
]
}
]
*/
/* POST JSON report format:
{
"date": "2024-04-05T10:18:24.197+0000",
"result": "SUCCESS",
"overview": {
"profileID": "Clause_9_4_3_1"
},
"counters": {
"nrOfAssertions": 0,
"nrOfErrors": 0,
"nrOfWarnings": 0
},
"context": {},
"reports": {},
"name": "SHACL Validation"
}
*/
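The JSON report above can be reduced to a pass/fail summary with the same Jackson ObjectMapper the manager already uses; a sketch, with field names taken from the sample report:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

class ShaclReportSummary {
    static void summarize(String reportJson) throws Exception {
        JsonNode root = new ObjectMapper().readTree(reportJson);
        String result = root.path("result").asText();                   // "SUCCESS" or "FAILURE"
        int errors = root.path("counters").path("nrOfErrors").asInt();  // 0 when the ontology conforms
        System.out.println(result + " with " + errors + " error(s)");
    }
}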
line=The requirements specification files in the patterns directory shall be in Turtle 1.1 format.
ioexception=Error while checking the `patterns` directory.
one=The `patterns` directory of the SAREF project version should contain at least one reference ontology pattern file in ttl format (e.g., pattern.ttl) to illustrate how the ontology components conform to the suite of SAREF patterns.
missing=The `patterns` directory should contain at least one file. This file shall conform to the pattern specification as defined in clause 10.3 in TS 103 673.
name=The patterns document shall be named `%s`.
empty=The example graph is empty
\ No newline at end of file
error=There is an error reading or writing the `examples` directory regarding Clause 9.6.
missing=The `examples` directory should contain at least one file. This file shall conform to the pattern specification as defined in clause 9.6 in TS 103 673.
error=There is an error in the `patterns` directory regarding Clause 9_8_1.
missing=The `patterns` directory should contain at least one file. This file shall conform to the pattern specification as defined in clause 10.3 in TS 103 673.
ioexception=Error while checking the `patterns` directory.
line=The requirements specification files in the patterns directory shall be in Turtle 1.1 format.
one=The `patterns` directory of the SAREF project version shall contain a single file called the *ontology document* of the SAREF project version, and named `%s`
name=The patterns document shall be named `%s`.
turtle=The patterns document shall contain the sources of an ontology in the Turtle 1.1 format. See detailed error below.
error=There is an error in the `vocabularies` directory regarding Clause 9_8_1.
missing=The `vocabularies` directory should contain at least one file. This file shall conform to the pattern specification as defined in clause 9.8 in TS 103 673.
ioexception=Error while checking the `vocabularies` directory.
line=The requirements specification files in the vocabularies directory shall be in Turtle 1.1 format.
one=The `vocabularies` directory of the SAREF project version shall contain a single file called the *ontology document* of the SAREF project version, and named `%s`
name=The vocabularies document shall be named `%s`.
turtle=The vocabularies document shall contain the sources of an ontology in the Turtle 1.1 format. See detailed error below.
error=Error while checking the `vocabularies` directory.
error=There is an error reading or writing the `vocabularies` directory regarding Clause 9.8.
missing=The `vocabularies` directory should contain at least one file. This file shall conform to the pattern specification as defined in clause 9.8 in TS 103 673.