diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..ae2cf3918fb50d9ffca00191a8c705853b2e283e --- /dev/null +++ b/pom.xml @@ -0,0 +1,223 @@ + + + 4.0.0 + fr.emse.gitlab.saref + saref-pipeline + 1.0-SNAPSHOT + jar + + SAREF-Pipeline + The pipeline to check SAREF extension projects and generate documentation + 2019 + + + + Maxime Lefrançois + maxime.lefrancois@emse.fr + http://maxime-lefrancois.info/ + Ecole des mines de Saint-Etienne + http://www.mines-stetienne.fr/ + +1 + + http://gravatar.com/userimage/102097381/dac8aeb15edac9a93e09a7974743957f + + + + Omar Qawasmeh + omar.alqawasmeh@emse.fr + https://perso.univ-st-etienne.fr/alo09685/ + Ecole des mines de Saint-Etienne + http://www.mines-stetienne.fr/ + +1 + + + + + École des Mines de Saint-Étienne + http://www.mines-stetienne.fr/ + + + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + + + + + + ossrh + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + + 1.8 + 1.8 + yyyy-MM-dd'T'HH:mm:ssZ + 1.8 + UTF-8 + UTF-8 + git + 2.0-SNAPSHOT + + + + + junit + junit + 4.12 + test + + + org.hamcrest + hamcrest-core + 1.3 + test + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + commons-cli + commons-cli + 1.4 + + + commons-io + commons-io + 2.6 + + + org.apache.commons + commons-compress + 1.20 + + + org.apache.commons + commons-text + 1.8 + + + + com.fasterxml.jackson.dataformat + jackson-dataformat-xml + 2.10.1 + + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + 2.10.1 + + + + + org.eclipse.jgit + org.eclipse.jgit + 5.2.1.201812262042-r + + + + com.google.code.gson + gson + 2.8.2 + jar + + + org.apache.jena + jena-tdb + 3.13.0 + + + fr.emse.ci + sparql-generate-markdown + ${sparql-generate.version} + + + fr.emse.ci + sparql-generate-jena + ${sparql-generate.version} + + + org.topbraid + shacl + 1.3.1 + + + + net.sourceforge.owlapi + owlexplanation + 5.0.0 + + + net.sourceforge.owlapi + org.semanticweb.hermit + 1.4.5.519 + + + + + + ${project.artifactId} + + + org.apache.maven.plugins + maven-shade-plugin + 2.4.1 + + + package + + shade + + + + + fr.emse.gitlab.saref.Main + + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + + java + + + + + fr.emse.gitlab.saref.Main + false + + + + + diff --git a/run.sh b/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..ddd77072dd0a6afe866f2791f5e264aa47d11bbd --- /dev/null +++ b/run.sh @@ -0,0 +1 @@ +mvn exec:java -Dexec.mainClass="fr.emse.gitlab.saref.Main" -Dexec.args="-d ../saref-core" diff --git a/saref-pipeline-cli/.gitignore b/saref-pipeline-cli/.gitignore deleted file mode 100644 index b83d22266ac8aa2f8df2edef68082c789727841d..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target/ diff --git a/saref-pipeline-cli/pom.xml b/saref-pipeline-cli/pom.xml deleted file mode 100644 index 114536590066639ae4321923bda20db161e46115..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/pom.xml +++ /dev/null @@ -1,97 +0,0 @@ - - 4.0.0 - - - fr.emse.gitlab.saref - saref-pipeline-parent - 1.0-SNAPSHOT - ../saref-pipeline-parent/pom.xml - - - saref-pipeline-cli - SAREF-Pipeline-cli - jar - - - - Omar Qawasmeh - omar.alqawasmeh@emse.fr - https://perso.univ-st-etienne.fr/alo09685/ - Ecole des mines de Saint-Etienne - http://www.mines-stetienne.fr/ - +1 
- - - - - - org.slf4j - slf4j-log4j12 - 1.7.25 - - - - - org.eclipse.jgit - org.eclipse.jgit - 5.2.1.201812262042-r - - - com.google.code.gson - gson - 2.8.2 - jar - - - commons-cli - commons-cli - 1.4 - - - fr.emse.gitlab.saref - saref-pipeline-core - 1.0-SNAPSHOT - - - - - ${project.artifactId} - - - org.apache.maven.plugins - maven-shade-plugin - 2.4.1 - - - package - - shade - - - - - fr.emse.gitlab.saref.Main - - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - - - - - - \ No newline at end of file diff --git a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/Main.java b/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/Main.java deleted file mode 100644 index 47f574a443872a3a576356ab83daf0d732558b05..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/Main.java +++ /dev/null @@ -1,118 +0,0 @@ -package fr.emse.gitlab.saref; - -import static fr.emse.gitlab.saref.CMDConfigurations.*; - -import java.awt.Desktop; -import java.io.File; -import java.io.IOException; -import java.io.StringWriter; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URLEncoder; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; -import javax.xml.bind.Marshaller; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.ParseException; -import org.apache.commons.io.FileUtils; -import org.apache.log4j.Layout; -import org.apache.log4j.PatternLayout; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import fr.emse.gitlab.saref.api.JobRunner; -import fr.emse.gitlab.saref.entities.TestSuites; -import fr.emse.gitlab.saref.jobs.library.ReadOntologies; -import fr.emse.gitlab.saref.jobs.library.RepositoryStructureChecker; -import fr.emse.gitlab.saref.utils.SAREF; - -public class Main { - - static final Logger LOG = LoggerFactory.getLogger(Main.class); - private static final Layout LAYOUT = new PatternLayout("%d{mm:ss,SSS} %t %-5p %c:%L - %m%n"); - private static final org.apache.log4j.Logger ROOT_LOGGER = org.apache.log4j.Logger.getRootLogger(); - - private static TestSuites testSuites = new TestSuites(); - private static File directory; - private static File target; - - public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException, JAXBException, ParseException { - CommandLine cl = CMDConfigurations.parseArguments(args); - - if (cl.getOptions().length == 0 || cl.hasOption(ARG_HELP)) { - CMDConfigurations.displayHelp(); - return; - } - - String dirName = cl.getOptionValue(ARG_DIRECTORY, ARG_DIRECTORY_DEFAULT); - if (dirName.equals("")) { - dirName = ARG_DIRECTORY_DEFAULT; - } - - - - - if (args.length == 0) { - directory = new File("").getAbsoluteFile(); - } else if (args.length > 1) { - throw new IllegalArgumentException( - "Expecting at most one argument: the location of the SAREF directory where to run the pipeline"); - } else { - directory = new File(args[0]).getAbsoluteFile(); - } - - target = new File(directory, "target"); - FileUtils.forceMkdir(target); - - File logFile = new File(directory, "target/output.log"); - ROOT_LOGGER.addAppender(new org.apache.log4j.RollingFileAppender(LAYOUT, logFile.getAbsolutePath(), false)); - - LOG.info("Starting pipeline"); - JobRunner checker = new RepositoryStructureChecker(directory); - checker.doJob(testSuites); - if(testSuites.getErrors() > 0) { - reportAndExit(-1); - } - - checker = new ReadOntologies(directory); - checker.doJob(testSuites); - - - reportAndExit(0); - } - - 
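CMDConfigurations is referenced by Main above (parseArguments, displayHelp, ARG_HELP, ARG_DIRECTORY, ARG_DIRECTORY_DEFAULT) but is not part of this diff. Assuming the commons-cli 1.4 dependency declared in the new pom.xml, a minimal sketch of that class could look as follows; the option letters, long names and the empty default directory are assumptions, only the constant names and their usage are taken from Main.

```java
package fr.emse.gitlab.saref;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class CMDConfigurations {
    // Option letters and the default value are assumptions; Main only shows how they are read.
    public static final String ARG_HELP = "h";
    public static final String ARG_DIRECTORY = "d";
    public static final String ARG_DIRECTORY_DEFAULT = "";

    public static Options getCMDOptions() {
        return new Options()
                .addOption(Option.builder(ARG_HELP).longOpt("help")
                        .desc("Show this help message").build())
                .addOption(Option.builder(ARG_DIRECTORY).longOpt("directory").hasArg()
                        .desc("Location of the SAREF extension directory (defaults to the working directory)")
                        .build());
    }

    public static CommandLine parseArguments(String[] args) throws ParseException {
        return new DefaultParser().parse(getCMDOptions(), args);
    }

    public static void displayHelp() {
        new HelpFormatter().printHelp("saref-pipeline", getCMDOptions());
    }
}
```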
private static void reportAndExit(int code) { - try { - File report = new File(target, "report_output.xml"); - JAXBContext jaxbContext = JAXBContext.newInstance(TestSuites.class); - Marshaller jaxbMarshaller = jaxbContext.createMarshaller(); - jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE); - jaxbMarshaller.marshal(testSuites, report); - jaxbMarshaller.marshal(testSuites, System.out); - final StringWriter sw = new StringWriter(); - jaxbMarshaller.marshal(testSuites, sw); - if (Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) { - Desktop.getDesktop().browse(new URI(SAREF.BASE + "report.html?report=" + URLEncoder.encode(sw.toString(), "UTF-8"))); - } - } catch(JAXBException | URISyntaxException | IOException ex) { - LOG.error("Exception:", ex); - ex.printStackTrace(); - } - System.exit(code); - } - - // private static void testRead() { - // try { - // JAXBContext jaxbContext = JAXBContext.newInstance(TestSuite.class); - // Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller(); - // - // TestCase testCase = (TestCase) jaxbUnmarshaller.unmarshal(target); - // - // } catch (JAXBException e) { - // e.printStackTrace(); - // } - // } - -} diff --git a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/jobs/library/ReadOntologies.java b/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/jobs/library/ReadOntologies.java deleted file mode 100644 index faf90686c5117203566adecf4711eb7a33e76e0d..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/jobs/library/ReadOntologies.java +++ /dev/null @@ -1,112 +0,0 @@ -package fr.emse.gitlab.saref.jobs.library; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.nio.file.Files; -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.riot.Lang; - -import fr.emse.gitlab.saref.jobs.AbstractJobRunner; -import fr.emse.gitlab.saref.utils.SAREF; - -public class ReadOntologies extends AbstractJobRunner { - - static final Map PREFIXES = new HashMap(); - static { - PREFIXES.put("owl", "http://www.w3.org/2002/07/owl#"); - PREFIXES.put("rdfs", "http://www.w3.org/2000/01/rdf-schema#"); - PREFIXES.put("xsd", "http://www.w3.org/2001/XMLSchema#"); - PREFIXES.put("dcterms", "http://purl.org/dc/terms/"); - PREFIXES.put("vann", "http://purl.org/vocab/vann/"); - PREFIXES.put("foaf", "http://xmlns.com/foaf/0.1/"); - PREFIXES.put("schema", "http://schema.org/"); - PREFIXES.put("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); - PREFIXES.put("voaf", "http://purl.org/vocommons/voaf#"); - PREFIXES.put("dce", "http://purl.org/dc/elements/1.1/"); - PREFIXES.put("dct", "http://purl.org/dc/terms/"); - PREFIXES.put("xml", "http://www.w3.org/XML/1998/namespace/"); - PREFIXES.put("saref", "https://saref.etsi.org/core/"); - } - - public ReadOntologies(File dir) { - super(dir); - } - - @Override - protected void doJob0() { - String repoName = getRepoName(); - String ontologyName = repoName.equals("saref-core") ? 
"saref.ttl" : repoName + ".ttl"; - checkOntology(ontologyName); - checkExamples(); - } - - private void checkOntology(String ontologyName) { - File ontologyFile = new File(directory, "ontology/" + ontologyName); - Model model = ModelFactory.createDefaultModel(); - try (FileInputStream input = new FileInputStream(ontologyFile)) { - model.read(input, null, Lang.TTL.getLabel()); - } catch (Exception ex) { - error("Exception while reading the ontology file", ex); - return; - } - final Map prefixes = model.getNsPrefixMap(); - for(String s : PREFIXES.keySet()) { - if(prefixes.containsKey(s)) { - if(!prefixes.get(s).equals(PREFIXES.get(s))) { - failure(String.format("Prefix `%s:` in the ontology file is expected to be equal to `<%s>`. Got: `<%s>`", s, PREFIXES.get(s), prefixes.get(s))); - } - } - } - for(Map.Entry entry : prefixes.entrySet()) { - String s = entry.getKey(); - String l = entry.getValue(); - if(l.contains("saref")) { - if(!l.matches(SAREF.REGEX_ONTO_SERIES_URI)) { - failure(String.format("Prefix `%s:` in the ontology file contains string \"saref\", but does not seem to match the official SAREF ontologies namespaces: `\\\\%s\\\\`. Got: `<%s>`", s, SAREF.REGEX_ONTO_SERIES_URI, l)); - } - } - } - } - - private void checkExamples() { - File dir = new File(directory, "examples"); - try { - Files.walk(dir.toPath()).filter(p -> { - return p.endsWith(".ttl"); - }).forEach(p -> { - File exampleFile = p.toFile(); - Model model = ModelFactory.createDefaultModel(); - try (FileInputStream input = new FileInputStream(exampleFile)) { - model.read(input, null, Lang.TTL.getLabel()); - } catch (Exception ex) { - failure(String.format("Exception while reading the example file %s", exampleFile.toString()), - ex.getClass().getSimpleName(), ex.getMessage()); - } - }); - } catch (IOException ex) { - error("Exception while walking the example directory", ex); - } - } - - private String getRepoName() { - String REGEX = "^(?saref-core|saref4[a-z][a-z][a-z][a-z])"; - String name = directory.getName(); - Pattern pattern = Pattern.compile(REGEX); - Matcher matcher = pattern.matcher(name); - if (!matcher.find()) { - error(String.format( - "The SAREF pipeline must be run inside a directory whose name begins with `saref-core`, or `saref4abcd` (where abcd can be any sequence of four letters). 
Got: %s", - name)); - return null; - } - return matcher.group("ext"); - } - -} diff --git a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/tests/Main.java b/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/tests/Main.java deleted file mode 100644 index e7f614e94f084e4e6ff972266172ce0fc5ce442e..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/tests/Main.java +++ /dev/null @@ -1,96 +0,0 @@ -///** -// * -// */ -//package fr.emse.gitlab.saref.tests; -// -//import static fr.emse.gitlab.saref.tests.CMDConfigurations.*; -// -//import java.io.File; -//import java.io.FileWriter; -//import java.io.IOException; -//import java.util.Arrays; -//import java.util.concurrent.TimeUnit; -// -//import org.apache.commons.cli.CommandLine; -//import org.apache.commons.cli.ParseException; -//import org.apache.log4j.Level; -//import org.apache.log4j.Logger; -// -//import fr.emse.gitlab.saref.testsuites.TestCase; -//import fr.emse.gitlab.saref.testsuites.TestSuite; -// -///** -// * @author Omar Qawasmeh -// * -// * -// */ -//public class Main { -// -// private static TestCase c1 = new TestCase(); // check Ttl -// private static TestCase c2 = new TestCase(); // check prefix -// private static TestCase c3 = new TestCase(); // check shacl -// -// @SuppressWarnings("static-access") -// public static void main(String[] args) throws ParseException, IOException { -// -// Logger.getRootLogger().setLevel(Level.OFF); -// -// CommandLine cl = CMDConfigurations.parseArguments(args); -// -// // if (cl.getOptions().length == 0 || cl.hasOption(ARG_HELP)) { -// // CMDConfigurations.displayHelp(); -// // return; -// // } -// -// // String[] fileNames = cl.getOptionValues(ARG_FILE); -// // if (fileNames != null) { -// // for (String fileName : fileNames) { -// // File file = new File(fileName); -// // -// // System.out.println(file+"omar"); -// // } -// // } -// -// // String[] dataTtls = cl.getOptionValues(ARG_FILE); -// // File dataTtl = new File(dataTtls[0]); -// -// String dataTtlArgs = args[0]; -// String CI_JOB_ID = args[1]; -// String CI_PROJECT_NAME = args[2]; -// String CI_COMMIT = args[3]; -// -// // System.out.println(dataTtlArgs); -// File dataTtl = new File(dataTtlArgs); -// System.out.println("******************************************************************************"); -// System.out.println("To view your report file please visit:\n\n" -// + "http://localhost:8080/report.html?q=http://saref.gitlab.emse.fr/-/" + args[2] + "/-/jobs/" + args[1] -// + "/artifacts/report_output.xml"); -// System.out.println("******************************************************************************"); -// // String dataTtl ="src/main/resources/saref.ttl"; -// -// TestSuite ts = new TestSuite(); -// -//// ts.setName("Report for commit number \"" + CI_COMMIT + "\" of project " + CI_PROJECT_NAME); -// -// PrefixesTest prefixTst = new PrefixesTest(); -// boolean c2_Status = prefixTst.comparePrefixMap(dataTtl, c2, ts); -// -// CheckTurtleFormat checkTtl = new CheckTurtleFormat(); -// boolean c1_Status = checkTtl.checkTtlFormat(dataTtl, c1, ts); -// -// ShaclTests shaclTst = new ShaclTests(); -// boolean c3_Status = shaclTst.checkShaclShape(dataTtl, c3, ts, CI_JOB_ID, CI_PROJECT_NAME); -// -//// ts.setTestcase(Arrays.asList( c1, c2, c3 )); -// -//// if (c1_Status && c2_Status && c3_Status) { -//// ts.jaxbObjectToXML(ts); -//// } else { -//// ts.jaxbObjectToXML(ts); -//// System.exit(42); -//// -//// } -// -// } -// -//} diff --git 
a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/tests/ShaclTests.java b/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/tests/ShaclTests.java deleted file mode 100644 index 54b8d5394d9fa764679cb733672959ae8dae208e..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/tests/ShaclTests.java +++ /dev/null @@ -1,134 +0,0 @@ -///** -// * -// */ -//package fr.emse.gitlab.saref.tests; -// -//import java.io.File; -//import java.io.FileInputStream; -//import java.io.FileNotFoundException; -//import java.io.FileOutputStream; -//import java.io.OutputStream; -//import java.nio.file.Path; -//import java.nio.file.Paths; -//import java.util.HashMap; -//import java.util.HashSet; -//import java.util.Map; -//import java.util.Set; -// -//import javax.xml.bind.JAXBContext; -//import javax.xml.bind.JAXBException; -//import javax.xml.bind.Marshaller; -// -//import org.apache.jena.rdf.model.Model; -//import org.apache.jena.rdf.model.Resource; -//import org.apache.jena.riot.Lang; -//import org.apache.jena.riot.RDFDataMgr; -//import org.apache.jena.riot.RDFFormat; -//import org.slf4j.Logger; -//import org.slf4j.LoggerFactory; -//import org.topbraid.jenax.util.JenaUtil; -//import org.topbraid.shacl.validation.ValidationUtil; -//import org.topbraid.shacl.vocabulary.SH; -// -//import com.fasterxml.jackson.core.sym.Name; -// -//import fr.emse.gitlab.saref.testsuites.TestCase; -//import fr.emse.gitlab.saref.testsuites.TestSuite; -// -///** -// * @author Omar Qawasmeh -// * -// * -// */ -//public class ShaclTests { -// -// // private static Logger logger = LoggerFactory.getLogger(Main.class); -// -// public static boolean checkShaclShape(File dataTtl, TestCase tstCase, TestSuite tstSuite, String projectID, -// String projectName) throws FileNotFoundException { -// boolean testResults = true; -// -// // File fileData = new File(dataTtl); // from String to file -// FileInputStream fisDataTtl = new FileInputStream(dataTtl); // from File -// // to FIS -// -// tstCase.setClassName(ShaclTests.class.getName()); -// tstCase.setName(new Object() { -// }.getClass().getEnclosingMethod().getName()); -// -// try { -// -// Model dataModel = JenaUtil.createDefaultModel(); // data model -// dataModel.read(fisDataTtl, "", Lang.TURTLE.getLabel()); // read the -// // FIS as -// // data -// // model -// // (fisDataTtl) -// -// String shapeTtl = "sarefShape.ttl"; // shape file, fixed. -// // String shapeTtl = "src/main/resources/sarefShape.ttl"; /* shape -// // file, fixed. */ -// -// Model shapeModel = JenaUtil.createDefaultModel(); -// shapeModel.read(shapeTtl); -// -// Resource reportResource = ValidationUtil.validateModel(dataModel, shapeModel, true); -// -// boolean sameFormat = reportResource.getProperty(SH.conforms).getBoolean(); -// -// // logger.trace("Conforms = " + sameFormat); -// -// if (!sameFormat) { -// -// // System.err.println("Errorr! 
check your ttl format"); -// tstCase.setStatus("danger"); -// // tstCase.setSystemErr(new String[] { "Your Ttl file violates -// // the Shacl shape format" }); -// tstCase.setSystemErr("Your ontology file is missing some metadata"); -// -// String report = "shacl-report.ttl"; -// -// File reportFile = new File(report); -// -// reportFile.createNewFile(); -// -// OutputStream reportOutputStream = new FileOutputStream(reportFile); -// RDFDataMgr.write(reportOutputStream, reportResource.getModel(), RDFFormat.TURTLE); -// -// // Path reportPath = Paths.get(reportOutputStream.toString()); -// // System.err.println(reportFile.getAbsolutePath()); -//// tstCase.setInfo("You can download the report shape at: \n \n" + "http://saref.gitlab.emse.fr/-/" -//// + projectName + "/-/jobs/" + projectID + "/artifacts/shacl-report.ttl"); -// -// // tstSuite.setTestcase(new TestCase[] { tstCase }); -// // tstSuite.jaxbObjectToXML(tstSuite); -// -// testResults = false; -// -// // System.exit(42); -// -// } else { -// // System.out.println("Turtle format is ok"); -// // tstCase.setSystemOut(new String[] { "Your Ttl file is -// // complete and it follows our shacl shape." }); -// tstCase.setSystemOut("Your ontology file contains all the required metadata"); -// tstCase.setStatus("success"); -// testResults = true; -// -// } -// } catch (Throwable t) { -// testResults = false; -// tstCase.setStatus("danger"); -// tstCase.setSystemErr("Your ontology file is missing some metadata"); -// -// // tstCase.setSystemErr(new String[] { "Your Ttl file violates the -// // Shacl shape format" }); -// // System.err.println(t.getMessage()); -// // logger.error(t.getMessage()); -// } -// -// return testResults; -// -// } -// -//} diff --git a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/utils/SAREF.java b/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/utils/SAREF.java deleted file mode 100644 index e9b515ef809c4e06c7a5e8356bbe770af1dc9c22..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/utils/SAREF.java +++ /dev/null @@ -1,23 +0,0 @@ -package fr.emse.gitlab.saref.utils; - -import java.util.regex.Pattern; - -public class SAREF { - public final static String BASE = "https://saref.etsi.org/"; - - public static final String REGEX_EXT = "(?core|saref4[a-z][a-z][a-z][a-z])"; - public static final String REGEX_VERSION = "v(?[1-9][0-9]*)\\.(?[0-9]+)\\.(?[0-9]+)"; - - public static final String REGEX_RELEASE_BRANCH = "^refs/remotes/origin/release-" + REGEX_VERSION + "$"; - public static final Pattern REGEX_RELEASE_BRANCH_PATTERN = Pattern.compile(REGEX_RELEASE_BRANCH); - - public static final String REGEX_TERM_URI = "^" + BASE + REGEX_EXT + "/(?[^/]+)$"; - public static final Pattern REGEX_TERM_PATTERN = Pattern.compile(REGEX_TERM_URI); - - public static final String REGEX_ONTO_URI = "^" + BASE + REGEX_EXT + "/" + REGEX_VERSION + "/$"; - public static final Pattern REGEX_ONTO_PATTERN = Pattern.compile(REGEX_ONTO_URI); - - public static final String REGEX_ONTO_SERIES_URI = "^" + BASE + REGEX_EXT + "/$"; - public static final Pattern REGEX_ONTO_SERIES_PATTERN = Pattern.compile(REGEX_ONTO_SERIES_URI); - -} diff --git a/saref-pipeline-cli/src/main/java/module-info.java b/saref-pipeline-cli/src/main/java/module-info.java deleted file mode 100644 index 08f0b9f2025250f97ef08f21e86b983e98565568..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/src/main/java/module-info.java +++ /dev/null @@ -1,4 +0,0 @@ -//module fr.emse.gitlab.saref.climodule { -// requires 
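The SAREF regex constants above are consumed elsewhere through named groups (matcher.group("ext") in ReadOntologies, m.group("major"), m.group("minor"), m.group("patch") in the documentation generator). A small self-contained sketch of that matching, with an illustrative version IRI; the group names mirror how the rest of the code reads them.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SarefVersionRegexExample {
    public static void main(String[] args) {
        // Mirrors SAREF.REGEX_ONTO_URI; the sample IRI below is only an illustration.
        Pattern ontoUri = Pattern.compile(
                "^https://saref\\.etsi\\.org/(?<ext>core|saref4[a-z][a-z][a-z][a-z])/"
                + "v(?<major>[1-9][0-9]*)\\.(?<minor>[0-9]+)\\.(?<patch>[0-9]+)/$");
        Matcher m = ontoUri.matcher("https://saref.etsi.org/saref4ener/v1.1.2/");
        if (m.matches()) {
            System.out.printf("extension=%s, version=%s.%s.%s%n",
                    m.group("ext"), m.group("major"), m.group("minor"), m.group("patch"));
        }
    }
}
```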
fr.emse.gitlab.saref.api; -// requires fr.emse.gitlab.saref.entities; -//} \ No newline at end of file diff --git a/saref-pipeline-cli/src/main/resources/sarefShape.ttl b/saref-pipeline-cli/src/main/resources/sarefShape.ttl deleted file mode 100644 index 7fe53b7eae828075a8685b8b3e9bb312abf3aba5..0000000000000000000000000000000000000000 --- a/saref-pipeline-cli/src/main/resources/sarefShape.ttl +++ /dev/null @@ -1,166 +0,0 @@ -@prefix dash: . -@prefix saref: . -@prefix owl: . -@prefix rdf: . -@prefix rdfs: . -@prefix sh: . -@prefix xsd: . -@prefix dcterms: . -@prefix vann: . - -saref:DatatypeExampleShape - a sh:NodeShape ; - #sh:targetNode ; - sh:targetClass owl:Ontology ; - - #owl:Ontology - sh:property [ - sh:targetNode ; - sh:path rdf:type ; - sh:value owl:Ontology ; - sh:minCount 1 ; - sh:maxCount 1 ; - - ] ; - - - #owl:versionIRI - sh:property [ - - sh:path owl:versionIRI ; - sh:nodeKind sh:IRI ; - sh:minCount 1 ; - sh:maxCount 1 ; - - ] ; - - #owl:versionInfo - sh:property [ - - sh:path owl:versionInfo ; - sh:datatype xsd:string ; - sh:minCount 1 ; - - ] ; - - #owl:priorVersion potentially an owl:priorVersion, that points to the IRI of the previous module version; - sh:property [ - - ] ; - - #owl:imports potentially one or more owl:imports that point to other ontologies (or ontology modules) to import, in their specific version. - sh:property [ - - ] ; - - - #voaf:vocabulary - #sh:property [ - # sh:path voaf:vocabulary ; - # sh:minCount 1 ; - #] ; - - - #dctemrs:title - sh:property [ - sh:path dcterms:title ; - sh:datatype xsd:string ; - sh:minCount 1 ; - #sh:languageIn ("en") ; - ] ; - - #dcterms:description - sh:property [ - sh:path dcterms:description ; - sh:datatype xsd:string ; - sh:minCount 1 ; - #sh:languageIn ("en") ; - ] ; - - #dctemrs:issued - sh:property [ - sh:path dcterms:issued ; - sh:datatype xsd:date ; - sh:minCount 1 ; - - ] ; - - #dctemrs:modified - sh:property [ - sh:path dcterms:modified ; - sh:datatype xsd:date ; - sh:minCount 1 ; - - ] ; - - #dcterms:creator - # sh:property [ - # sh:path dcterms:creator ; - # sh:nodeKind sh:IRI ; - # sh:minCount 1 ; - #] ; - - #dcterms:contributor - sh:property [ - sh:path dcterms:contributor ; - sh:nodeKind sh:IRI ; - sh:minCount 0 ; - - ] ; - - #dcterms:publisher - sh:property [ - sh:path dcterms:publisher ; - sh:nodeKind sh:IRI ; - sh:pattern "https://www.etsi.org/" ; - sh:minCount 1 ; - sh:maxCount 1 ; - - ] ; - - #dcterms:license - sh:property [ - sh:path dcterms:license ; - sh:nodeKind sh:IRI ; - sh:pattern "https://forge.etsi.org/etsi-software-license" ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - - #vann:preferredNamespacePrefix - sh:property [ - sh:path vann:preferredNamespacePrefix ; - sh:pattern "saref" ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - - #vann:preferredNamespaceUri - sh:property [ - sh:path vann:preferredNamespaceUri ; - #sh:nodeKind sh:IRI ; - sh:pattern "https://saref.etsi.org/saref#" ; - sh:minCount 1 ; - sh:maxCount 1 ; - ] ; - - - #rdfs:label #what else to add? - # sh:property [ - # sh:targetClass owl:ObjectProperty ; - # sh:path rdfs:label ; - # sh:languageIn ( "en" "fr" "ar") ; - # ] ; - - #rdfs:comment #what else to add? - # sh:property [ - # sh:targetClass owl:ObjectProperty ; - # sh:path rdfs:comment ; - # sh:languageIn ( "en" "fr" "ar") ; - # ] . 
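The shape above is what the (now commented-out) ShaclTests class validated ontologies against. With the org.topbraid:shacl dependency declared in the new pom.xml, a minimal validation sketch looks like this; the file paths are assumptions, the validateModel call is the same one used in ShaclTests.

```java
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.RDFFormat;
import org.topbraid.shacl.validation.ValidationUtil;
import org.topbraid.shacl.vocabulary.SH;

public class SarefShapeCheck {
    public static void main(String[] args) {
        // File locations are assumptions; adjust to the actual repository layout.
        Model data = RDFDataMgr.loadModel("ontology/saref.ttl");
        Model shapes = RDFDataMgr.loadModel("sarefShape.ttl");
        Resource report = ValidationUtil.validateModel(data, shapes, true);
        boolean conforms = report.getProperty(SH.conforms).getBoolean();
        System.out.println("Conforms to sarefShape.ttl: " + conforms);
        if (!conforms) {
            // Dump the SHACL validation report for inspection.
            RDFDataMgr.write(System.out, report.getModel(), RDFFormat.TURTLE);
        }
    }
}
```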
- - - - - - diff --git a/saref-pipeline-core/.gitignore b/saref-pipeline-core/.gitignore deleted file mode 100644 index b83d22266ac8aa2f8df2edef68082c789727841d..0000000000000000000000000000000000000000 --- a/saref-pipeline-core/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target/ diff --git a/saref-pipeline-core/pom.xml b/saref-pipeline-core/pom.xml deleted file mode 100644 index c42cd4fc577a00dbb80809dbf9902b180e71723e..0000000000000000000000000000000000000000 --- a/saref-pipeline-core/pom.xml +++ /dev/null @@ -1,36 +0,0 @@ - - 4.0.0 - - - fr.emse.gitlab.saref - saref-pipeline-parent - 1.0-SNAPSHOT - ../saref-pipeline-parent/pom.xml - - - saref-pipeline-core - SAREF-Pipeline-core - jar - - - - Omar Qawasmeh - omar.alqawasmeh@emse.fr - https://perso.univ-st-etienne.fr/alo09685/ - Ecole des mines de Saint-Etienne - http://www.mines-stetienne.fr/ - +1 - - - - - - com.fasterxml.jackson.dataformat - jackson-dataformat-xml - 2.10.1 - - - - \ No newline at end of file diff --git a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/api/AbstractJobRunner.java b/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/api/AbstractJobRunner.java deleted file mode 100644 index 0538cc699e76a4f8a9e09afe0a63226adf1d98cd..0000000000000000000000000000000000000000 --- a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/api/AbstractJobRunner.java +++ /dev/null @@ -1,67 +0,0 @@ -package fr.emse.gitlab.saref.api; - -import java.io.File; -import java.io.PrintWriter; -import java.io.StringWriter; - -import fr.emse.gitlab.saref.api.JobRunner; -import fr.emse.gitlab.saref.entities.TestCase; -import fr.emse.gitlab.saref.entities.TestCase.Status; -import fr.emse.gitlab.saref.entities.TestSuite; -import fr.emse.gitlab.saref.entities.TestSuites; - -public abstract class AbstractJobRunner implements JobRunner { - - private final TestSuite testSuite = new TestSuite(); - protected final File directory; - - public AbstractJobRunner(File dir) { - this.directory = dir; - } - - public final void doJob(TestSuites testSuites) { - doJob0(); - testSuites.addTestsuite(testSuite); - } - - protected abstract void doJob0(); - - protected void error(String name, Exception ex) { - TestCase tc = new TestCase(name); - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - ex.printStackTrace(pw); - tc.setSystemErr(sw.toString()); - tc.setError(ex.getClass().getCanonicalName(), ex.getMessage()); - testSuite.addTestcase(tc); - } - - protected void error(String name) { - TestCase tc = new TestCase(name); - tc.setStatus(Status.ERROR); - testSuite.addTestcase(tc); - } - - protected void warning(String name) { - TestCase tc = new TestCase(name); - tc.setStatus(Status.WARNING); - testSuite.addTestcase(tc); - } - - protected void failure(String name) { - TestCase tc = new TestCase(name); - tc.setStatus(Status.FAILURE); - testSuite.addTestcase(tc); - } - - protected void failure(String name, String type, String message) { - TestCase tc = new TestCase(name); - tc.setFailure(type, message); - testSuite.addTestcase(tc); - } - - protected void success(String name) { - testSuite.addTestcase(new TestCase(name)); - } - -} diff --git a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/api/JobRunner.java b/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/api/JobRunner.java deleted file mode 100644 index 41e101b6920a5797a632456eb85751aab126123b..0000000000000000000000000000000000000000 --- a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/api/JobRunner.java +++ /dev/null @@ -1,9 +0,0 @@ -package 
fr.emse.gitlab.saref.api; - -import fr.emse.gitlab.saref.entities.TestSuites; - -public interface JobRunner { - - void doJob(TestSuites testSuites); - -} diff --git a/saref-pipeline-core/src/main/java/module-info.java b/saref-pipeline-core/src/main/java/module-info.java deleted file mode 100644 index 503f72e9ed5d694a6259dd00a9644f9241ce9a80..0000000000000000000000000000000000000000 --- a/saref-pipeline-core/src/main/java/module-info.java +++ /dev/null @@ -1,5 +0,0 @@ -module fr.emse.gitlab.saref.coremodule { - exports fr.emse.gitlab.saref.api; - exports fr.emse.gitlab.saref.entities; - requires java.xml.bind; -} \ No newline at end of file diff --git a/saref-pipeline-parent/pom.xml b/saref-pipeline-parent/pom.xml deleted file mode 100644 index acd39a5b3f82d8fcc425fbae5ff09e36644f3d89..0000000000000000000000000000000000000000 --- a/saref-pipeline-parent/pom.xml +++ /dev/null @@ -1,119 +0,0 @@ - - - 4.0.0 - fr.emse.gitlab.saref - saref-pipeline-parent - 1.0-SNAPSHOT - pom - - SAREF-Pipeline-Parent - The parent project for the SAREF Pipeline - 2019 - - - ../saref-pipeline-core - ../saref-pipeline-cli - ../saref-pipeline-shacl - ../saref-pipeline-sparql-generate - - - - - Maxime Lefrançois - maxime.lefrancois@emse.fr - http://maxime-lefrancois.info/ - Ecole des mines de Saint-Etienne - http://www.mines-stetienne.fr/ - +1 - - http://gravatar.com/userimage/102097381/dac8aeb15edac9a93e09a7974743957f - - - - - - École des Mines de Saint-Étienne - http://www.mines-stetienne.fr/ - - - - scm:git:https://${repository.domain}/${repository.user}/${repository.name}.git - https://${repository.domain}/${repository.user}/${repository.name} - scm:git:https://${repository.domain}/${repository.user}/${repository.name}.git - - 1.2.3 - - - - http://${repository.domain}/${repository.user}/${repository.name}/issues - GitHub Issues - - - - - The Apache Software License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - - - - - - ossrh - https://oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - - - - 11 - 11 - yyyy-MM-dd'T'HH:mm:ssZ - 1.9 - UTF-8 - UTF-8 - git - - - - - junit - junit - 4.12 - test - - - org.hamcrest - hamcrest-core - 1.3 - test - - - - - - - org.apache.maven.plugins - maven-enforcer-plugin - 3.0.0-M3 - - - enforce-maven - - enforce - - - - - 3.5.0 - - - - - - - - - diff --git a/saref-pipeline-shacl/.gitignore b/saref-pipeline-shacl/.gitignore deleted file mode 100644 index b83d22266ac8aa2f8df2edef68082c789727841d..0000000000000000000000000000000000000000 --- a/saref-pipeline-shacl/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target/ diff --git a/saref-pipeline-shacl/pom.xml b/saref-pipeline-shacl/pom.xml deleted file mode 100644 index 871737bc3f582a5f1c813270694988b92e71ff7a..0000000000000000000000000000000000000000 --- a/saref-pipeline-shacl/pom.xml +++ /dev/null @@ -1,38 +0,0 @@ - - 4.0.0 - - - fr.emse.gitlab.saref - saref-pipeline-parent - 1.0-SNAPSHOT - ../saref-pipeline-parent/pom.xml - - - saref-pipeline-shacl - SAREF-Pipeline-SHACL - jar - - - - Omar Qawasmeh - omar.alqawasmeh@emse.fr - https://perso.univ-st-etienne.fr/alo09685/ - Ecole des mines de Saint-Etienne - http://www.mines-stetienne.fr/ - +1 - - - - - - - - - - diff --git a/saref-pipeline-shacl/src/main/java/module-info.java b/saref-pipeline-shacl/src/main/java/module-info.java deleted file mode 100644 index fc69faa4be0dd28f27f90cf3bcbdf0e97a63e66b..0000000000000000000000000000000000000000 --- a/saref-pipeline-shacl/src/main/java/module-info.java 
+++ /dev/null @@ -1,2 +0,0 @@ -module fr.emse.gitlab.saref.shaclmodule { -} \ No newline at end of file diff --git a/saref-pipeline-sparql-generate/.gitignore b/saref-pipeline-sparql-generate/.gitignore deleted file mode 100644 index b83d22266ac8aa2f8df2edef68082c789727841d..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target/ diff --git a/saref-pipeline-sparql-generate/pom.xml b/saref-pipeline-sparql-generate/pom.xml deleted file mode 100644 index 8009d0de6f599a5c8a13ebe596b59243cab95e7a..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/pom.xml +++ /dev/null @@ -1,40 +0,0 @@ - - 4.0.0 - - - fr.emse.gitlab.saref - saref-pipeline-parent - 1.0-SNAPSHOT - ../saref-pipeline-parent/pom.xml - - - saref-pipeline-sparql-generate - SAREF-Pipeline-SAREF-Pipeline - jar - - - 2.0-SNAPSHOT - - - - - - - - - - - - - - - - - - - - - - diff --git a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/Documentation.java b/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/Documentation.java deleted file mode 100644 index 888679ed4da2a65468ffae0e16643619343df2d9..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/Documentation.java +++ /dev/null @@ -1,560 +0,0 @@ -package fr.emse.gitlab.saref.sparqlext; - -import fr.emse.gitlab.saref.api.JobRunner; -import fr.emse.gitlab.saref.entities.TestSuites; - -public class Documentation implements JobRunner { - - @Override - public void doJob(TestSuites testSuites) { - System.out.println("hello"); - - } - -// static final Logger LOG = LoggerFactory.getLogger(Documentation.class); -// -// static final Gson gson = new Gson(); -// -// static final Pattern REGEX_REPO_PATTERN = Pattern.compile("^saref(-core|4[a-z][a-z][a-z][a-z])$", Pattern.CASE_INSENSITIVE); -// -// static final File GIT_DIR = new File("target/sources"); -// static final File SITE_DIR = new File("target/site"); -// static final File STATIC_TARGET_DIR = new File("target/site/static"); -// static final String DATASET_DIR = "target/tdb"; -// -// static final String NS = "https://saref.etsi.org/"; -// private static final String REGEX_EXT = "(?core|saref4[a-z][a-z][a-z][a-z])"; -// private static final String REGEX_VERSION = "v(?[1-9][0-9]*)\\.(?[0-9]+)\\.(?[0-9]+)"; -// -// private static final String REGEX_RELEASE_BRANCH = "^refs/remotes/origin/release-" + REGEX_VERSION + "$"; -// private static final Pattern REGEX_RELEASE_BRANCH_PATTERN = Pattern.compile(REGEX_RELEASE_BRANCH); -// -// private static final String REGEX_TERM_URI = "^" + NS + REGEX_EXT + "/(?[^/]+)$"; -// private static final Pattern REGEX_TERM_PATTERN = Pattern.compile(REGEX_TERM_URI); -// -// private static final String REGEX_ONTO_URI = "^" + NS + REGEX_EXT + "/" + REGEX_VERSION + "/$"; -// private static final Pattern REGEX_ONTO_PATTERN = Pattern.compile(REGEX_ONTO_URI); -// -// private static final String REGEX_EXT_URI = "^" + NS + REGEX_EXT + "/"; -// private static final Pattern REGEX_EXT_PATTERN = Pattern.compile(REGEX_EXT_URI); -// -// private static Dataset dataset; -// private static final Model config = ModelFactory.createDefaultModel(); -// private static final Map ngs = new HashMap<>(); -// -// private static final Resource ETSI_URL = ResourceFactory.createResource("https://www.etsi.org/"); -// private static final Resource ETSI_LICENSE = ResourceFactory -// 
.createResource("https://forge.etsi.org/etsi-software-license"); -// -// private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd"); -// -// static final String DOC_NS = NS + "documentation/"; -// private static final String TERM_QUERY = "term/main.rqg"; -// private static final Var VAR_GRAPH = VarUtils.allocVar("term"); -// -// static final String urlString = "https://forge.etsi.org/rep/api/v4/groups/saref"; -// -// static final boolean useTDB = true; -// -// -// -// public static void main(String[] args) throws MalformedURLException, IOException { -// // do we have a directory named ontology here ? if not, we assume that we are executing from a repo. -// File ontologyDir = new File("ontology"); -// -// -// -// dataset = TDBFactory.createDataset(DATASET_DIR); -//// Model m = dataset.getNamedModel("https://saref.etsi.org/core/accomplishes"); -//// m.write(System.out, "TTL"); -// try { -// checkFolderStructure(); -// -// checkUnusedTerms(); -// -// checkNameConflicts(); -// -// Group group = readGroup(); -// -// cloneRepos(group); -// -//// Group group = new Group(); -//// List projects = new ArrayList<>(); -//// group.projects = projects; -//// -//// Project repo = new Project(); -//// repo.name = "saref-core"; -//// repo.directory = new File(GIT_DIR, repo.name); -//// repo.namespace = getNamespace(repo); -//// repo.prefix = getPrefix(repo); -//// projects.add(repo); -//// -//// repo = new Project(); -//// repo.name = "saref4ener"; -//// repo.directory = new File(GIT_DIR, repo.name); -//// repo.namespace = getNamespace(repo); -//// repo.prefix = getPrefix(repo); -//// projects.add(repo); -// -// readRepos(group); -// -// writeToTDBDataset(); -// -// FileUtils.forceMkdir(SITE_DIR); -// copyStaticFiles(); -// -// SPARQLExtStreamManager sm = initializeStreamManager(); -// -// RootPlan planForTerm = createPlanForTerm(sm); -// -// generateFiles(sm, planForTerm); -// } finally { -// dataset.close(); -// } -// -// } -// -// private static void generateFiles(SPARQLExtStreamManager sm, RootPlan planForTerm) throws IOException { -// dataset.begin(ReadWrite.READ); -// for (Iterator it = dataset.listNames(); it.hasNext();) { -// String name = it.next(); -// System.out.println("# Write " + name); -// if (name.equals(EX.config)) { -// continue; -// } -// Model model = dataset.getNamedModel(name); -// String fileName = name.substring(NS.length()); -// if (fileName.endsWith("/")) { -// fileName = fileName + fileName.substring(0, fileName.indexOf("/")); -// } -// FileUtils.forceMkdir(new File(SITE_DIR, fileName).getParentFile()); -// // File file = new File(SITE_DIR, String.format("%s.ttl", fileName)); -// // try (FileOutputStream fos = new FileOutputStream(file)) { -// // model.write(fos, "TTL"); -// // } -// for (Languages l : Languages.values()) { -// File file = new File(SITE_DIR, String.format("%s.%s", fileName, l.getExt())); -// try (FileOutputStream fos = new FileOutputStream(file)) { -// model.write(fos, l.getLang()); -// } -// } -// try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(new File(SITE_DIR, fileName + ".html")));) { -// Context context = ContextUtils.build(writer).setBase(NS).setDebugTemplate(true).setInputDataset(dataset) -// .setStreamManager(sm) -// .build(); -// BindingHashMap binding = new BindingHashMap(); -// binding.add(VAR_GRAPH, NodeFactory.createURI(name)); -// List bindings = new ArrayList<>(); -// bindings.add(binding); -//// planForTerm.execTemplateStream(bindings, context); -// } -// } -// dataset.end(); -// } -// -// 
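checkUnusedTerms(), called from the main flow above, is a stub whose intended SPARQL query only appears in a comment. A hedged sketch of running that query against the TDB dataset; the dataset path and the config graph name are taken from the DATASET_DIR constant and the EX helper class defined at the end of this file, the rest is an assumption.

```java
import org.apache.jena.query.Dataset;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.query.ResultSet;
import org.apache.jena.tdb.TDBFactory;

public class UnusedTermsReport {
    public static void main(String[] args) {
        Dataset dataset = TDBFactory.createDataset("target/tdb");
        String query =
                "PREFIX ex: <http://example.org/>\n"
              + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
              + "SELECT ?term ?version WHERE {\n"
              + "  ?term ex:isUsedBy ?version .\n"
              + "  MINUS { ?term rdfs:isDefinedBy ?v2 . }\n"
              + "}";
        dataset.begin(ReadWrite.READ);
        // The ex:isUsedBy / rdfs:isDefinedBy triples are written to the config graph in readRepo.
        try (QueryExecution exec = QueryExecutionFactory.create(
                query, dataset.getNamedModel("http://example.org/config"))) {
            ResultSet results = exec.execSelect();
            while (results.hasNext()) {
                QuerySolution row = results.next();
                System.out.println(row.get("term") + " is used by " + row.get("version")
                        + " but defined nowhere");
            }
        } finally {
            dataset.end();
        }
    }
}
```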
private static RootPlan createPlanForTerm(SPARQLExtStreamManager sm) throws IOException { -// String query; -// query = IOUtils.toString(sm.open(new LookUpRequest(TERM_QUERY, SPARQLExt.MEDIA_TYPE)), StandardCharsets.UTF_8); -// return PlanFactory.create(query, DOC_NS); -// } -// -// private static void checkFolderStructure() { -// -// } -// -// private static void checkUnusedTerms() { -// /* -// * Query to find terms that are not defined in any ontology: -// * -// * PREFIX ex: PREFIX rdfs: -// * SELECT ?term ?version WHERE { -// * -// * ?term ex:isUsedBy ?version . MINUS { ?term rdfs:isDefinedBy ?v2 . } } -// * -// */ -// } -// -// private static void checkNameConflicts() { -// /* -// * Query to find local name conflicts in different ontologies -// * -// * PREFIX ex: PREFIX rdfs: -// * SELECT DISTINCT * WHERE { -// * -// * ?term1 ex:localName ?name ; rdfs:isDefinedBy ?v1 . ?term2 ex:localName ?name -// * ; rdfs:isDefinedBy ?v2 . FILTER( ?term1 != ?term2 ) } -// * -// */ -// } -// -// private static SPARQLExtStreamManager initializeStreamManager() { -// File documentationDir = new File(Main.class.getClassLoader().getResource("documentation").getFile()); -// Path dirPath = Paths.get(documentationDir.toURI()); -// LocatorFileAccept locator = new LocatorFileAccept(documentationDir.toURI().getPath()); -// LocationMapperAccept mapper = new LocationMapperAccept(); -// SPARQLExtStreamManager sm = SPARQLExtStreamManager.makeStreamManager(locator); -// sm.setLocationMapper(mapper); -// try { -// Files.walk(dirPath).filter((p) -> { -// return p.toFile().isFile(); -// }).forEach((p) -> { -// String relativePath = dirPath.relativize(p).toString(); -// String fileurl = DOC_NS + relativePath.replace("\\", "/"); -// mapper.addAltEntry(fileurl, p.toString()); -// }); -// } catch (IOException ex) { -// LOG.warn("Error while computing the URIs for the files in the working directory.", ex); -// } -// return sm; -// } -// -// private static void copyStaticFiles() throws IOException { -// File staticDir = new File(Main.class.getClassLoader().getResource("static").getFile()); -// FileUtils.copyDirectory(staticDir, STATIC_TARGET_DIR); -// } -// -// private static Group readGroup() throws IOException { -// final URL url = new URL(urlString); -// InputStreamReader reader = new InputStreamReader(url.openStream()); -// return gson.fromJson(reader, Group.class); -// } -// -// private static void writeToTDBDataset() { -// dataset.begin(ReadWrite.WRITE); -// dataset.addNamedModel(EX.config, config); -// for (String name : ngs.keySet()) { -// dataset.addNamedModel(name, ngs.get(name)); -// } -// -// Model defaultModel = dataset.getDefaultModel(); -// defaultModel.add(ResourceFactory.createResource("s"), ResourceFactory.createProperty("p"), ResourceFactory.createResource("o")); -// dataset.commit(); -// } -// -// private static void readRepos(Group group) throws IOException { -// for (Project repo : group.projects) { -// try (Git git = Git.open(repo.directory);) { -// readRepo(repo, git); -// } -// } -// } -// -// static String getNamespace(String graphName) { -// Matcher m = REGEX_EXT_PATTERN.matcher(graphName); -// if (!m.find()) { -// throw new IllegalArgumentException("got " + graphName); -// } -// return NS + m.group("ext") + "/"; -// } -// -// static String getOntologyFileName(Project project) { -// if (project.name.equals("saref-core")) { -// return "saref.ttl"; -// } else { -// return String.format("%s.ttl", project.name); -// } -// } -// -// static String getNamespace(Project project) { -// if 
(project.name.equals("saref-core")) { -// return String.format("%score/", NS); -// } else { -// return String.format("%s%s/", NS, project.name); -// } -// } -// -// static String getPrefix(String graphName) { -// Matcher m = REGEX_EXT_PATTERN.matcher(graphName); -// if (!m.find()) { -// throw new IllegalArgumentException("got " + graphName); -// } -// String ext = m.group("ext"); -// if(ext.equals("core")) { -// return "saref:"; -// } else { -// String shortName = ext.substring(5); -// return String.format("s%s:", shortName); -// } -// } -// -// static String getPrefix(Project project) { -// if (project.name.equals("saref-core")) { -// return "saref:"; -// } else { -// String shortName = project.name.substring(5); -// return String.format("s%s:", shortName); -// } -// } -// -// static String getVersionURI(Project project, Version version) { -// if (project.name.equals("saref-core")) { -// return String.format("%s%sv%s.%s.%s/", NS, "core/", version.major, version.minor, version.patch); -// } else { -// return String.format("%s%s/v%s.%s.%s/", NS, project.name, version.major, version.minor, version.patch); -// } -// } -// -// static String getVersionPrefix(Project project, Version version) { -// if (project.name.equals("saref-core")) { -// return String.format("saref-%s.%s.%s:", version.major, version.minor, version.patch); -// } else { -// String shortName = project.name.substring(5); -// return String.format("s%s-%s.%s.%s:", shortName, version.major, version.minor, version.patch); -// } -// } -// -// private static void cloneRepos(Group group) throws IOException { -// FileUtils.forceMkdir(GIT_DIR); -// for (Project repo : new ArrayList<>(group.projects)) { -// if (!REGEX_REPO_PATTERN.matcher(repo.name).matches()) { -// group.projects.remove(repo); -// continue; -// } -// repo.directory = new File(GIT_DIR, repo.name); -// repo.namespace = getNamespace(repo); -// repo.prefix = getPrefix(repo); -// if (repo.directory.exists()) { -// continue; -// } -// LOG.debug("Cloning project " + repo.name); -// try (Git git = Git.cloneRepository().setURI(repo.http_url_to_repo).setDirectory(repo.directory).call()) { -// } catch (Exception ex) { -// LOG.warn("Could not clone project " + repo.name, ex); -// } -// } -// } -// -// private static void readRepo(Project repo, Git git) { -// System.out.println("project" + repo.name); -// -// repo.resource = ResourceFactory.createResource(repo.namespace); -// config.add(repo.resource, RDF.type, OWL2.Ontology); -// -// try { -// List remoteBranches = git.branchList().setListMode(ListBranchCommand.ListMode.REMOTE).call(); -// for (Ref ref : remoteBranches) { -// String branch = ref.getName(); -// Matcher m = REGEX_RELEASE_BRANCH_PATTERN.matcher(branch); -// if (!m.find()) { -// continue; -// } -// Version version = new Version(); -// version.major = Integer.parseInt(m.group("major")); -// version.minor = Integer.parseInt(m.group("minor")); -// version.patch = Integer.parseInt(m.group("patch")); -// version.uri = getVersionURI(repo, version); -// version.resource = ResourceFactory.createResource(version.uri); -// version.prefix = getVersionPrefix(repo, version); -// version.ref = ref; -// RevCommit commit = git.log().add(version.ref.getObjectId()).call().iterator().next(); -// version.issued = commit.getCommitterIdent().getWhen(); -// repo.releases.add(version); -// System.out.println("version " + version); -// } -// -// // order versions -// Collections.sort(repo.releases, (Version o1, Version o2) -> { -// if (o1.major - o2.major != 0) { -// return o1.major - 
o2.major; -// } -// if (o1.minor - o2.minor != 0) { -// return o1.minor - o2.minor; -// } -// return o1.patch - o2.patch; -// }); -// -// for (int i = 0; i < repo.releases.size(); i++) { -// Version version = repo.releases.get(i); -// -// Model onto = readOntology(repo, version); -// ngs.put(version.uri, onto); -// -// // compute some metadata -// Version priorVersion = i == 0 ? null : repo.releases.get(i - 1); -// addOntologyMetadata(onto, repo, git, version, priorVersion); -// -// // for each term, add: -// // some triples in the default graph -// // -> used to detect naming clashes -// // some triples in the named graph of the term -// // -> used to display the page of the term -// Set definedTerms = new HashSet<>(); -// Set usedTerms = new HashSet<>(); -// computeTerms(onto, repo, version, definedTerms, usedTerms); -// config.add(repo.resource, EX.hasVersion, version.resource); -// config.add(version.resource, RDF.type, EX.OntologyVersion); -// config.add(version.resource, EX.versionInfo, -// String.format("v%s.%s.%s", version.major, version.minor, version.patch)); -// if (priorVersion != null) { -// config.add(version.resource, EX.priorVersion, priorVersion.resource); -// } -// -// for (Resource t : definedTerms) { -// Model termModel = getNamedModel(t); -// String localName = t.getURI().substring(repo.namespace.length()); -// termModel.add(t, RDFS.isDefinedBy, version.resource); -// // keep the most recent definition of the term -// if (i == repo.releases.size() - 1) { -// termModel.add(t, DCTerms.modified, SIMPLE_DATE_FORMAT.format(version.issued), -// XSDDatatype.XSDdate); -// try (QueryExecution exec = QueryExecutionFactory.create("DESCRIBE <" + t.getURI() + ">", -// onto)) { -// termModel.add(exec.execDescribe()); -// } -// } -// config.add(t, RDF.type, RDFP.Resource); -// config.add(t, EX.localName, localName); -// config.add(t, RDFS.isDefinedBy, version.resource); -// } -// -// for (Resource t : usedTerms) { -// Model termModel = getNamedModel(t); -// termModel.add(t, EX.isUsedBy, version.resource); -// config.add(t, RDF.type, RDFP.Resource); -// config.add(t, EX.isUsedBy, version.resource); -// } -// } -// } catch (Exception ex) { -// LOG.warn("Exception for " + repo.name, ex); -// } -// } -// -// private static Model readOntology(Project repo, Version version) throws Exception { -// // checkout the HEAD of the release branch -// Git.open(repo.directory).checkout().setName(version.ref.getName()).call(); -// Model onto = ModelFactory.createDefaultModel(); -// // read the ontology -// File ontologyFile = new File(repo.directory, "ontology/" + getOntologyFileName(repo)); -// try (FileReader fr = new FileReader(ontologyFile)) { -// onto.read(fr, NS, "TTL"); -// } -// return onto; -// } -// -// private static void addOntologyMetadata(Model onto, Project repo, Git git, Version version, Version priorVersion) -// throws Exception { -// onto.add(repo.resource, RDF.type, OWL2.Ontology); -// -// // we assume dc:title, dc:description, rdfs:comment, are defined, and have -// // language tags -// onto.removeAll(repo.resource, OWL2.versionIRI, null); -// onto.add(repo.resource, OWL2.versionIRI, version.resource); -// -// onto.removeAll(repo.resource, OWL2.versionInfo, null); -// onto.add(repo.resource, OWL2.versionInfo, -// String.format("v%s.%s.%s", version.major, version.minor, version.patch)); -// -// onto.removeAll(repo.resource, OWL2.priorVersion, null); -// if (priorVersion != null) { -// onto.add(repo.resource, OWL2.priorVersion, priorVersion.resource); -// } -// -// 
onto.removeAll(repo.resource, DCTerms.publisher, null); -// onto.add(repo.resource, DCTerms.publisher, ETSI_URL); -// -// onto.removeAll(repo.resource, DCTerms.license, null); -// onto.add(repo.resource, DCTerms.license, ETSI_LICENSE); -// -// // list of contributors -// Set contributors = new HashSet<>(); -// for (Iterator it = git.log().add(version.ref.getObjectId()).call().iterator(); it.hasNext();) { -// RevCommit commit = it.next(); -// Contributor contributor = new Contributor(commit.getCommitterIdent()); -// contributors.add(contributor); -// } -// for (Contributor contributor : contributors) { -// // git can only provide name + email. -// // would need to maintain some list of mapping email -> url in the future. -// Resource anon = onto.createResource(); -// onto.add(repo.resource, DCTerms.contributor, anon); -// onto.add(anon, FOAF.name, contributor.getName()); -// onto.add(anon, FOAF.mbox, contributor.getEmailAddress()); -// } -// -// // prefixes -// onto.removeAll(repo.resource, VANN.preferredNamespacePrefix, null); -// onto.removeAll(repo.resource, VANN.preferredNamespaceUri, null); -// onto.add(repo.resource, VANN.preferredNamespacePrefix, repo.prefix); -// onto.add(repo.resource, VANN.preferredNamespaceUri, repo.namespace, XSDDatatype.XSDanyURI); -// -// // issued -// onto.removeAll(repo.resource, DCTerms.issued, null); -// onto.add(repo.resource, DCTerms.issued, SIMPLE_DATE_FORMAT.format(version.issued), XSDDatatype.XSDdate); -// } -// -// private static void computeTerms(Model onto, Project repo, Version version, Set definedTerms, -// Set usedTerms) { -// onto.listStatements().forEachRemaining(stmt -> { -// Resource s = stmt.getSubject(); -// Resource p = stmt.getPredicate(); -// Resource o = stmt.getObject().isResource() ? (Resource) stmt.getObject() : null; -// computeTerms(s, repo, version, definedTerms, usedTerms); -// computeTerms(p, repo, version, definedTerms, usedTerms); -// computeTerms(o, repo, version, definedTerms, usedTerms); -// }); -// } -// -// private static void computeTerms(Resource t, Project repo, Version version, Set definedTerms, -// Set usedTerms) { -// if (t == null || !t.isURIResource() || !t.getURI().startsWith(NS) || t.getURI().endsWith("/")) { -// return; -// } -// String uri = t.getURI(); -// if (uri.startsWith(repo.namespace)) { -// definedTerms.add(t); -// } else if (!uri.startsWith(repo.namespace)) { -// usedTerms.add(t); -// } -// } -// -// private static Model getNamedModel(Resource t) { -// String uri = t.getURI(); -// if (ngs.containsKey(uri)) { -// return ngs.get(uri); -// } else { -// Model model = ModelFactory.createDefaultModel(); -// model.add(t, RDF.type, RDFS.Resource); -// ngs.put(uri, model); -// return model; -// } -// } -// -// private static class EX { -// -// private static String NS = "http://example.org/"; -// private static String config = NS + "config"; -// private static Property localName = ResourceFactory.createProperty(NS, "localName"); -// private static Property hasVersion = ResourceFactory.createProperty(NS, "hasVersion"); -// private static Property versionInfo = ResourceFactory.createProperty(NS, "versionInfo"); -// private static Property priorVersion = ResourceFactory.createProperty(NS, "priorVersion"); -// private static Property isUsedBy = ResourceFactory.createProperty(NS, "isUsedBy"); -// private static Resource OntologyVersion = ResourceFactory.createResource(NS + "OntologyVersion"); -// } -// -// private static class VANN { -// -// private static String NS = "http://purl.org/vocab/vann/"; -// private 
static Property preferredNamespacePrefix = ResourceFactory.createProperty(NS, -// "preferredNamespacePrefix"); -// private static Property preferredNamespaceUri = ResourceFactory.createProperty(NS, "preferredNamespaceUri"); -// } -// -// private static class RDFP { -// public static final String NS = "https://w3id.org/rdfp/"; -// public static final Property presentedBy = ResourceFactory.createProperty(NS + "presentedBy"); -// public static final Property presentationFor = ResourceFactory.createProperty(NS + "presentationForq"); -// public static final Property loweringRule = ResourceFactory.createProperty(NS + "loweringRule"); -// public static final Property liftingRule = ResourceFactory.createProperty(NS + "liftingRule"); -// public static final Property validationRule = ResourceFactory.createProperty(NS + "validationRule"); -// public static final Resource Graph = ResourceFactory.createResource(NS + "Graph"); -// public static final Resource Resource = ResourceFactory.createResource(NS + "Resource"); -// public static final Property representedBy = ResourceFactory.createProperty(NS + "representedBy"); -// public static final Property mediaType = ResourceFactory.createProperty(NS + "mediaType"); -// public static final Property alias = ResourceFactory.createProperty(NS + "alias"); -// public static final Property describedBy = ResourceFactory.createProperty(NS + "describedBy"); -// public static final Property filePath = ResourceFactory.createProperty(NS + "filePath"); -// public static final Resource Ontology = ResourceFactory.createResource(NS + "Ontology"); -// public static final Property fileSelector = ResourceFactory.createProperty(NS + "fileSelector"); -// } - -} diff --git a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Contributor.java b/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Contributor.java deleted file mode 100644 index 346a19928070e351ffe1c7c7f53e45f3ab5dea15..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Contributor.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2020 École des Mines de Saint-Étienne. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package fr.emse.gitlab.saref.sparqlext.entities; - -import org.eclipse.jgit.lib.PersonIdent; - -/** - * - * @author maxime.lefrancois - */ -public class Contributor { - - private final String name; - - private final String emailAddress; - - public Contributor(PersonIdent person) { - name = person.getName(); - emailAddress = person.getEmailAddress(); - } - - public Contributor(String aName, String aEmailAddress) { - name = aName; - emailAddress = aEmailAddress; - } - - /** - * Get name of person - * - * @return Name of person - */ - public String getName() { - return name; - } - - /** - * Get email address of person - * - * @return email address of person - */ - public String getEmailAddress() { - return emailAddress; - } - - /** - * {@inheritDoc} - *
<p>
- * Hashcode is based only on the email address. - */ - @Override - public int hashCode() { - return getEmailAddress().hashCode(); - } - - /** - * {@inheritDoc} - */ - @Override - public boolean equals(Object o) { - if (o instanceof Contributor) { - final Contributor p = (Contributor) o; - return getName().equals(p.getName()) - && getEmailAddress().equals(p.getEmailAddress()); - } - return false; - } - - /** - * {@inheritDoc} - */ - @Override - public String toString() { - final StringBuilder r = new StringBuilder(); - - r.append("Contributor["); - r.append(getName()); - r.append(", "); - r.append(getEmailAddress()); - r.append("]"); - - return r.toString(); - } -} diff --git a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Group.java b/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Group.java deleted file mode 100644 index c9cab8819d37712370364af27bba8dd20ee7e6de..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Group.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2020 École des Mines de Saint-Étienne. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package fr.emse.gitlab.saref.sparqlext.entities; - -import java.util.List; - -/** - * - * @author maxime.lefrancois - */ -public class Group { - public List projects; -} diff --git a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Project.java b/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Project.java deleted file mode 100644 index a5988c505ebc27df4ef7a6a8bc4e0972ec2d0e36..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Project.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2020 École des Mines de Saint-Étienne. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package fr.emse.gitlab.saref.sparqlext.entities; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import org.apache.jena.rdf.model.Resource; - -/** - * - * @author maxime.lefrancois - */ -public class Project { - - public String name; - public String http_url_to_repo; - public transient File directory; - public transient List releases = new ArrayList<>(); - public transient String namespace; - public transient String prefix; - public transient Resource resource; -} diff --git a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Version.java b/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Version.java deleted file mode 100644 index dfc9bb95280ba53b76fcb2ee461c8386ca0f3ef1..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Version.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2020 École des Mines de Saint-Étienne. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package fr.emse.gitlab.saref.sparqlext.entities; - -import java.util.Date; -import java.util.Set; -import org.apache.jena.rdf.model.Resource; -import org.eclipse.jgit.lib.Ref; - -/** - * - * @author maxime.lefrancois - */ -public class Version { - - public int major; - public int minor; - public int patch; - public Ref ref; - public String uri; - public String prefix; - public Date issued; - public transient Resource resource; - - @Override - public String toString() { - return String.format("v%s.%s.%s", major, minor, patch); - } - - -} diff --git a/saref-pipeline-sparql-generate/src/main/java/module-info.java b/saref-pipeline-sparql-generate/src/main/java/module-info.java deleted file mode 100644 index 62660dd99c3e421c7bca689dc0194a7893ead220..0000000000000000000000000000000000000000 --- a/saref-pipeline-sparql-generate/src/main/java/module-info.java +++ /dev/null @@ -1,7 +0,0 @@ -import fr.emse.gitlab.saref.api.JobRunner; -import fr.emse.gitlab.saref.sparqlext.Documentation; - -module fr.emse.gitlab.saref.sparqlextmodule { - requires fr.emse.gitlab.saref.core; - provides JobRunner with Documentation; -} \ No newline at end of file diff --git a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/CMDConfigurations.java b/src/main/java/fr/emse/gitlab/saref/CMDConfigurations.java similarity index 64% rename from saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/CMDConfigurations.java rename to src/main/java/fr/emse/gitlab/saref/CMDConfigurations.java index cf89195f0fbcfcdc089a90bbe2f888a3bff85e2b..2fc83e29732e07d23274585b2b6618fce036a9ba 100644 --- a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/CMDConfigurations.java +++ b/src/main/java/fr/emse/gitlab/saref/CMDConfigurations.java @@ -25,6 +25,15 @@ public class CMDConfigurations { public static final String ARG_DIRECTORY_DEFAULT = ""; public static final String ARG_DIRECTORY_LONG = "dir"; public static final String ARG_DIRECTORY_MAN = "Location of the SAREF 
extension directory (default is .)"; + public static final String ARG_INCLUDE_ALL = "a"; + public static final String ARG_INCLUDE_ALL_LONG = "all"; + public static final String ARG_INCLUDE_ALL_MAN = "Include all branches (not for production)"; + public static final String ARG_INCLUDE_MASTER = "m"; + public static final String ARG_INCLUDE_MASTER_LONG = "master"; + public static final String ARG_INCLUDE_MASTER_MAN = "Include the HEAD of the master branches (not for production)"; + public static final String ARG_PRODUCTION = "p"; + public static final String ARG_PRODUCTION_LONG = "prod"; + public static final String ARG_PRODUCTION_MAN = "Production mode"; public static CommandLine parseArguments(String[] args) throws ParseException { DefaultParser commandLineParser = new DefaultParser(); @@ -35,7 +44,10 @@ public class CMDConfigurations { public static Options getCMDOptions() { return new Options().addOption(ARG_HELP, ARG_HELP_LONG, false, ARG_HELP_MAN) .addOption(ARG_INIT, ARG_INIT_LONG, true, ARG_INIT_MAN) - .addOption(ARG_DIRECTORY, ARG_DIRECTORY_LONG, true, ARG_DIRECTORY_MAN); + .addOption(ARG_DIRECTORY, ARG_DIRECTORY_LONG, true, ARG_DIRECTORY_MAN) + .addOption(ARG_INCLUDE_MASTER, ARG_INCLUDE_MASTER_LONG, false, ARG_INCLUDE_MASTER_MAN) + .addOption(ARG_INCLUDE_ALL, ARG_INCLUDE_ALL_LONG, false, ARG_INCLUDE_ALL_MAN) + .addOption(ARG_PRODUCTION, ARG_PRODUCTION_LONG, false, ARG_PRODUCTION_MAN); } public static void displayHelp() { diff --git a/src/main/java/fr/emse/gitlab/saref/Constants.java b/src/main/java/fr/emse/gitlab/saref/Constants.java new file mode 100644 index 0000000000000000000000000000000000000000..1946b65e336b7bc2a3e05b1e2b97de9a78a67f01 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/Constants.java @@ -0,0 +1,46 @@ +package fr.emse.gitlab.saref; + +import java.io.File; +import java.util.regex.Pattern; + +public class Constants { + public final static String BASE = "https://saref.etsi.org/"; + + public static final String REGEX_EXT = "(?core|saref4[a-z]{4})"; + public static final String REGEX_VERSION = "v(?[1-9][0-9]*)\\.(?[0-9]+)\\.(?[0-9]+)"; + + public static final String REGEX_MASTER_BRANCH = "^refs/remotes/origin/master$"; + public static final String REGEX_RELEASE_BRANCH = "^refs/remotes/origin/release-" + REGEX_VERSION + "$"; + public static final String REGEX_OTHER_BRANCH = "^refs/remotes/origin/(?[^/]+)$"; + public static final Pattern REGEX_RELEASE_BRANCH_PATTERN = Pattern.compile(REGEX_RELEASE_BRANCH); + public static final Pattern REGEX_OTHER_BRANCH_PATTERN = Pattern.compile(REGEX_OTHER_BRANCH); + + public static final String REGEX_TERM_URI = "^" + BASE + REGEX_EXT + "/(?[^/]+)$"; + public static final Pattern REGEX_TERM_PATTERN = Pattern.compile(REGEX_TERM_URI); + + public static final String REGEX_ONTO_URI = "^" + BASE + REGEX_EXT + "/" + REGEX_VERSION + "/$"; + public static final Pattern REGEX_ONTO_PATTERN = Pattern.compile(REGEX_ONTO_URI); + + public static final String REGEX_ONTO_SERIES_URI = "^" + BASE + REGEX_EXT + "/$"; + public static final Pattern REGEX_ONTO_SERIES_PATTERN = Pattern.compile(REGEX_ONTO_SERIES_URI); + + // output + + public static final String TARGET_DIR = "target"; + public static final String GIT_DIR = TARGET_DIR + File.separator + "sources"; + public static final String DATASET_DIR = TARGET_DIR + File.separator + "tdb"; + public static final String LOG_FILE_NAME = TARGET_DIR + File.separator + "output.log"; + public static final String SITE_DIR = TARGET_DIR + File.separator + "site"; + public static final String STATIC_TARGET_DIR 
= SITE_DIR + File.separator + "static"; + + public static final String SAREF_ACCESS_TOKEN = "SAREF_ACCESS_TOKEN"; + + public static final String SAREF_ACCESS_USERNAME = "SAREF_ACCESS_USERNAME"; + + public static final String propertyFile = ".saref-repositories"; + + public static boolean INCLUDE_MASTER = false; + public static boolean INCLUDE_ALL = false; + public static boolean PRODUCTION = false; + +} diff --git a/src/main/java/fr/emse/gitlab/saref/CopyStaticFiles.java b/src/main/java/fr/emse/gitlab/saref/CopyStaticFiles.java new file mode 100644 index 0000000000000000000000000000000000000000..1bb81ab21c14954904eed379c1cb17c26b4499d8 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/CopyStaticFiles.java @@ -0,0 +1,29 @@ +package fr.emse.gitlab.saref; + +import java.io.File; + +import org.apache.commons.io.FileUtils; + +import fr.emse.gitlab.saref.jobs.AbstractJobRunner; + +public class CopyStaticFiles extends AbstractJobRunner { + + + public CopyStaticFiles(File dir) { + super("Copy the static files", dir); + } + + @Override + public void doJob0() { + try { + File siteDir = new File(directory, Constants.SITE_DIR); + FileUtils.forceMkdir(siteDir); + File staticDir = new File(Main.class.getClassLoader().getResource("static").getFile()); + File staticTargetDir = new File(directory, Constants.STATIC_TARGET_DIR); + FileUtils.copyDirectory(staticDir, staticTargetDir); + } catch(Exception ex ) { + error("Exception while copying the static files", ex); + } + } + +} diff --git a/src/main/java/fr/emse/gitlab/saref/Main.java b/src/main/java/fr/emse/gitlab/saref/Main.java new file mode 100644 index 0000000000000000000000000000000000000000..c5f09e4ed7ae2705657554787e815c47b543e041 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/Main.java @@ -0,0 +1,137 @@ +package fr.emse.gitlab.saref; + +import static fr.emse.gitlab.saref.CMDConfigurations.*; + +import java.awt.Desktop; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.io.StringWriter; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLEncoder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Properties; +import java.util.Set; + +import javax.net.ssl.HttpsURLConnection; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.FileUtils; +import org.apache.jena.query.Dataset; +import org.apache.log4j.Layout; +import org.apache.log4j.PatternLayout; +import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; + +import fr.emse.gitlab.saref.entities.git.Repository; +import fr.emse.gitlab.saref.entities.git.Repositories; +import fr.emse.gitlab.saref.entities.tests.TestSuites; +import fr.emse.gitlab.saref.jobs.CreateDataset; +import fr.emse.gitlab.saref.jobs.JobRunner; +import fr.emse.gitlab.saref.jobs.ReadRepositories; +import fr.emse.gitlab.saref.jobs.WriteRDFFiles; +import fr.emse.gitlab.saref.jobs.CheckOWLProfile; +import fr.emse.gitlab.saref.jobs.CheckRepositoryStructure; +import fr.emse.gitlab.saref.jobs.CheckShapes; + +public class Main { + + static final Logger LOG = LoggerFactory.getLogger(Main.class); + private static final Layout LAYOUT = new 
PatternLayout("%d{mm:ss,SSS} %t %-5p %c:%L - %m%n"); + private static final org.apache.log4j.Logger ROOT_LOGGER = org.apache.log4j.Logger.getRootLogger(); + + private static TestSuites testSuites = new TestSuites(); + private static File directory; + private static File target; + private static boolean openBrowser = false; + + public static void main(String[] args) + throws IOException, InterruptedException, URISyntaxException, JAXBException, ParseException { + CommandLine cl = CMDConfigurations.parseArguments(args); + if (cl.getOptions().length == 0 || cl.hasOption(ARG_HELP)) { + CMDConfigurations.displayHelp(); + return; + } + if (cl.hasOption(ARG_INCLUDE_MASTER)) { + Constants.INCLUDE_MASTER = true; + } + if (cl.hasOption(ARG_INCLUDE_ALL)) { + Constants.INCLUDE_MASTER = true; + Constants.INCLUDE_ALL = true; + } + if (cl.hasOption(ARG_PRODUCTION)) { + if(cl.hasOption(ARG_INCLUDE_MASTER) || cl.hasOption(ARG_INCLUDE_ALL)) { + throw new RuntimeException("Option --production is incompatible with options --master or --all "); + } + Constants.PRODUCTION = true; + } + String dirName = cl.getOptionValue(ARG_DIRECTORY, ARG_DIRECTORY_DEFAULT); + if (dirName.equals("")) { + dirName = ARG_DIRECTORY_DEFAULT; + } + + directory = new File(dirName).getAbsoluteFile(); + + target = new File(directory, Constants.TARGET_DIR); + FileUtils.forceMkdir(target); + + File logFile = new File(directory, Constants.LOG_FILE_NAME); + ROOT_LOGGER.addAppender(new org.apache.log4j.RollingFileAppender(LAYOUT, logFile.getAbsolutePath(), false)); + + LOG.info("Starting pipeline"); + + doJob(new CheckRepositoryStructure(directory)); + doJob(new CheckShapes(directory)); + Repositories repositories = doJob(new ReadRepositories(directory)); + Dataset dataset = doJob(new CreateDataset(directory, repositories)); + doJob(new CopyStaticFiles(directory)); + Set mappers = doJob(new WriteRDFFiles(directory, dataset)); + doJob(new CheckOWLProfile(directory, mappers)); + reportAndExit(0); + } + + private static void reportAndExit(int code) { + try { + File report = new File(target, "report_output.xml"); + JAXBContext jaxbContext = JAXBContext.newInstance(TestSuites.class); + Marshaller jaxbMarshaller = jaxbContext.createMarshaller(); + jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE); + jaxbMarshaller.marshal(testSuites, report); + jaxbMarshaller.marshal(testSuites, System.out); + final StringWriter sw = new StringWriter(); + jaxbMarshaller.marshal(testSuites, sw); + + if (openBrowser) { + if (Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) { + Desktop.getDesktop().browse(new URI( + Constants.BASE + "report.html?report=" + URLEncoder.encode(sw.toString(), "UTF-8"))); + } + } + } catch (JAXBException | URISyntaxException | IOException ex) { + LOG.error("Exception:", ex); + ex.printStackTrace(); + } + System.exit(code); + } + + private static T doJob(JobRunner checker) { + T result = checker.doJob(testSuites); + if (testSuites.getErrors() > 0) { + reportAndExit(-1); + } + return result; + } + +} diff --git a/src/main/java/fr/emse/gitlab/saref/entities/git/BranchVersion.java b/src/main/java/fr/emse/gitlab/saref/entities/git/BranchVersion.java new file mode 100644 index 0000000000000000000000000000000000000000..5fca95cb11984decb4f68eb71c32a1875aa14f84 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/entities/git/BranchVersion.java @@ -0,0 +1,35 @@ +package fr.emse.gitlab.saref.entities.git; + +import java.util.Date; + +import org.eclipse.jgit.lib.Ref; + +public class 
BranchVersion extends Version { + + final String branchName; + + public BranchVersion(String name, Ref ref, Date issued, String branchName) { + super(name, ref, issued); + this.branchName = branchName; + } + + @Override + public String getPrefix() { + return String.format("%s-%s:", super.getSuperPrefix(), branchName); + } + + @Override + public String getUri() { + return String.format("%s%s/", super.getSuperUri(), branchName); + } + + @Override + public String toString() { + return String.format("Version[%s, %s])", name, branchName); + } + + @Override + public String getVersionInfo() { + return branchName; + } +} diff --git a/src/main/java/fr/emse/gitlab/saref/entities/git/MasterVersion.java b/src/main/java/fr/emse/gitlab/saref/entities/git/MasterVersion.java new file mode 100644 index 0000000000000000000000000000000000000000..370c9052bd91763f5526fb4a068b84d88081a25a --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/entities/git/MasterVersion.java @@ -0,0 +1,36 @@ +package fr.emse.gitlab.saref.entities.git; + +import java.util.Date; + +import org.eclipse.jgit.lib.Ref; + + +public class MasterVersion extends Version { + + private static final String VERSION_INFO = "master"; + + public MasterVersion(String name, Ref ref, Date issued) { + super(name, ref, issued); + } + + @Override + public String getPrefix() { + return String.format("%s-%s:", super.getSuperPrefix(), VERSION_INFO); + } + + @Override + public String getUri() { + return String.format("%s%s/", super.getSuperUri(), VERSION_INFO); + } + + @Override + public String toString() { + return String.format("Version[%s, %s])", name, VERSION_INFO); + } + + @Override + public String getVersionInfo() { + return VERSION_INFO; + } + +} diff --git a/src/main/java/fr/emse/gitlab/saref/entities/git/ReleaseVersion.java b/src/main/java/fr/emse/gitlab/saref/entities/git/ReleaseVersion.java new file mode 100644 index 0000000000000000000000000000000000000000..1c5283e362cecd18c225a57abbdb9cfcdbc356c4 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/entities/git/ReleaseVersion.java @@ -0,0 +1,52 @@ +package fr.emse.gitlab.saref.entities.git; + +import java.util.Date; + +import org.eclipse.jgit.lib.Ref; + +public class ReleaseVersion extends Version { + + private final int major; + private final int minor; + private final int patch; + + public ReleaseVersion(String name, Ref ref, Date issued, int major, int minor, int patch) { + super(name, ref, issued); + this.major = major; + this.minor = minor; + this.patch = patch; + } + + public int getMajor() { + return major; + } + + public int getMinor() { + return minor; + } + + public int getPatch() { + return patch; + } + + @Override + public String getPrefix() { + return String.format("%s-%s.%s.%s:", super.getSuperPrefix(), major, minor, patch); + } + + @Override + public String getUri() { + return String.format("%sv%s.%s.%s/", super.getSuperUri(), major, minor, patch); + } + + @Override + public String toString() { + return String.format("Version[%s, v%s.%s.%s])", name, major, minor, patch); + } + + @Override + public String getVersionInfo() { + return String.format("v%s.%s.%s", major, minor, patch); + } + +} diff --git a/src/main/java/fr/emse/gitlab/saref/entities/git/Repositories.java b/src/main/java/fr/emse/gitlab/saref/entities/git/Repositories.java new file mode 100644 index 0000000000000000000000000000000000000000..6be1a191f2ad8130a6604fc78d9c24ee0e5d30cf --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/entities/git/Repositories.java @@ -0,0 +1,7 @@ +package 
fr.emse.gitlab.saref.entities.git; + +import java.util.HashSet; +import java.util.List; + +public class Repositories extends HashSet { +} diff --git a/src/main/java/fr/emse/gitlab/saref/entities/git/Repository.java b/src/main/java/fr/emse/gitlab/saref/entities/git/Repository.java new file mode 100644 index 0000000000000000000000000000000000000000..cca3ee80a36ae810fb7eff370ae34d19c02dd805 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/entities/git/Repository.java @@ -0,0 +1,94 @@ +/* + * Copyright 2020 École des Mines de Saint-Étienne. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package fr.emse.gitlab.saref.entities.git; + +import java.io.File; +import java.util.List; + +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; + +import fr.emse.gitlab.saref.Constants; + +/** + * + * @author maxime.lefrancois + */ +public class Repository { + private final String name; + private final File directory; + private final String http_url_to_repo; + private final List versions; + private transient Resource resource; + + + public Repository(String name, File directory, String http_url_to_repo, List versions) { + this.name = name; + this.directory = directory; + this.http_url_to_repo = http_url_to_repo; + this.versions = versions; + } + + public String getName() { + return name; + } + + public Resource getResource() { + if(resource == null) { + resource = ResourceFactory.createResource(getNamespace()); + } + return resource; + } + + public String getNamespace() { + if (name.equals("saref-core")) { + return String.format("%score/", Constants.BASE); + } else { + return String.format("%s%s/", Constants.BASE, name); + } + } + + public String getPrefix() { + if (name.equals("saref-core")) { + return "saref:"; + } else { + return String.format("s%s:", name.substring(5)); + } + } + + public File getDirectory() { + return directory; + } + + public String getHttp_url_to_repo() { + return http_url_to_repo; + } + + public List getVersions() { + return versions; + } + + public String getOntologyFileName() { + if (name.equals("saref-core")) { + return "saref.ttl"; + } else { + return String.format("%s.ttl", name); + } + } + + + +} \ No newline at end of file diff --git a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Term.java b/src/main/java/fr/emse/gitlab/saref/entities/git/Term.java similarity index 91% rename from saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Term.java rename to src/main/java/fr/emse/gitlab/saref/entities/git/Term.java index 83bf89074a42215d53ac1e1b4f42255bb8e3c99b..5445986614abdefafb4ae5ba4583df4a02f1b6b1 100644 --- a/saref-pipeline-sparql-generate/src/main/java/fr/emse/gitlab/saref/sparqlext/entities/Term.java +++ b/src/main/java/fr/emse/gitlab/saref/entities/git/Term.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package fr.emse.gitlab.saref.sparqlext.entities; +package fr.emse.gitlab.saref.entities.git; /** * @@ -53,7 +53,7 @@ public class Term { */ @Override public boolean equals(Object o) { - if (o instanceof Contributor) { + if (o instanceof Term) { final Term p = (Term) o; return getId().equals(p.getId()) && getLocalName().equals(p.getLocalName()); diff --git a/src/main/java/fr/emse/gitlab/saref/entities/git/Version.java b/src/main/java/fr/emse/gitlab/saref/entities/git/Version.java new file mode 100644 index 0000000000000000000000000000000000000000..7555ce1ae92a79654f5991b9fb3f27c12522cf41 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/entities/git/Version.java @@ -0,0 +1,85 @@ +/* + * Copyright 2020 École des Mines de Saint-Étienne. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package fr.emse.gitlab.saref.entities.git; + +import java.util.Date; + +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; +import org.eclipse.jgit.lib.Ref; + +import fr.emse.gitlab.saref.Constants; + +/** + * + * @author maxime.lefrancois + */ +public abstract class Version { + + protected final String name; + protected final Ref ref; + protected final Date issued; + private Resource resource; + + public Version(String name, Ref ref, Date issued) { + this.name = name; + this.ref = ref; + this.issued = issued; + } + + public final String getName() { + return name; + } + + public final Ref getRef() { + return ref; + } + + public final Date getIssued() { + return issued; + } + + public abstract String getUri(); + + public abstract String getPrefix(); + + public abstract String getVersionInfo(); + + public final Resource getResource() { + if(resource == null) { + resource = ResourceFactory.createResource(getUri()); + } + return resource; + } + + protected final String getSuperUri() { + if (name.equals("saref-core")) { + return String.format("%score/", Constants.BASE); + } else { + return String.format("%s%s/", Constants.BASE, name); + } + } + + protected final String getSuperPrefix() { + if (name.equals("saref-core")) { + return "saref"; + } else { + String shortName = name.substring(5); + return String.format("s%s", shortName); + } + } + +} diff --git a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/Property.java b/src/main/java/fr/emse/gitlab/saref/entities/tests/Property.java similarity index 76% rename from saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/Property.java rename to src/main/java/fr/emse/gitlab/saref/entities/tests/Property.java index b85a011e54cc09af9f042c233f5bd8394f0217b3..1cf841154898939cf733effe52663995a6db7771 100644 --- a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/Property.java +++ b/src/main/java/fr/emse/gitlab/saref/entities/tests/Property.java @@ -1,7 +1,7 @@ /** * */ -package fr.emse.gitlab.saref.entities; +package fr.emse.gitlab.saref.entities.tests; import javax.xml.bind.annotation.XmlRootElement; diff --git 
a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestCase.java b/src/main/java/fr/emse/gitlab/saref/entities/tests/TestCase.java similarity index 98% rename from saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestCase.java rename to src/main/java/fr/emse/gitlab/saref/entities/tests/TestCase.java index e261438c9c1460544cc549138b3dcba59de8b400..b66c789ba1855afd0dbc243a2d125179a650c0a8 100644 --- a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestCase.java +++ b/src/main/java/fr/emse/gitlab/saref/entities/tests/TestCase.java @@ -1,7 +1,7 @@ /** * */ -package fr.emse.gitlab.saref.entities; +package fr.emse.gitlab.saref.entities.tests; import java.io.Serializable; diff --git a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestSuite.java b/src/main/java/fr/emse/gitlab/saref/entities/tests/TestSuite.java similarity index 96% rename from saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestSuite.java rename to src/main/java/fr/emse/gitlab/saref/entities/tests/TestSuite.java index d287b263c89206e54ade10cc65adff818e8af356..817e3ab82562e34ad15e647d958b05fca198a6d3 100644 --- a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestSuite.java +++ b/src/main/java/fr/emse/gitlab/saref/entities/tests/TestSuite.java @@ -1,7 +1,7 @@ /** * */ -package fr.emse.gitlab.saref.entities; +package fr.emse.gitlab.saref.entities.tests; import java.io.Serializable; import java.util.ArrayList; diff --git a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestSuites.java b/src/main/java/fr/emse/gitlab/saref/entities/tests/TestSuites.java similarity index 96% rename from saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestSuites.java rename to src/main/java/fr/emse/gitlab/saref/entities/tests/TestSuites.java index af17d5f5f86fa46a88449bfc6623ae9cbf734cef..75caf34110666052d1217a864bf0752d20bfbd5b 100644 --- a/saref-pipeline-core/src/main/java/fr/emse/gitlab/saref/entities/TestSuites.java +++ b/src/main/java/fr/emse/gitlab/saref/entities/tests/TestSuites.java @@ -1,7 +1,7 @@ /** * */ -package fr.emse.gitlab.saref.entities; +package fr.emse.gitlab.saref.entities.tests; import java.io.Serializable; import java.util.ArrayList; diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/AbstractJobRunner.java b/src/main/java/fr/emse/gitlab/saref/jobs/AbstractJobRunner.java new file mode 100644 index 0000000000000000000000000000000000000000..2713f6eead85ec27abf806b37a57f07c2ccea263 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/AbstractJobRunner.java @@ -0,0 +1,112 @@ +package fr.emse.gitlab.saref.jobs; + +import java.io.File; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import fr.emse.gitlab.saref.entities.tests.TestCase; +import fr.emse.gitlab.saref.entities.tests.TestSuite; +import fr.emse.gitlab.saref.entities.tests.TestSuites; +import fr.emse.gitlab.saref.entities.tests.TestCase.Status; +import fr.emse.gitlab.saref.jobs.JobRunner; + +public abstract class AbstractJobRunner implements JobRunner { + + private final TestSuite testSuite; + protected final File directory; + + public AbstractJobRunner(String testName, File dir) { + this.directory = dir; + if(testName == null) { + testName = this.getClass().getCanonicalName(); + } + testSuite = new TestSuite(testName); + } + + public final T doJob(TestSuites testSuites) { + T result = getValue(); + testSuites.addTestsuite(testSuite); + return result; + } + + 
protected T getValue() { + doJob0(); + return null; + }; + + protected void doJob0() { + }; + + protected void error(String name, Exception ex) { + TestCase tc = new TestCase(name); + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + ex.printStackTrace(pw); + tc.setSystemErr(sw.toString()); + tc.setError(ex.getClass().getCanonicalName(), ex.getMessage()); + testSuite.addTestcase(tc); + } + + protected void failure(String name, Exception ex) { + TestCase tc = new TestCase(name); + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + ex.printStackTrace(pw); + tc.setSystemErr(sw.toString()); + tc.setFailure(ex.getClass().getCanonicalName(), ex.getMessage()); + testSuite.addTestcase(tc); + } + + protected void error(String name, String type, String message) { + TestCase tc = new TestCase(name); + tc.setError(type, message); + testSuite.addTestcase(tc); + } + + protected void error(String name) { + TestCase tc = new TestCase(name); + tc.setStatus(Status.ERROR); + testSuite.addTestcase(tc); + } + + protected void warning(String name) { + TestCase tc = new TestCase(name); + tc.setStatus(Status.WARNING); + testSuite.addTestcase(tc); + } + + protected void failure(String name) { + TestCase tc = new TestCase(name); + tc.setStatus(Status.FAILURE); + testSuite.addTestcase(tc); + } + + protected void failure(String name, String type, String message) { + TestCase tc = new TestCase(name); + tc.setFailure(type, message); + testSuite.addTestcase(tc); + } + + protected void success(String name) { + testSuite.addTestcase(new TestCase(name)); + } + + + final protected String getRepoName() { + String REGEX = "^(?saref-core|saref4[a-z][a-z][a-z][a-z])"; + String name = directory.getName(); + Pattern pattern = Pattern.compile(REGEX); + Matcher matcher = pattern.matcher(name); + if (!matcher.find()) { + error(String.format( + "The SAREF pipeline must be run inside a directory whose name begins with `saref-core`, or `saref4abcd` (where abcd can be any sequence of four letters). 
Got: %s", + name)); + return null; + } + return matcher.group("ext"); + } + + +} diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/CheckExamples.java b/src/main/java/fr/emse/gitlab/saref/jobs/CheckExamples.java new file mode 100644 index 0000000000000000000000000000000000000000..c40a7fd1530f08c06cf0b0dbcf0959790ee132c7 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/CheckExamples.java @@ -0,0 +1,63 @@ +package fr.emse.gitlab.saref.jobs; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.util.HashMap; +import java.util.Map; + +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.riot.Lang; + +public class CheckExamples extends AbstractJobRunner { + + static final Map PREFIXES = new HashMap(); + static { + PREFIXES.put("owl", "http://www.w3.org/2002/07/owl#"); + PREFIXES.put("rdfs", "http://www.w3.org/2000/01/rdf-schema#"); + PREFIXES.put("xsd", "http://www.w3.org/2001/XMLSchema#"); + PREFIXES.put("dcterms", "http://purl.org/dc/terms/"); + PREFIXES.put("vann", "http://purl.org/vocab/vann/"); + PREFIXES.put("foaf", "http://xmlns.com/foaf/0.1/"); + PREFIXES.put("schema", "http://schema.org/"); + PREFIXES.put("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); + PREFIXES.put("voaf", "http://purl.org/vocommons/voaf#"); + PREFIXES.put("dce", "http://purl.org/dc/elements/1.1/"); + PREFIXES.put("dct", "http://purl.org/dc/terms/"); + PREFIXES.put("xml", "http://www.w3.org/XML/1998/namespace/"); + PREFIXES.put("saref", "https://saref.etsi.org/core/"); + } + + public CheckExamples(File dir) { + super("Check examples", dir); + } + + @Override + protected void doJob0() { +// String repoName = getRepoName(); + checkExamples(); + } + + private void checkExamples() { + File dir = new File(directory, "examples"); + try { + Files.walk(dir.toPath()).filter(p -> { + return p.endsWith(".ttl"); + }).forEach(p -> { + File exampleFile = p.toFile(); + Model model = ModelFactory.createDefaultModel(); + try (FileInputStream input = new FileInputStream(exampleFile)) { + model.read(input, null, Lang.TTL.getLabel()); + } catch (Exception ex) { + failure(String.format("Exception while reading the example file %s", exampleFile.toString()), + ex.getClass().getSimpleName(), ex.getMessage()); + } + }); + } catch (IOException ex) { + error("Exception while walking the example directory", ex); + } + } + +} diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/CheckOWLProfile.java b/src/main/java/fr/emse/gitlab/saref/jobs/CheckOWLProfile.java new file mode 100644 index 0000000000000000000000000000000000000000..8aae7e4b9e74f44d380c8a553c33d570b84d0e59 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/CheckOWLProfile.java @@ -0,0 +1,142 @@ +package fr.emse.gitlab.saref.jobs; + +import java.io.File; +import java.io.StringWriter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +import org.semanticweb.HermiT.ReasonerFactory; +import org.semanticweb.owl.explanation.api.Explanation; +import org.semanticweb.owl.explanation.api.ExplanationGenerator; +import org.semanticweb.owl.explanation.api.ExplanationGeneratorFactory; +import org.semanticweb.owl.explanation.impl.blackbox.Configuration; +import org.semanticweb.owl.explanation.impl.blackbox.ContractionStrategy; +import org.semanticweb.owl.explanation.impl.blackbox.DivideAndConquerContractionStrategy; +import 
org.semanticweb.owl.explanation.impl.blackbox.EntailmentCheckerFactory; +import org.semanticweb.owl.explanation.impl.blackbox.ExpansionStrategy; +import org.semanticweb.owl.explanation.impl.blackbox.InitialEntailmentCheckStrategy; +import org.semanticweb.owl.explanation.impl.blackbox.StructuralTypePriorityExpansionStrategy; +import org.semanticweb.owl.explanation.impl.blackbox.checker.BlackBoxExplanationGeneratorFactory; +import org.semanticweb.owl.explanation.impl.blackbox.checker.InconsistentOntologyExplanationGeneratorFactory; +import org.semanticweb.owl.explanation.impl.blackbox.checker.SatisfiabilityEntailmentCheckerFactory; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.profiles.OWL2DLProfile; +import org.semanticweb.owlapi.profiles.OWLProfileReport; +import org.semanticweb.owlapi.profiles.OWLProfileViolation; +import org.semanticweb.owlapi.util.SimpleIRIMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CheckOWLProfile extends AbstractJobRunner { + + static final Logger LOG = LoggerFactory.getLogger(CheckOWLProfile.class); + static final String SHACL_WARNING = "http://www.w3.org/ns/shacl#Warning"; + static final String SHACL_VIOLATION = "http://www.w3.org/ns/shacl#Violation"; + + static final String SELECT_VIOLATION = "PREFIX sh: \n" + + "SELECT ?focusNode ?resultMessage ?resultPath ?value ?severity\n" + "WHERE { \n" + + " ?violation sh:focusNode ?focusNode .\n" + " ?violation sh:resultMessage ?resultMessage .\n" + + " ?violation sh:resultSeverity ?severity .\n" + + " OPTIONAL { ?violation sh:resultPath ?resultPath . }\n" + + " OPTIONAL { ?violation sh:value ?value . } \n" + "}"; + + static final Map PREFIXES = new HashMap(); + static { + PREFIXES.put("owl", "http://www.w3.org/2002/07/owl#"); + PREFIXES.put("rdfs", "http://www.w3.org/2000/01/rdf-schema#"); + PREFIXES.put("xsd", "http://www.w3.org/2001/XMLSchema#"); + PREFIXES.put("dcterms", "http://purl.org/dc/terms/"); + PREFIXES.put("vann", "http://purl.org/vocab/vann/"); + PREFIXES.put("schema", "http://schema.org/"); + PREFIXES.put("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); + PREFIXES.put("voaf", "http://purl.org/vocommons/voaf#"); + PREFIXES.put("dce", "http://purl.org/dc/elements/1.1/"); + PREFIXES.put("dct", "http://purl.org/dc/terms/"); + PREFIXES.put("xml", "http://www.w3.org/XML/1998/namespace/"); + PREFIXES.put("saref", "https://saref.etsi.org/core/"); + } + + private final Set mappers; + + public CheckOWLProfile(File dir, Set mappers) { + super("Check consistency of the ontology", dir); + this.mappers = mappers; + } + + @Override + protected void doJob0() { + String repoName = getRepoName(); + String ontologyName = repoName.equals("saref-core") ? 
"saref.ttl" : repoName + ".ttl"; + + File ontologyFile = new File(directory, "ontology/" + ontologyName); + final OWLDataFactory dataFactory = OWLManager.getOWLDataFactory(); + final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); + + ontologyManager.setIRIMappers(mappers); + + final Supplier m = () -> ontologyManager; + final OWLOntology ontology; + try { + ontology = ontologyManager.loadOntologyFromOntologyDocument(ontologyFile); + } catch (OWLOntologyCreationException ex) { + error("Error while loading the ontology in OWLAPI", ex); + return; + } + final OWLProfileReport report = new OWL2DLProfile().checkOntology(ontology); + for (OWLProfileViolation v : report.getViolations()) { + failure(v.toString()); + } + final ReasonerFactory reasonerFactory = new ReasonerFactory(); + final InconsistentOntologyExplanationGeneratorFactory inconsistentOntologyExplanationFeneratorFactory = new InconsistentOntologyExplanationGeneratorFactory( + reasonerFactory, ontologyManager.getOWLDataFactory(), m, 10000); + final ExplanationGenerator gen = inconsistentOntologyExplanationFeneratorFactory + .createExplanationGenerator(ontology); + final OWLAxiom inc = dataFactory.getOWLSubClassOfAxiom(dataFactory.getOWLThing(), dataFactory.getOWLNothing()); + try { + final Set> incExplanation = gen.getExplanations(inc, 10); + if (!incExplanation.isEmpty()) { + StringWriter sw = new StringWriter(); + incExplanation.forEach(e -> { + sw.append(e.getAxioms().toString()).append("\n"); + }); + error("The ontology is inconsistent.", "Explanations from OWLApi", sw.toString()); + return; + } + } catch (Exception ex) { + error("Error while reasoning with the ontology", ex); + return; + } + final EntailmentCheckerFactory checker = new SatisfiabilityEntailmentCheckerFactory(reasonerFactory, + m); + final ExpansionStrategy expansionStrategy = new StructuralTypePriorityExpansionStrategy<>( + InitialEntailmentCheckStrategy.PERFORM, m); + final ContractionStrategy contractionStrategy = new DivideAndConquerContractionStrategy<>(); + final Configuration config = new Configuration<>(checker, expansionStrategy, contractionStrategy, + null, m); + final ExplanationGeneratorFactory explanationGeneratorFactory = new BlackBoxExplanationGeneratorFactory( + config); + final ExplanationGenerator fgen = explanationGeneratorFactory.createExplanationGenerator(ontology); + ontology.classesInSignature().forEach(c -> { + OWLAxiom axiom = dataFactory.getOWLSubClassOfAxiom(c, dataFactory.getOWLNothing()); + Set> explanation = fgen.getExplanations(axiom, 10); + if (!explanation.isEmpty()) { + StringWriter sw = new StringWriter(); + explanation.forEach(e -> { + sw.append(e.getAxioms().toString()).append("\n"); + }); + failure(String.format("Class %s cannot be satisfied.", c), "Explanations from OWLApi", sw.toString()); + } + }); + } + +} diff --git a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/jobs/library/RepositoryStructureChecker.java b/src/main/java/fr/emse/gitlab/saref/jobs/CheckRepositoryStructure.java similarity index 90% rename from saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/jobs/library/RepositoryStructureChecker.java rename to src/main/java/fr/emse/gitlab/saref/jobs/CheckRepositoryStructure.java index 3da0fc89a6038085efee967d619df80a16dae899..c1f89776489ebf527aec433c249f1a18771fc43e 100644 --- a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/jobs/library/RepositoryStructureChecker.java +++ b/src/main/java/fr/emse/gitlab/saref/jobs/CheckRepositoryStructure.java @@ -1,4 +1,4 @@ -package 
fr.emse.gitlab.saref.jobs.library; +package fr.emse.gitlab.saref.jobs; import java.io.BufferedReader; import java.io.File; @@ -13,18 +13,15 @@ import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; -import fr.emse.gitlab.saref.jobs.AbstractJobRunner; - -public class RepositoryStructureChecker extends AbstractJobRunner { +public class CheckRepositoryStructure extends AbstractJobRunner { static Logger logger = Logger.getLogger("MaintenanceReport"); private final Set gitignoreLines = new HashSet<>( Arrays.asList("target", "*~", ".DS_Store", "catalog-v001.xml")); - private final String NAME = "Check the structure of the repository"; - public RepositoryStructureChecker(File dir) { - super(dir); + public CheckRepositoryStructure(File dir) { + super("Check the structure of the repository", dir); } public void doJob0() { @@ -46,6 +43,7 @@ public class RepositoryStructureChecker extends AbstractJobRunner { checkGitIgnoreFolder(); checkLICENSE(); checkExamplesFolder(); + } private void checkThereExistsDirectory(String dirName) { @@ -109,7 +107,7 @@ public class RepositoryStructureChecker extends AbstractJobRunner { return; } File licenseModelFile = new File( - RepositoryStructureChecker.class.getClassLoader().getResource("LICENSE_MODEL").getFile()); + CheckRepositoryStructure.class.getClassLoader().getResource("LICENSE_MODEL").getFile()); try (BufferedReader licenseModelFileReader = new BufferedReader(new FileReader(licenseModelFile)); BufferedReader licenseFileReader = new BufferedReader(new FileReader(licenseFile));) { @@ -175,7 +173,7 @@ public class RepositoryStructureChecker extends AbstractJobRunner { return p.endsWith(".ttl"); }); if (!containsExample) { - failure("The `examples` folder should contain at least one example file (some `.ttl` document"); + failure("The `examples` folder should contain at least one example file (some `.ttl` document)"); } } catch (IOException ex) { error("Exception while browsing the `examples` folder", ex); diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/CheckShapes.java b/src/main/java/fr/emse/gitlab/saref/jobs/CheckShapes.java new file mode 100644 index 0000000000000000000000000000000000000000..ae8cde80bd2e48d56003a60c9d334869a2ea509a --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/CheckShapes.java @@ -0,0 +1,131 @@ +package fr.emse.gitlab.saref.jobs; + +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; + +import org.apache.jena.query.QueryExecution; +import org.apache.jena.query.QueryExecutionFactory; +import org.apache.jena.rdf.model.Literal; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.RDFNode; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.riot.Lang; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.topbraid.jenax.util.JenaUtil; +import org.topbraid.shacl.validation.ValidationUtil; +import org.topbraid.shacl.vocabulary.SH; + +import fr.emse.gitlab.saref.Constants; + +public class CheckShapes extends AbstractJobRunner { + + static final Logger LOG = LoggerFactory.getLogger(CheckShapes.class); + static final String SHACL_WARNING = "http://www.w3.org/ns/shacl#Warning"; + static final String SHACL_VIOLATION = "http://www.w3.org/ns/shacl#Violation"; + + static final String SELECT_VIOLATION = "PREFIX sh: \n" + + "SELECT ?focusNode ?resultMessage ?resultPath ?value ?severity\n" + "WHERE { \n" + + " 
?violation sh:focusNode ?focusNode .\n" + " ?violation sh:resultMessage ?resultMessage .\n" + + " ?violation sh:resultSeverity ?severity .\n" + + " OPTIONAL { ?violation sh:resultPath ?resultPath . }\n" + + " OPTIONAL { ?violation sh:value ?value . } \n" + "}"; + + static final Map PREFIXES = new HashMap(); + static { + PREFIXES.put("owl", "http://www.w3.org/2002/07/owl#"); + PREFIXES.put("rdfs", "http://www.w3.org/2000/01/rdf-schema#"); + PREFIXES.put("xsd", "http://www.w3.org/2001/XMLSchema#"); + PREFIXES.put("dcterms", "http://purl.org/dc/terms/"); + PREFIXES.put("vann", "http://purl.org/vocab/vann/"); + PREFIXES.put("schema", "http://schema.org/"); + PREFIXES.put("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); + PREFIXES.put("voaf", "http://purl.org/vocommons/voaf#"); + PREFIXES.put("dce", "http://purl.org/dc/elements/1.1/"); + PREFIXES.put("dct", "http://purl.org/dc/terms/"); + PREFIXES.put("xml", "http://www.w3.org/XML/1998/namespace/"); + PREFIXES.put("saref", "https://saref.etsi.org/core/"); + } + + public CheckShapes(File dir) { + super("Check that the ontology satisfies shapes", dir); + } + + @Override + protected void doJob0() { + String repoName = getRepoName(); + String ontologyName = repoName.equals("saref-core") ? "saref.ttl" : repoName + ".ttl"; + + File ontologyFile = new File(directory, "ontology/" + ontologyName); + Model model = ModelFactory.createDefaultModel(); + try (FileInputStream input = new FileInputStream(ontologyFile)) { + model.read(input, null, Lang.TTL.getLabel()); + } catch (Exception ex) { + error("Exception while reading the ontology file", ex); + return; + } + checkPrefixes(model); + checkShapes(model); + } + + private void checkPrefixes(Model model) { + final Map prefixes = model.getNsPrefixMap(); + for (String s : PREFIXES.keySet()) { + if (prefixes.containsKey(s)) { + if (!prefixes.get(s).equals(PREFIXES.get(s))) { + failure(String.format( + "Prefix `%s:` in the ontology file is expected to be equal to `<%s>`. Got: `<%s>`", s, + PREFIXES.get(s), prefixes.get(s))); + } + } + } + for (Map.Entry entry : prefixes.entrySet()) { + String s = entry.getKey(); + String l = entry.getValue(); + if (l.contains("saref")) { + if (!l.matches(Constants.REGEX_ONTO_SERIES_URI)) { + failure(String.format( + "Prefix `%s:` in the ontology file contains string \"saref\", but does not seem to match the official SAREF ontologies namespaces: `\\\\%s\\\\`. 
Got: `<%s>`", + s, Constants.REGEX_ONTO_SERIES_URI, l)); + } + } + } + } + + private void checkShapes(Model model) { + Model shapeModel = JenaUtil.createDefaultModel(); + try (InputStream in = CheckShapes.class.getClassLoader().getResourceAsStream("sarefShape.ttl")) { + shapeModel.read(in, Constants.BASE, "TTL"); + } catch (Exception ex) { + error("Exception while reading the shape file", ex); + return; + } + Resource reportResource = ValidationUtil.validateModel(model, shapeModel, true); + boolean conforms = reportResource.getProperty(SH.conforms).getBoolean(); + if (!conforms) { + Model reportModel = reportResource.getModel(); + reportModel.setNsPrefixes(PREFIXES); + reportModel.setNsPrefix("sh", "http://www.w3.org/ns/shacl#"); + try (QueryExecution exec = QueryExecutionFactory.create(SELECT_VIOLATION, reportModel);) { + exec.execSelect().forEachRemaining(sol -> { + Resource severity = sol.getResource("severity"); + Resource focusNode = sol.get("focusNode").asResource(); + Literal resultMessage = sol.getLiteral("resultMessage"); + RDFNode value = sol.get("value"); + String gotString = (value != null && value.isURIResource()) ? String.format(" Got: %s", value) : ""; + if (severity != null && severity.getURI().equals(SHACL_VIOLATION)) { + error(String.format("Shape violation on node %s: %s%s", focusNode, resultMessage, gotString)); + } else { + failure(String.format("Shape violation on node %s: %s%s", focusNode, resultMessage, gotString)); + } + }); + } + } + } + + +} diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/CreateDataset.java b/src/main/java/fr/emse/gitlab/saref/jobs/CreateDataset.java new file mode 100644 index 0000000000000000000000000000000000000000..967a74fda10b4c8131c8e5a328773823e0724d7b --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/CreateDataset.java @@ -0,0 +1,230 @@ +package fr.emse.gitlab.saref.jobs; + +import java.io.File; +import java.io.FileReader; +import java.text.SimpleDateFormat; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.jena.datatypes.xsd.XSDDatatype; +import org.apache.jena.query.Dataset; +import org.apache.jena.query.QueryExecution; +import org.apache.jena.query.QueryExecutionFactory; +import org.apache.jena.query.ReadWrite; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; +import org.apache.jena.tdb.TDBFactory; +import org.apache.jena.vocabulary.DCTerms; +import org.apache.jena.vocabulary.OWL2; +import org.apache.jena.vocabulary.RDF; +import org.apache.jena.vocabulary.RDFS; +import org.eclipse.jgit.api.Git; + +import fr.emse.gitlab.saref.Constants; +import fr.emse.gitlab.saref.entities.git.Repositories; +import fr.emse.gitlab.saref.entities.git.Repository; +import fr.emse.gitlab.saref.entities.git.Version; +import fr.emse.gitlab.saref.vocabs.EX; +import fr.emse.gitlab.saref.vocabs.RDFP; +import fr.emse.gitlab.saref.vocabs.VANN; + +public class CreateDataset extends AbstractJobRunner { + + private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd"); + private static final Resource ETSI_URL = ResourceFactory.createResource("https://www.etsi.org/"); + private static final Resource ETSI_LICENSE = ResourceFactory + .createResource("https://forge.etsi.org/etsi-software-license"); + + private final Repositories repositories; + private final Dataset dataset; + private final Model config = 
ModelFactory.createDefaultModel(); + private final Map ngs = new HashMap<>(); + + public CreateDataset(File directory, Repositories repositories) { + super("Create the RDF Dataset", directory); + this.repositories = repositories; + dataset = TDBFactory.createDataset(Constants.DATASET_DIR); + } + + @Override + protected Dataset getValue() { + for (Repository repository : repositories) { + readRepository(repository); + } + writeToTDBDataset(); + return dataset; + } + + private void readRepository(Repository repo) { + try (Git git = Git.open(repo.getDirectory())) { + config.add(repo.getResource(), RDF.type, OWL2.Ontology); + for (int i = 0; i < repo.getVersions().size(); i++) { + readVersion(repo, git, i); + } + } catch (Exception ex) { + failure("Exception while reading repository " + repo.getName(), ex); + } + } + + private void readVersion(Repository repo, Git git, int i) { + Version version = repo.getVersions().get(i); + try { + git.checkout().setName(version.getRef().getName()).call(); + Model onto = ModelFactory.createDefaultModel(); + File ontologyFile = new File(repo.getDirectory(), "ontology/" + repo.getOntologyFileName()); + try (FileReader fr = new FileReader(ontologyFile)) { + onto.read(fr, Constants.BASE, "TTL"); + } + ngs.put(version.getUri(), onto); + addOntologyMetadata(onto, repo, git, i); + augmentTermGraphs(onto, repo, git, i); + } catch (Exception ex) { + failure(String.format("Failed to read repository %s version %s", repo.getName(), version.getName()), ex); + } + } + + /** + * for each term, add: some triples in the default graph (used to detect naming + * clashes), and some triples in the named graph of the term (used to display + * the page of the term) + * + * @param onto + * @param repo + * @param git + * @param i + */ + private void augmentTermGraphs(Model onto, Repository repo, Git git, int i) { + Version version = repo.getVersions().get(i); + Version priorVersion = i == 0 ? 
null : repo.getVersions().get(i - 1); + Set definedTerms = new HashSet<>(); + Set usedTerms = new HashSet<>(); + computeTerms(onto, repo, version, definedTerms, usedTerms); + config.add(repo.getResource(), EX.hasVersion, version.getResource()); + config.add(version.getResource(), RDF.type, EX.OntologyVersion); + config.add(version.getResource(), EX.versionInfo, version.getVersionInfo()); + if (priorVersion != null) { + config.add(version.getResource(), EX.priorVersion, priorVersion.getResource()); + } + for (Resource t : definedTerms) { + Model termModel = getNamedModel(t); + String localName = t.getURI().substring(repo.getNamespace().length()); + termModel.add(t, RDFS.isDefinedBy, version.getResource()); + // keep the most recent definition of the term + if (i == repo.getVersions().size() - 1) { + termModel.add(t, DCTerms.modified, SIMPLE_DATE_FORMAT.format(version.getIssued()), XSDDatatype.XSDdate); + try (QueryExecution exec = QueryExecutionFactory.create("DESCRIBE <" + t.getURI() + ">", onto)) { + termModel.add(exec.execDescribe()); + } + } + config.add(t, RDF.type, RDFP.Resource); + config.add(t, EX.localName, localName); + config.add(t, RDFS.isDefinedBy, version.getResource()); + } + for (Resource t : usedTerms) { + Model termModel = getNamedModel(t); + termModel.add(t, EX.isUsedBy, version.getResource()); + config.add(t, RDF.type, RDFP.Resource); + config.add(t, EX.isUsedBy, version.getResource()); + } + + } + + private void writeToTDBDataset() { + dataset.begin(ReadWrite.WRITE); + dataset.addNamedModel(EX.config, config); + for (String name : ngs.keySet()) { + dataset.addNamedModel(name, ngs.get(name)); + } + dataset.commit(); + } + + private void addOntologyMetadata(Model onto, Repository repo, Git git, int i) throws Exception { + Version version = repo.getVersions().get(i); + Version priorVersion = i == 0 ? null : repo.getVersions().get(i - 1); + Resource resource = repo.getResource(); + onto.add(resource, RDF.type, OWL2.Ontology); + + onto.removeAll(resource, OWL2.versionIRI, null); + onto.add(resource, OWL2.versionIRI, version.getResource()); + + onto.removeAll(resource, OWL2.versionInfo, null); + onto.add(resource, OWL2.versionInfo, version.getVersionInfo()); + + onto.removeAll(resource, OWL2.priorVersion, null); + if (priorVersion != null) { + onto.add(resource, OWL2.priorVersion, priorVersion.getResource()); + } + + onto.removeAll(resource, DCTerms.publisher, null); + onto.add(resource, DCTerms.publisher, ETSI_URL); + + onto.removeAll(resource, DCTerms.license, null); + onto.add(resource, DCTerms.license, ETSI_LICENSE); + +// // list of contributors +// Set contributors = new HashSet<>(); +// for (Iterator it = git.log().add(version.ref.getObjectId()).call().iterator(); it.hasNext();) { +// RevCommit commit = it.next(); +// Contributor contributor = new Contributor(commit.getCommitterIdent()); +// contributors.add(contributor); +// } +// for (Contributor contributor : contributors) { +// // git can only provide name + email. +// // would need to maintain some list of mapping email -> url in the future. 
+// Resource anon = onto.createResource(); +// onto.add(repo.resource, DCTerms.contributor, anon); +// onto.add(anon, FOAF.name, contributor.getName()); +// onto.add(anon, FOAF.mbox, contributor.getEmailAddress()); +// } + + onto.removeAll(resource, VANN.preferredNamespacePrefix, null); + onto.removeAll(resource, VANN.preferredNamespaceUri, null); + onto.add(resource, VANN.preferredNamespacePrefix, repo.getPrefix()); + onto.add(resource, VANN.preferredNamespaceUri, repo.getNamespace(), XSDDatatype.XSDanyURI); + + onto.removeAll(resource, DCTerms.issued, null); + onto.add(resource, DCTerms.issued, SIMPLE_DATE_FORMAT.format(version.getIssued()), XSDDatatype.XSDdate); + } + + private void computeTerms(Model onto, Repository repo, Version version, Set definedTerms, + Set usedTerms) { + onto.listStatements().forEachRemaining(stmt -> { + Resource s = stmt.getSubject(); + Resource p = stmt.getPredicate(); + Resource o = stmt.getObject().isResource() ? (Resource) stmt.getObject() : null; + computeTerms(s, repo, version, definedTerms, usedTerms); + computeTerms(p, repo, version, definedTerms, usedTerms); + computeTerms(o, repo, version, definedTerms, usedTerms); + }); + } + + private void computeTerms(Resource t, Repository repo, Version version, Set definedTerms, + Set usedTerms) { + if (t == null || !t.isURIResource() || !t.getURI().startsWith(Constants.BASE) || t.getURI().endsWith("/")) { + return; + } + String uri = t.getURI(); + if (uri.startsWith(repo.getNamespace())) { + definedTerms.add(t); + } else if (!uri.startsWith(repo.getNamespace())) { + usedTerms.add(t); + } + } + + private Model getNamedModel(Resource t) { + String uri = t.getURI(); + if (ngs.containsKey(uri)) { + return ngs.get(uri); + } else { + Model model = ModelFactory.createDefaultModel(); + model.add(t, RDF.type, RDFS.Resource); + ngs.put(uri, model); + return model; + } + } + +} diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/CreateDocumentation.java b/src/main/java/fr/emse/gitlab/saref/jobs/CreateDocumentation.java new file mode 100644 index 0000000000000000000000000000000000000000..bce4e6f0fd585a7e314642b8e529d8aae08cf621 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/CreateDocumentation.java @@ -0,0 +1,584 @@ +//package fr.emse.gitlab.saref.jobs; +// +//import java.io.File; +//import java.io.FileOutputStream; +//import java.io.IOException; +//import java.nio.charset.StandardCharsets; +//import java.nio.file.Files; +//import java.nio.file.Path; +//import java.nio.file.Paths; +//import java.util.ArrayList; +//import java.util.HashSet; +//import java.util.Iterator; +//import java.util.List; +//import java.util.Set; +// +//import org.apache.commons.io.FileUtils; +//import org.apache.commons.io.IOUtils; +//import org.apache.jena.atlas.io.IndentedWriter; +//import org.apache.jena.graph.NodeFactory; +//import org.apache.jena.query.Dataset; +//import org.apache.jena.query.ReadWrite; +//import org.apache.jena.rdf.model.Model; +//import org.apache.jena.sparql.core.Var; +//import org.apache.jena.sparql.engine.binding.Binding; +//import org.apache.jena.sparql.engine.binding.BindingHashMap; +//import org.apache.jena.sparql.util.Context; +//import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; +// +//import fr.emse.ci.sparqlext.SPARQLExt; +//import fr.emse.ci.sparqlext.engine.PlanFactory; +//import fr.emse.ci.sparqlext.engine.RootPlan; +//import fr.emse.ci.sparqlext.stream.LocationMapperAccept; +//import fr.emse.ci.sparqlext.stream.LocatorFileAccept; +//import fr.emse.ci.sparqlext.stream.LookUpRequest; +//import 
fr.emse.ci.sparqlext.stream.SPARQLExtStreamManager; +//import fr.emse.ci.sparqlext.utils.ContextUtils; +//import fr.emse.ci.sparqlext.utils.VarUtils; +//import fr.emse.gitlab.saref.Constants; +//import fr.emse.gitlab.saref.entities.tests.TestSuites; +//import fr.emse.gitlab.saref.vocabs.EX; +// +//public class CreateDocumentation extends AbstractJobRunner { +// +// private final Dataset dataset; +// private static final Var VAR_GRAPH = VarUtils.allocVar("term"); +// +// public CreateDocumentation(File directory, Dataset dataset) { +// super("Create the HTML documentation", directory); +// this.dataset = dataset; +// } +// +// public void doJob() { +// +// SPARQLExtStreamManager streamManager = initializeStreamManager(); +// +// +// dataset.begin(ReadWrite.READ); +// for (Iterator it = dataset.listNames(); it.hasNext();) { +// String name = it.next(); +// if (name.equals(EX.config)) { +// continue; +// } +// Model model = dataset.getNamedModel(name); +// try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(new File(SITE_DIR, fileName + ".html")));) { +// Context context = ContextUtils.build(writer).setBase(Constants.BASE).setDebugTemplate(Constants.PRODUCTION).setInputDataset(dataset) +// .setStreamManager(sm) +// .build(); +// BindingHashMap binding = new BindingHashMap(); +// binding.add(VAR_GRAPH, NodeFactory.createURI(name)); +// List bindings = new ArrayList()<>(); +// bindings.add(binding); +// planForTerm.execTemplateStream(bindings, context); +// } +// } +// dataset.end(); +// } +// +// private RootPlan createPlanForTerm(SPARQLExtStreamManager sm) throws IOException { +// String query; +// query = IOUtils.toString(sm.open(new LookUpRequest(TERM_QUERY, SPARQLExt.MEDIA_TYPE)), StandardCharsets.UTF_8); +// return PlanFactory.create(query, DOC_NS); +// } +// +// private SPARQLExtStreamManager initializeStreamManager() { +// File documentationDir = new File(CreateDocumentation.class.getClassLoader().getResource("documentation").getFile()); +// Path dirPath = Paths.get(documentationDir.toURI()); +// LocatorFileAccept locator = new LocatorFileAccept(documentationDir.toURI().getPath()); +// LocationMapperAccept mapper = new LocationMapperAccept(); +// SPARQLExtStreamManager sm = SPARQLExtStreamManager.makeStreamManager(locator); +// sm.setLocationMapper(mapper); +// try { +// Files.walk(dirPath).filter((p) -> { +// return p.toFile().isFile(); +// }).forEach((p) -> { +// String relativePath = dirPath.relativize(p).toString(); +// String fileurl = DOC_NS + relativePath.replace("\\", "/"); +// mapper.addAltEntry(fileurl, p.toString()); +// }); +// } catch (IOException ex) { +// failure("Error while computing the URIs for the files in the working directory.", ex); +// } +// return sm; +// } +// +// +//// static final Logger LOG = LoggerFactory.getLogger(Documentation.class); +//// +//// static final Gson gson = new Gson(); +//// +//// static final Pattern REGEX_REPO_PATTERN = Pattern.compile("^saref(-core|4[a-z][a-z][a-z][a-z])$", Pattern.CASE_INSENSITIVE); +//// +//// static final File GIT_DIR = new File("target/sources"); +//// static final File SITE_DIR = new File("target/site"); +//// static final File STATIC_TARGET_DIR = new File("target/site/static"); +//// static final String DATASET_DIR = "target/tdb"; +//// +//// static final String NS = "https://saref.etsi.org/"; +//// private static final String REGEX_EXT = "(?core|saref4[a-z][a-z][a-z][a-z])"; +//// private static final String REGEX_VERSION = "v(?[1-9][0-9]*)\\.(?[0-9]+)\\.(?[0-9]+)"; +//// +//// private static final 
String REGEX_RELEASE_BRANCH = "^refs/remotes/origin/release-" + REGEX_VERSION + "$"; +//// private static final Pattern REGEX_RELEASE_BRANCH_PATTERN = Pattern.compile(REGEX_RELEASE_BRANCH); +//// +//// private static final String REGEX_TERM_URI = "^" + NS + REGEX_EXT + "/(?[^/]+)$"; +//// private static final Pattern REGEX_TERM_PATTERN = Pattern.compile(REGEX_TERM_URI); +//// +//// private static final String REGEX_ONTO_URI = "^" + NS + REGEX_EXT + "/" + REGEX_VERSION + "/$"; +//// private static final Pattern REGEX_ONTO_PATTERN = Pattern.compile(REGEX_ONTO_URI); +//// +//// private static final String REGEX_EXT_URI = "^" + NS + REGEX_EXT + "/"; +//// private static final Pattern REGEX_EXT_PATTERN = Pattern.compile(REGEX_EXT_URI); +//// +//// private static Dataset dataset; +//// private static final Model config = ModelFactory.createDefaultModel(); +//// private static final Map ngs = new HashMap<>(); +//// +//// private static final Resource ETSI_URL = ResourceFactory.createResource("https://www.etsi.org/"); +//// private static final Resource ETSI_LICENSE = ResourceFactory +//// .createResource("https://forge.etsi.org/etsi-software-license"); +//// +//// private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd"); +//// +//// static final String DOC_NS = NS + "documentation/"; +//// private static final String TERM_QUERY = "term/main.rqg"; +//// private static final Var VAR_GRAPH = VarUtils.allocVar("term"); +//// +//// static final String urlString = "https://forge.etsi.org/rep/api/v4/groups/saref"; +//// +//// static final boolean useTDB = true; +//// +//// +//// +//// public static void main(String[] args) throws MalformedURLException, IOException { +//// // do we have a directory named ontology here ? if not, we assume that we are executing from a repo. +//// File ontologyDir = new File("ontology"); +//// +//// +//// +//// dataset = TDBFactory.createDataset(DATASET_DIR); +////// Model m = dataset.getNamedModel("https://saref.etsi.org/core/accomplishes"); +////// m.write(System.out, "TTL"); +//// try { +//// checkFolderStructure(); +//// +//// checkUnusedTerms(); +//// +//// checkNameConflicts(); +//// +//// Group group = readGroup(); +//// +//// cloneRepos(group); +//// +////// Group group = new Group(); +////// List projects = new ArrayList<>(); +////// group.projects = projects; +////// +////// Project repo = new Project(); +////// repo.name = "saref-core"; +////// repo.directory = new File(GIT_DIR, repo.name); +////// repo.namespace = getNamespace(repo); +////// repo.prefix = getPrefix(repo); +////// projects.add(repo); +////// +////// repo = new Project(); +////// repo.name = "saref4ener"; +////// repo.directory = new File(GIT_DIR, repo.name); +////// repo.namespace = getNamespace(repo); +////// repo.prefix = getPrefix(repo); +////// projects.add(repo); +//// +//// readRepos(group); +//// +//// writeToTDBDataset(); +//// +// +//// +//// SPARQLExtStreamManager sm = initializeStreamManager(); +//// +//// RootPlan planForTerm = createPlanForTerm(sm); +//// +//// generateFiles(sm, planForTerm); +//// } finally { +//// dataset.close(); +//// } +//// +//// } +//// +//// +//// +//// private static void checkFolderStructure() { +//// +//// } +//// +//// private static void checkUnusedTerms() { +//// /* +//// * Query to find terms that are not defined in any ontology: +//// * +//// * PREFIX ex: PREFIX rdfs: +//// * SELECT ?term ?version WHERE { +//// * +//// * ?term ex:isUsedBy ?version . MINUS { ?term rdfs:isDefinedBy ?v2 . 
} } +//// * +//// */ +//// } +//// +//// private static void checkNameConflicts() { +//// /* +//// * Query to find local name conflicts in different ontologies +//// * +//// * PREFIX ex: PREFIX rdfs: +//// * SELECT DISTINCT * WHERE { +//// * +//// * ?term1 ex:localName ?name ; rdfs:isDefinedBy ?v1 . ?term2 ex:localName ?name +//// * ; rdfs:isDefinedBy ?v2 . FILTER( ?term1 != ?term2 ) } +//// * +//// */ +//// } +//// +//// +//// private static Group readGroup() throws IOException { +//// final URL url = new URL(urlString); +//// InputStreamReader reader = new InputStreamReader(url.openStream()); +//// return gson.fromJson(reader, Group.class); +//// } +//// +//// private static void writeToTDBDataset() { +//// dataset.begin(ReadWrite.WRITE); +//// dataset.addNamedModel(EX.config, config); +//// for (String name : ngs.keySet()) { +//// dataset.addNamedModel(name, ngs.get(name)); +//// } +//// +//// Model defaultModel = dataset.getDefaultModel(); +//// defaultModel.add(ResourceFactory.createResource("s"), ResourceFactory.createProperty("p"), ResourceFactory.createResource("o")); +//// dataset.commit(); +//// } +//// +//// private static void readRepos(Group group) throws IOException { +//// for (Project repo : group.projects) { +//// try (Git git = Git.open(repo.directory);) { +//// readRepo(repo, git); +//// } +//// } +//// } +//// +//// static String getNamespace(String graphName) { +//// Matcher m = REGEX_EXT_PATTERN.matcher(graphName); +//// if (!m.find()) { +//// throw new IllegalArgumentException("got " + graphName); +//// } +//// return NS + m.group("ext") + "/"; +//// } +//// +//// static String getOntologyFileName(Project project) { +//// if (project.name.equals("saref-core")) { +//// return "saref.ttl"; +//// } else { +//// return String.format("%s.ttl", project.name); +//// } +//// } +//// +//// static String getNamespace(Project project) { +//// if (project.name.equals("saref-core")) { +//// return String.format("%score/", NS); +//// } else { +//// return String.format("%s%s/", NS, project.name); +//// } +//// } +//// +//// static String getPrefix(String graphName) { +//// Matcher m = REGEX_EXT_PATTERN.matcher(graphName); +//// if (!m.find()) { +//// throw new IllegalArgumentException("got " + graphName); +//// } +//// String ext = m.group("ext"); +//// if(ext.equals("core")) { +//// return "saref:"; +//// } else { +//// String shortName = ext.substring(5); +//// return String.format("s%s:", shortName); +//// } +//// } +//// +//// static String getPrefix(Project project) { +//// if (project.name.equals("saref-core")) { +//// return "saref:"; +//// } else { +//// String shortName = project.name.substring(5); +//// return String.format("s%s:", shortName); +//// } +//// } +//// +//// static String getVersionURI(Project project, Version version) { +//// if (project.name.equals("saref-core")) { +//// return String.format("%s%sv%s.%s.%s/", NS, "core/", version.major, version.minor, version.patch); +//// } else { +//// return String.format("%s%s/v%s.%s.%s/", NS, project.name, version.major, version.minor, version.patch); +//// } +//// } +//// +//// static String getVersionPrefix(Project project, Version version) { +//// if (project.name.equals("saref-core")) { +//// return String.format("saref-%s.%s.%s:", version.major, version.minor, version.patch); +//// } else { +//// String shortName = project.name.substring(5); +//// return String.format("s%s-%s.%s.%s:", shortName, version.major, version.minor, version.patch); +//// } +//// } +//// +//// private static void cloneRepos(Group group) 
throws IOException { +//// FileUtils.forceMkdir(GIT_DIR); +//// for (Project repo : new ArrayList<>(group.projects)) { +//// if (!REGEX_REPO_PATTERN.matcher(repo.name).matches()) { +//// group.projects.remove(repo); +//// continue; +//// } +//// repo.directory = new File(GIT_DIR, repo.name); +//// repo.namespace = getNamespace(repo); +//// repo.prefix = getPrefix(repo); +//// if (repo.directory.exists()) { +//// continue; +//// } +//// LOG.debug("Cloning project " + repo.name); +//// try (Git git = Git.cloneRepository().setURI(repo.http_url_to_repo).setDirectory(repo.directory).call()) { +//// } catch (Exception ex) { +//// LOG.warn("Could not clone project " + repo.name, ex); +//// } +//// } +//// } +//// +//// private static void readRepo(Project repo, Git git) { +//// System.out.println("project" + repo.name); +//// +//// repo.resource = ResourceFactory.createResource(repo.namespace); +//// config.add(repo.resource, RDF.type, OWL2.Ontology); +//// +//// try { +//// List remoteBranches = git.branchList().setListMode(ListBranchCommand.ListMode.REMOTE).call(); +//// for (Ref ref : remoteBranches) { +//// String branch = ref.getName(); +//// Matcher m = REGEX_RELEASE_BRANCH_PATTERN.matcher(branch); +//// if (!m.find()) { +//// continue; +//// } +//// Version version = new Version(); +//// version.major = Integer.parseInt(m.group("major")); +//// version.minor = Integer.parseInt(m.group("minor")); +//// version.patch = Integer.parseInt(m.group("patch")); +//// version.uri = getVersionURI(repo, version); +//// version.resource = ResourceFactory.createResource(version.uri); +//// version.prefix = getVersionPrefix(repo, version); +//// version.ref = ref; +//// RevCommit commit = git.log().add(version.ref.getObjectId()).call().iterator().next(); +//// version.issued = commit.getCommitterIdent().getWhen(); +//// repo.releases.add(version); +//// System.out.println("version " + version); +//// } +//// +//// // order versions +//// Collections.sort(repo.releases, (Version o1, Version o2) -> { +//// if (o1.major - o2.major != 0) { +//// return o1.major - o2.major; +//// } +//// if (o1.minor - o2.minor != 0) { +//// return o1.minor - o2.minor; +//// } +//// return o1.patch - o2.patch; +//// }); +//// +//// for (int i = 0; i < repo.releases.size(); i++) { +//// Version version = repo.releases.get(i); +//// +//// Model onto = readOntology(repo, version); +//// ngs.put(version.uri, onto); +//// +//// // compute some metadata +//// Version priorVersion = i == 0 ? 
null : repo.releases.get(i - 1); +//// addOntologyMetadata(onto, repo, git, version, priorVersion); +//// +//// // for each term, add: +//// // some triples in the default graph +//// // -> used to detect naming clashes +//// // some triples in the named graph of the term +//// // -> used to display the page of the term +//// Set definedTerms = new HashSet<>(); +//// Set usedTerms = new HashSet<>(); +//// computeTerms(onto, repo, version, definedTerms, usedTerms); +//// config.add(repo.resource, EX.hasVersion, version.resource); +//// config.add(version.resource, RDF.type, EX.OntologyVersion); +//// config.add(version.resource, EX.versionInfo, +//// String.format("v%s.%s.%s", version.major, version.minor, version.patch)); +//// if (priorVersion != null) { +//// config.add(version.resource, EX.priorVersion, priorVersion.resource); +//// } +//// +//// for (Resource t : definedTerms) { +//// Model termModel = getNamedModel(t); +//// String localName = t.getURI().substring(repo.namespace.length()); +//// termModel.add(t, RDFS.isDefinedBy, version.resource); +//// // keep the most recent definition of the term +//// if (i == repo.releases.size() - 1) { +//// termModel.add(t, DCTerms.modified, SIMPLE_DATE_FORMAT.format(version.issued), +//// XSDDatatype.XSDdate); +//// try (QueryExecution exec = QueryExecutionFactory.create("DESCRIBE <" + t.getURI() + ">", +//// onto)) { +//// termModel.add(exec.execDescribe()); +//// } +//// } +//// config.add(t, RDF.type, RDFP.Resource); +//// config.add(t, EX.localName, localName); +//// config.add(t, RDFS.isDefinedBy, version.resource); +//// } +//// +//// for (Resource t : usedTerms) { +//// Model termModel = getNamedModel(t); +//// termModel.add(t, EX.isUsedBy, version.resource); +//// config.add(t, RDF.type, RDFP.Resource); +//// config.add(t, EX.isUsedBy, version.resource); +//// } +//// } +//// } catch (Exception ex) { +//// LOG.warn("Exception for " + repo.name, ex); +//// } +//// } +//// +//// private static Model readOntology(Project repo, Version version) throws Exception { +//// // checkout the HEAD of the release branch +//// Git.open(repo.directory).checkout().setName(version.ref.getName()).call(); +//// Model onto = ModelFactory.createDefaultModel(); +//// // read the ontology +//// File ontologyFile = new File(repo.directory, "ontology/" + getOntologyFileName(repo)); +//// try (FileReader fr = new FileReader(ontologyFile)) { +//// onto.read(fr, NS, "TTL"); +//// } +//// return onto; +//// } +//// +//// private static void addOntologyMetadata(Model onto, Project repo, Git git, Version version, Version priorVersion) +//// throws Exception { +//// onto.add(repo.resource, RDF.type, OWL2.Ontology); +//// +//// // we assume dc:title, dc:description, rdfs:comment, are defined, and have +//// // language tags +//// onto.removeAll(repo.resource, OWL2.versionIRI, null); +//// onto.add(repo.resource, OWL2.versionIRI, version.resource); +//// +//// onto.removeAll(repo.resource, OWL2.versionInfo, null); +//// onto.add(repo.resource, OWL2.versionInfo, +//// String.format("v%s.%s.%s", version.major, version.minor, version.patch)); +//// +//// onto.removeAll(repo.resource, OWL2.priorVersion, null); +//// if (priorVersion != null) { +//// onto.add(repo.resource, OWL2.priorVersion, priorVersion.resource); +//// } +//// +//// onto.removeAll(repo.resource, DCTerms.publisher, null); +//// onto.add(repo.resource, DCTerms.publisher, ETSI_URL); +//// +//// onto.removeAll(repo.resource, DCTerms.license, null); +//// onto.add(repo.resource, DCTerms.license, 
ETSI_LICENSE); +//// +//// // list of contributors +//// Set contributors = new HashSet<>(); +//// for (Iterator it = git.log().add(version.ref.getObjectId()).call().iterator(); it.hasNext();) { +//// RevCommit commit = it.next(); +//// Contributor contributor = new Contributor(commit.getCommitterIdent()); +//// contributors.add(contributor); +//// } +//// for (Contributor contributor : contributors) { +//// // git can only provide name + email. +//// // would need to maintain some list of mapping email -> url in the future. +//// Resource anon = onto.createResource(); +//// onto.add(repo.resource, DCTerms.contributor, anon); +//// onto.add(anon, FOAF.name, contributor.getName()); +//// onto.add(anon, FOAF.mbox, contributor.getEmailAddress()); +//// } +//// +//// // prefixes +//// onto.removeAll(repo.resource, VANN.preferredNamespacePrefix, null); +//// onto.removeAll(repo.resource, VANN.preferredNamespaceUri, null); +//// onto.add(repo.resource, VANN.preferredNamespacePrefix, repo.prefix); +//// onto.add(repo.resource, VANN.preferredNamespaceUri, repo.namespace, XSDDatatype.XSDanyURI); +//// +//// // issued +//// onto.removeAll(repo.resource, DCTerms.issued, null); +//// onto.add(repo.resource, DCTerms.issued, SIMPLE_DATE_FORMAT.format(version.issued), XSDDatatype.XSDdate); +//// } +//// +//// private static void computeTerms(Model onto, Project repo, Version version, Set definedTerms, +//// Set usedTerms) { +//// onto.listStatements().forEachRemaining(stmt -> { +//// Resource s = stmt.getSubject(); +//// Resource p = stmt.getPredicate(); +//// Resource o = stmt.getObject().isResource() ? (Resource) stmt.getObject() : null; +//// computeTerms(s, repo, version, definedTerms, usedTerms); +//// computeTerms(p, repo, version, definedTerms, usedTerms); +//// computeTerms(o, repo, version, definedTerms, usedTerms); +//// }); +//// } +//// +//// private static void computeTerms(Resource t, Project repo, Version version, Set definedTerms, +//// Set usedTerms) { +//// if (t == null || !t.isURIResource() || !t.getURI().startsWith(NS) || t.getURI().endsWith("/")) { +//// return; +//// } +//// String uri = t.getURI(); +//// if (uri.startsWith(repo.namespace)) { +//// definedTerms.add(t); +//// } else if (!uri.startsWith(repo.namespace)) { +//// usedTerms.add(t); +//// } +//// } +//// +//// private static Model getNamedModel(Resource t) { +//// String uri = t.getURI(); +//// if (ngs.containsKey(uri)) { +//// return ngs.get(uri); +//// } else { +//// Model model = ModelFactory.createDefaultModel(); +//// model.add(t, RDF.type, RDFS.Resource); +//// ngs.put(uri, model); +//// return model; +//// } +//// } +//// +//// private static class EX { +//// +//// private static String NS = "http://example.org/"; +//// private static String config = NS + "config"; +//// private static Property localName = ResourceFactory.createProperty(NS, "localName"); +//// private static Property hasVersion = ResourceFactory.createProperty(NS, "hasVersion"); +//// private static Property versionInfo = ResourceFactory.createProperty(NS, "versionInfo"); +//// private static Property priorVersion = ResourceFactory.createProperty(NS, "priorVersion"); +//// private static Property isUsedBy = ResourceFactory.createProperty(NS, "isUsedBy"); +//// private static Resource OntologyVersion = ResourceFactory.createResource(NS + "OntologyVersion"); +//// } +//// +//// private static class VANN { +//// +//// private static String NS = "http://purl.org/vocab/vann/"; +//// private static Property preferredNamespacePrefix = 
ResourceFactory.createProperty(NS, +//// "preferredNamespacePrefix"); +//// private static Property preferredNamespaceUri = ResourceFactory.createProperty(NS, "preferredNamespaceUri"); +//// } +//// +//// private static class RDFP { +//// public static final String NS = "https://w3id.org/rdfp/"; +//// public static final Property presentedBy = ResourceFactory.createProperty(NS + "presentedBy"); +//// public static final Property presentationFor = ResourceFactory.createProperty(NS + "presentationForq"); +//// public static final Property loweringRule = ResourceFactory.createProperty(NS + "loweringRule"); +//// public static final Property liftingRule = ResourceFactory.createProperty(NS + "liftingRule"); +//// public static final Property validationRule = ResourceFactory.createProperty(NS + "validationRule"); +//// public static final Resource Graph = ResourceFactory.createResource(NS + "Graph"); +//// public static final Resource Resource = ResourceFactory.createResource(NS + "Resource"); +//// public static final Property representedBy = ResourceFactory.createProperty(NS + "representedBy"); +//// public static final Property mediaType = ResourceFactory.createProperty(NS + "mediaType"); +//// public static final Property alias = ResourceFactory.createProperty(NS + "alias"); +//// public static final Property describedBy = ResourceFactory.createProperty(NS + "describedBy"); +//// public static final Property filePath = ResourceFactory.createProperty(NS + "filePath"); +//// public static final Resource Ontology = ResourceFactory.createResource(NS + "Ontology"); +//// public static final Property fileSelector = ResourceFactory.createProperty(NS + "fileSelector"); +//// } +// +//} diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/JobRunner.java b/src/main/java/fr/emse/gitlab/saref/jobs/JobRunner.java new file mode 100644 index 0000000000000000000000000000000000000000..b008e280138bee45349b3b0841b69349ba89a2bf --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/JobRunner.java @@ -0,0 +1,9 @@ +package fr.emse.gitlab.saref.jobs; + +import fr.emse.gitlab.saref.entities.tests.TestSuites; + +public interface JobRunner { + + T doJob(TestSuites testSuites); + +} diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/ReadRepositories.java b/src/main/java/fr/emse/gitlab/saref/jobs/ReadRepositories.java new file mode 100644 index 0000000000000000000000000000000000000000..b19839917e281562f033517592cb5bcdd21b14b3 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/ReadRepositories.java @@ -0,0 +1,254 @@ +package fr.emse.gitlab.saref.jobs; + +import java.io.Console; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.text.StringSubstitutor; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.ListBranchCommand; +import org.eclipse.jgit.lib.Ref; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.transport.CredentialsProvider; +import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.fasterxml.jackson.dataformat.yaml.YAMLFactory; + +import fr.emse.gitlab.saref.Constants; +import fr.emse.gitlab.saref.entities.git.BranchVersion; +import fr.emse.gitlab.saref.entities.git.MasterVersion; +import fr.emse.gitlab.saref.entities.git.ReleaseVersion; +import fr.emse.gitlab.saref.entities.git.Repositories; +import fr.emse.gitlab.saref.entities.git.Repository; +import fr.emse.gitlab.saref.entities.git.Version; + +public class ReadRepositories extends AbstractJobRunner { + + static final Logger LOG = LoggerFactory.getLogger(ReadRepositories.class); + + static final String REGEX_REPO_STRING = "^saref(-core|4[a-z]{4})$"; + static final Pattern REGEX_REPO_PATTERN = Pattern.compile(REGEX_REPO_STRING, Pattern.CASE_INSENSITIVE); + + public ReadRepositories(File dir) { + super("Read and clone repositories in \".saref-repositories.yml\"", dir); + } + + /* + * Needs to work the same way: - on the computer of a knowledge engineer - when + * I trigger a CI job. see + * https://docs.gitlab.com/ee/user/project/new_ci_build_permissions_model.html + * + * by default, on the machine that will trigger the job, if it is gitlab, we may + * use username gitlab-ci-token and password $CI_JOB_TOKEN --> that is, if the + * environment variable GITLAB_CI is set. + * + * + * + * but on the other machines, we need to provide a username and password. This + * is especially true if we want to use repositories from different platforms + * (gitlab of emse, gitlab of etsi, github, ...) Environment variables are + * substituted in these values. + * + * If they are not set, then we may prompt the user. If the credentials are not + * set, then by default we use gitlab-ci-token and password $CI_JOB_TOKEN + * + */ + @Override + protected Repositories getValue() { + final Repositories repositories = new Repositories(); + final File file = new File(directory, ".saref-repositories.yml"); + YAMLRepos repos; + try { + ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); + repos = mapper.readValue(file, YAMLRepos.class); + } catch (Exception ex) { + error("Error while reading the list of repositories \".saref-repositories.yml\"", ex); + return repositories; + } + File repoDir = new File(directory, Constants.GIT_DIR); + try { + FileUtils.forceDelete(repoDir); + FileUtils.forceMkdir(repoDir); + } catch (IOException ex) { + error(String.format("Error while creating repository directory %s", repoDir.getPath()), ex); + return repositories; + } + for (YAMLRepos.Entry entry : repos.entrySet()) { + String hostName = entry.getKey(); + YAMLHost host = entry.getValue(); + CredentialsProvider crendentialsProvider = getCredentialsProvider(hostName, host.credentials); + for (String repo : host.repos) { + String name = repo.substring(repo.lastIndexOf("/") + 1); + if (!REGEX_REPO_PATTERN.matcher(name).matches()) { + failure(String.format("The project name shall match the regular expression \\%s\\. 
got: %s", + REGEX_REPO_STRING, name)); + continue; + } + File directory = new File(repoDir, name); + String http_url_to_repo = String.format("https://%s/%s.git", hostName, repo); + try (Git git = Git.cloneRepository().setCredentialsProvider(crendentialsProvider) + .setURI(http_url_to_repo).setDirectory(directory).call()) { + LOG.info(String.format("Cloned repository %s to %s", http_url_to_repo, name)); + List versions = readRepository(git, name, directory); + Repository repository = new Repository(name, directory, http_url_to_repo, versions); + repositories.add(repository); + } catch (Exception ex) { + failure(String.format("Failed to clone repository %s to %s", http_url_to_repo, name), ex); + continue; + } + } + } + return repositories; + } + + private CredentialsProvider getCredentialsProvider(String hostName, YAMLCredentials credentials) { + final String username; + final char[] password; + final Console console = System.console(); + if (credentials != null && credentials.username != null) { + username = StringSubstitutor.replace(credentials.username, System.getenv()); + } else { + if (console != null) { + username = console + .readLine(String.format("Please enter your EOL account username for %s:\n", hostName)); + } else if (System.getenv("GITLAB_CI") != null) { + LOG.info("using username 'gitlab-ci-token'"); + username = "gitlab-ci-token"; + } else { + LOG.warn("using empty username"); + username = ""; + } + } + if (credentials != null && credentials.password != null) { + password = StringSubstitutor.replace(credentials.password, System.getenv()).toCharArray(); + } else { + if (console != null) { + password = console + .readPassword(String.format("Please enter your EOL account password for %s:\n", hostName)); + } else if (System.getenv("GITLAB_CI") != null) { + password = System.getenv("CI_JOB_TOKEN").toCharArray(); + } else { + LOG.warn("using empty password"); + password = "".toCharArray(); + } + } + return new UsernamePasswordCredentialsProvider(username, password); + } + + private List readRepository(Git git, String name, File directory) throws Exception { + List versions = new ArrayList<>(); + List remoteBranches = git.branchList().setListMode(ListBranchCommand.ListMode.REMOTE).call(); + for (Ref ref : remoteBranches) { + Version v = getVersion(git, name, ref); + if (v != null) { + versions.add(v); + } + } + Collections.sort(versions, new Comparator() { + @Override + public int compare(Version o1, Version o2) { + if (o1.equals(o2)) { + return 0; + } + if (o1.getName() != o2.getName()) { + return o1.getName().compareTo(o2.getName()); + } + if (o1 instanceof MasterVersion && o2 instanceof MasterVersion) { + return 0; + } + if (o1 instanceof MasterVersion) { + return 1; + } + if (o2 instanceof MasterVersion) { + return -1; + } + if (o1 instanceof ReleaseVersion && o2 instanceof ReleaseVersion) { + ReleaseVersion r1 = (ReleaseVersion) o1; + ReleaseVersion r2 = (ReleaseVersion) o2; + if (r1.getMajor() - r2.getMajor() != 0) { + return r1.getMajor() - r2.getMajor(); + } + if (r1.getMinor() - r2.getMinor() != 0) { + return r1.getMinor() - r2.getMinor(); + } + return r1.getPatch() - r2.getPatch(); + } + if (o1 instanceof ReleaseVersion) { + return 1; + } + if (o2 instanceof ReleaseVersion) { + return -1; + } + return 0; + } + }); + return versions; + } + + private Version getVersion(Git git, String name, Ref ref) throws Exception { + String branch = ref.getName(); + RevCommit commit = git.log().add(ref.getObjectId()).call().iterator().next(); + Date issued = 
commit.getCommitterIdent().getWhen(); + if (Constants.INCLUDE_MASTER && branch.equals(Constants.REGEX_MASTER_BRANCH)) { + return new MasterVersion(name, ref, issued); + } + Matcher m = Constants.REGEX_RELEASE_BRANCH_PATTERN.matcher(branch); + if (m.find()) { + int major = Integer.parseInt(m.group("major")); + int minor = Integer.parseInt(m.group("minor")); + int patch = Integer.parseInt(m.group("patch")); + return new ReleaseVersion(name, ref, issued, major, minor, patch); + } + if (Constants.INCLUDE_ALL) { + m = Constants.REGEX_OTHER_BRANCH_PATTERN.matcher(branch); + if (m.find()) { + String branchName = m.group("name"); + return new BranchVersion(name, ref, issued, branchName); + } + } + return null; + } + + private static class YAMLRepos extends HashMap { + private static final long serialVersionUID = 3434677850580166200L; + } + + private static class YAMLHost { + private YAMLCredentials credentials; + private List repos; + + @JsonCreator + public YAMLHost(@JsonProperty(value = "credentials", required = false) YAMLCredentials credentials, + @JsonProperty(value = "repos", required = false) List repos) { + this.credentials = credentials; + this.repos = repos; + } + } + + private static class YAMLCredentials { + private String username; + private String password; + + @JsonCreator + public YAMLCredentials(@JsonProperty(value = "username", required = false) String username, + @JsonProperty(value = "password", required = false) String password) { + this.username = username; + this.password = password; + } + } + +} diff --git a/src/main/java/fr/emse/gitlab/saref/jobs/WriteRDFFiles.java b/src/main/java/fr/emse/gitlab/saref/jobs/WriteRDFFiles.java new file mode 100644 index 0000000000000000000000000000000000000000..25447a76ccf541521d3600f714e7a17bb1c52d6d --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/jobs/WriteRDFFiles.java @@ -0,0 +1,70 @@ +package fr.emse.gitlab.saref.jobs; + +import java.io.File; +import java.io.FileOutputStream; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; + +import org.apache.commons.io.FileUtils; +import org.apache.jena.query.Dataset; +import org.apache.jena.query.ReadWrite; +import org.apache.jena.rdf.model.Model; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; +import org.semanticweb.owlapi.util.SimpleIRIMapper; + +import fr.emse.gitlab.saref.Constants; +import fr.emse.gitlab.saref.utils.Languages; +import fr.emse.gitlab.saref.vocabs.EX; + + +public class WriteRDFFiles extends AbstractJobRunner> { + + private final Dataset dataset; + + public WriteRDFFiles(File directory, Dataset dataset) { + super("Write RDF files", directory); + this.dataset = dataset; + } + + @Override + public Set getValue() { + File siteDir = new File(directory, Constants.SITE_DIR); + Set mappers = new HashSet<>(); + + dataset.begin(ReadWrite.READ); + for (Iterator it = dataset.listNames(); it.hasNext();) { + String name = it.next(); + if (name.equals(EX.config)) { + continue; + } + Model model = dataset.getNamedModel(name); + String fileName = name.substring(Constants.BASE.length()); + if (fileName.endsWith("/")) { + // is ontology + fileName = fileName + fileName.substring(0, fileName.indexOf("/")); + // register IRIMapper + File ttlFile = new File(siteDir, fileName + ".ttl"); + OWLOntologyIRIMapper mapper = new SimpleIRIMapper( + IRI.create(name), + IRI.create(ttlFile)); + mappers.add(mapper); + } + try { + FileUtils.forceMkdir(new File(siteDir, fileName).getParentFile()); + for (Languages l : 
Languages.values()) { + File file = new File(siteDir, String.format("%s.%s", fileName, l.getExt())); + try (FileOutputStream fos = new FileOutputStream(file)) { + model.write(fos, l.getLang()); + } + } + } catch(Exception ex) { + failure(String.format("Failed to write the model for %s", fileName), ex); + } + } + dataset.end(); + + return mappers; + } +} diff --git a/saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/utils/Languages.java b/src/main/java/fr/emse/gitlab/saref/utils/Languages.java similarity index 100% rename from saref-pipeline-cli/src/main/java/fr/emse/gitlab/saref/utils/Languages.java rename to src/main/java/fr/emse/gitlab/saref/utils/Languages.java diff --git a/src/main/java/fr/emse/gitlab/saref/vocabs/EX.java b/src/main/java/fr/emse/gitlab/saref/vocabs/EX.java new file mode 100644 index 0000000000000000000000000000000000000000..e70f4569b2e44ba5f5a4c5c7d0ef5fc2129e56b2 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/vocabs/EX.java @@ -0,0 +1,17 @@ +package fr.emse.gitlab.saref.vocabs; + +import org.apache.jena.rdf.model.Property; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; + +public class EX { + + public static final String NS = "http://example.org/"; + public static final String config = NS + "config"; + public static final Property localName = ResourceFactory.createProperty(NS, "localName"); + public static final Property hasVersion = ResourceFactory.createProperty(NS, "hasVersion"); + public static final Property versionInfo = ResourceFactory.createProperty(NS, "versionInfo"); + public static final Property priorVersion = ResourceFactory.createProperty(NS, "priorVersion"); + public static final Property isUsedBy = ResourceFactory.createProperty(NS, "isUsedBy"); + public static final Resource OntologyVersion = ResourceFactory.createResource(NS + "OntologyVersion"); +} diff --git a/src/main/java/fr/emse/gitlab/saref/vocabs/RDFP.java b/src/main/java/fr/emse/gitlab/saref/vocabs/RDFP.java new file mode 100644 index 0000000000000000000000000000000000000000..f49a8c31c231b992746ee9ae5c9b334aced7aa02 --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/vocabs/RDFP.java @@ -0,0 +1,23 @@ +package fr.emse.gitlab.saref.vocabs; + +import org.apache.jena.rdf.model.Property; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; + +public class RDFP { + public static final String NS = "https://w3id.org/rdfp/"; + public static final Property presentedBy = ResourceFactory.createProperty(NS + "presentedBy"); + public static final Property presentationFor = ResourceFactory.createProperty(NS + "presentationForq"); + public static final Property loweringRule = ResourceFactory.createProperty(NS + "loweringRule"); + public static final Property liftingRule = ResourceFactory.createProperty(NS + "liftingRule"); + public static final Property validationRule = ResourceFactory.createProperty(NS + "validationRule"); + public static final Resource Graph = ResourceFactory.createResource(NS + "Graph"); + public static final Resource Resource = ResourceFactory.createResource(NS + "Resource"); + public static final Property representedBy = ResourceFactory.createProperty(NS + "representedBy"); + public static final Property mediaType = ResourceFactory.createProperty(NS + "mediaType"); + public static final Property alias = ResourceFactory.createProperty(NS + "alias"); + public static final Property describedBy = ResourceFactory.createProperty(NS + "describedBy"); + public static final Property filePath = 
ResourceFactory.createProperty(NS + "filePath"); + public static final Resource Ontology = ResourceFactory.createResource(NS + "Ontology"); + public static final Property fileSelector = ResourceFactory.createProperty(NS + "fileSelector"); +} \ No newline at end of file diff --git a/src/main/java/fr/emse/gitlab/saref/vocabs/VANN.java b/src/main/java/fr/emse/gitlab/saref/vocabs/VANN.java new file mode 100644 index 0000000000000000000000000000000000000000..dd34a9ad019256a2ba056988d4d90129e446b3ce --- /dev/null +++ b/src/main/java/fr/emse/gitlab/saref/vocabs/VANN.java @@ -0,0 +1,12 @@ +package fr.emse.gitlab.saref.vocabs; + +import org.apache.jena.rdf.model.Property; +import org.apache.jena.rdf.model.ResourceFactory; + +public class VANN { + + public static final String NS = "http://purl.org/vocab/vann/"; + public static final Property preferredNamespacePrefix = ResourceFactory.createProperty(NS, + "preferredNamespacePrefix"); + public static final Property preferredNamespaceUri = ResourceFactory.createProperty(NS, "preferredNamespaceUri"); +} \ No newline at end of file diff --git a/saref-pipeline-cli/src/main/resources/LICENSE_MODEL b/src/main/resources/LICENSE_MODEL similarity index 100% rename from saref-pipeline-cli/src/main/resources/LICENSE_MODEL rename to src/main/resources/LICENSE_MODEL diff --git a/saref-pipeline-cli/src/main/resources/log4j.properties b/src/main/resources/log4j.properties similarity index 73% rename from saref-pipeline-cli/src/main/resources/log4j.properties rename to src/main/resources/log4j.properties index e6f91fbd373d6bfea2b3f9b0041a7f2edabba763..255b25cdea37aff358c0daae554e57c8cbe01ab5 100644 --- a/saref-pipeline-cli/src/main/resources/log4j.properties +++ b/src/main/resources/log4j.properties @@ -4,6 +4,10 @@ log4j.logger.org.eclipse.jetty=WARN log4j.logger.org.apache.jena.riot.system.stream=WARN log4j.logger.org.apache.jena.util=WARN log4j.logger.org.apache.http=WARN +log4j.logger.uk.ac.manchester.cs.owlapi=WARN +log4j.logger.org.semanticweb.owlapi=WARN +log4j.logger.org.eclipse.rdf4j=WARN +log4j.logger.org.eclipse.jgit=WARN log4j.appender.stdout=org.apache.log4j.ConsoleAppender log4j.appender.stdout.Target=System.out diff --git a/src/main/resources/sarefShape.ttl b/src/main/resources/sarefShape.ttl new file mode 100644 index 0000000000000000000000000000000000000000..82942936594cc05dd6ee585e79c67b9d0e6e4b4e --- /dev/null +++ b/src/main/resources/sarefShape.ttl @@ -0,0 +1,297 @@ +@prefix dash: . +@prefix owl: . +@prefix rdf: . +@prefix rdfs: . +@prefix sh: . +@prefix xsd: . +@prefix dcterms: . +@prefix vann: . +@prefix sarefsh: . +@prefix schema: . + +sarefsh:OntologyShape + a sh:NodeShape ; + sh:targetClass owl:Ontology ; + sh:closed true ; + sh:property [ + sh:path rdf:type ; + sh:minCount 1 ; + sh:hasValue owl:Ontology ; + ] ; + sh:property [ + sh:path owl:versionInfo ; + sh:maxCount 0 ; + sh:severity sh:Warning ; + sh:message "The annotation owl:versionInfo shall not be set in the source file, as it will be computed automatically." + ] ; + sh:property [ + sh:path owl:versionIRI ; + sh:maxCount 0 ; + sh:severity sh:Warning ; + sh:message "The annotation owl:versionIRI shall not be set in the source file, as it will be computed automatically." + ] ; + sh:property [ + sh:path owl:priorVersion ; + sh:maxCount 0 ; + sh:severity sh:Warning ; + sh:message "The annotation owl:priorVersion shall not be set in the source file, as it will be computed automatically." 
+ ] ; + sh:property [ + sh:path owl:imports ; + sh:nodeKind sh:IRI ; + sh:pattern "^https://saref.etsi.org/(core|saref4[a-z]{4})/v[1-9][0-9]*\\.[1-9][0-9]*\\.[1-9][0-9]*/$" ; + sh:severity sh:Violation ; + sh:message "Only SAREF ontologies with specific versions shall be imported. Their URIs conform to the regular expression `^https://saref.etsi.org/(core|saref4[a-z]{4})/v[1-9][0-9]*\\.[1-9][0-9]*\\.[1-9][0-9]*/$`" + ] ; + sh:property [ + sh:path dcterms:title ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The ontology should have exactly one dcterms:title." + ] ; + sh:property [ + sh:path dcterms:abstract ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The ontology should have exactly one dcterms:abstract." + ] ; + sh:property [ + sh:path dcterms:description ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The ontology should have exactly one dcterms:description." + ] ; + sh:property [ + sh:path rdfs:comment ; + sh:nodeKind sh:Literal ; + sh:minCount 0 ; + ] ; + sh:property [ + sh:path dcterms:issued ; + sh:datatype xsd:date ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The ontology shall have exactly one dcterms:issued annotation, which shall be a valid xsd:date literal YYYY-MM-DD." + ] ; + sh:property [ + sh:path dcterms:modified ; + sh:datatype xsd:date ; + sh:maxCount 0 ; + sh:severity sh:Warning ; + sh:message "The annotation dcterms:modified shall not be set in the source file, as it will be computed automatically." + ] ; + sh:property [ + sh:path dcterms:source ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:pattern "^http://www.etsi.org/deliver/etsi_ts/" ; + sh:severity sh:Warning ; + sh:message "There shall be exactly one annotation dcterms:source that points to the ETSI Technical Specification URL `http://www.etsi.org/deliver/etsi_ts/...`." + ] ; + sh:property [ + sh:path dcterms:creator ; + sh:maxCount 0 ; + sh:severity sh:Violation ; + sh:message "The ontology shall have no dcterms:creator annotation." + ] ; + sh:property [ + sh:path dcterms:license ; + sh:nodeKind sh:IRI ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:pattern "https://forge.etsi.org/etsi-software-license" ; + sh:severity sh:Violation ; + sh:message "There shall be exactly one dcterms:license annotation, with value ." + ] ; + sh:property [ + sh:path dcterms:contributor ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:minCount 1 ; + sh:severity sh:Violation ; + sh:message "There shall be at least one dcterms:contributor, that shall be a blank node or a IRI." + ] ; + sh:property [ + sh:path dcterms:contributor ; + sh:severity sh:Violation ; + sh:node sarefsh:PersonShape ; + sh:message "Every contributor shall be a schema:Person with schema:givenName, schema:familyName, and schema:affiliation." + ] ; + sh:property [ + sh:path dcterms:publisher ; + sh:maxCount 0 ; + sh:severity sh:Warning ; + sh:message "The annotation dcterms:publisher shall not be set in the source file, as it will be computed automatically." + ] ; + sh:property [ + sh:path vann:preferredNamespacePrefix ; + sh:maxCount 0 ; + sh:severity sh:Warning ; + sh:message "The annotation vann:preferredNamespacePrefix shall not be set in the source file, as it will be computed automatically." 
+ ] ; + sh:property [ + sh:path vann:preferredNamespaceUri ; + sh:maxCount 0 ; + sh:severity sh:Warning ; + sh:message "The annotation vann:preferredNamespaceUri shall not be set in the source file, as it will be computed automatically." + ] . + +sarefsh:PersonShape + a sh:NodeShape ; + sh:targetClass schema:Person ; + sh:property [ + sh:path rdf:type ; + sh:hasValue schema:Person ; + sh:severity sh:Violation ; + sh:message "Each contributor shall be a schema:Person." + ] ; + sh:property [ + sh:path schema:givenName ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "Each contributor shall have exactly one schema:givenName." + ] ; + sh:property [ + sh:path schema:familyName ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "Each contributor shall have exactly one schema:lastName." + ] ; + sh:property [ + sh:path schema:affiliation ; + sh:minCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:node sarefsh:AffiliationShape ; + sh:severity sh:Violation ; + sh:message "Each contributor shall have at least one schema:affiliation, that shall be a blank node or a IRI of type schema:Organization with exactly one schema:name." + ] . + +sarefsh:AffiliationShape + a sh:NodeShape ; + sh:targetClass schema:Organization ; + sh:property [ + sh:path rdf:type ; + sh:hasValue schema:Organization ; + sh:severity sh:Violation ; + sh:message "The affiliation of each contributor shall be a schema:Organization." + ] ; + sh:property [ + sh:path schema:name ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The affiliation of each contributor shall have exactly one schema:name." + ] . + +[] a sh:NodeShape ; + sh:targetClass owl:Class ; + sh:message "The owl:Class should have exactly one rdfs:label and one rdfs:comment" ; + sh:or ( + [ sh:nodeKind sh:BlankNode ] + [ + sh:and ( + [ + sh:path rdfs:label ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:label." + ] + [ + sh:path rdfs:comment ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:comment." + ] + ) + ] + ) . +[] a sh:NodeShape ; + sh:targetClass owl:ObjectProperty ; + sh:message "The owl:ObjectProperty should have exactly one rdfs:label and one rdfs:comment" ; + sh:or ( + [ sh:nodeKind sh:BlankNode ] + [ + sh:and ( + [ + sh:path rdfs:label ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:label." + ] + [ + sh:path rdfs:comment ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:comment." + ] + ) + ] + ) . +[] a sh:NodeShape ; + sh:targetClass owl:DatatypeProperty ; + sh:message "The owl:DatatypeProperty should have exactly one rdfs:label and one rdfs:comment" ; + sh:or ( + [ sh:nodeKind sh:BlankNode ] + [ + sh:and ( + [ + sh:path rdfs:label ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:label." + ] + [ + sh:path rdfs:comment ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:comment." + ] + ) + ] + ) . 
+[] a sh:NodeShape ; + sh:targetClass owl:NamedIndividual ; + sh:message "The owl:NamedIndividual should have exactly one rdfs:label and one rdfs:comment" ; + sh:or ( + [ sh:nodeKind sh:BlankNode ] + [ + sh:and ( + [ + sh:path rdfs:label ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:label." + ] + [ + sh:path rdfs:comment ; + sh:nodeKind sh:Literal ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:severity sh:Violation ; + sh:message "The term should have exactly one rdfs:comment." + ] + ) + ] + ) . \ No newline at end of file
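
For illustration, a minimal sketch of an ontology header that the shapes above would accept. Everything in it is made up for the example: saref4abcd is a fictitious extension, the schema: prefix is assumed to map to https://schema.org/, and the dcterms:source value is only a placeholder matching the required http://www.etsi.org/deliver/etsi_ts/ pattern. Annotations that sarefsh:OntologyShape limits to sh:maxCount 0 (owl:versionInfo, owl:versionIRI, owl:priorVersion, dcterms:modified, dcterms:publisher and the vann: annotations) are deliberately absent, since the pipeline computes them.

@prefix owl:     <http://www.w3.org/2002/07/owl#> .
@prefix xsd:     <http://www.w3.org/2001/XMLSchema#> .
@prefix dcterms: <http://purl.org/dc/terms/> .
@prefix schema:  <https://schema.org/> .

<https://saref.etsi.org/saref4abcd/> a owl:Ontology ;
    dcterms:title "SAREF extension for the fictitious ABCD domain"@en ;
    dcterms:abstract "One-paragraph abstract of the extension."@en ;
    dcterms:description "Longer description of the extension."@en ;
    dcterms:issued "2020-01-31"^^xsd:date ;
    dcterms:source <http://www.etsi.org/deliver/etsi_ts/> ;            # placeholder for the full TS URL
    dcterms:license <https://forge.etsi.org/etsi-software-license> ;
    dcterms:contributor [
        a schema:Person ;
        schema:givenName "Jane" ;
        schema:familyName "Doe" ;
        schema:affiliation [ a schema:Organization ; schema:name "ACME" ]
    ] ;
    owl:imports <https://saref.etsi.org/core/v3.1.1/> .                 # must match the version IRI pattern above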
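
In the same spirit, a sketch of the .saref-repositories.yml file parsed by the ReadRepositories job, inferred from the YAMLRepos, YAMLHost and YAMLCredentials classes above. The host name, group path and saref4abcd repository are fictitious, and EOL_USERNAME and EOL_PASSWORD are hypothetical environment variables that StringSubstitutor would expand; when the credentials block is omitted, the job falls back to a console prompt or, under GitLab CI, to gitlab-ci-token with CI_JOB_TOKEN.

# .saref-repositories.yml (illustrative sketch)
forge.example.org:                  # git host; repositories are cloned from https://<host>/<repo>.git
  credentials:                      # optional
    username: ${EOL_USERNAME}       # hypothetical environment variables, expanded by StringSubstitutor
    password: ${EOL_PASSWORD}
  repos:                            # "<group>/<name>"; the name must match ^saref(-core|4[a-z]{4})$
    - saref/saref-core
    - saref/saref4abcd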
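
Finally, a rough sketch of how the jobs introduced in this patch could be chained. It assumes that AbstractJobRunner, which is not part of this patch, implements the JobRunner interface above, so that each job is driven through doJob(TestSuites) and returns the value built by its getValue() method; the no-argument TestSuites constructor and the variable names are assumptions of the sketch rather than part of the pipeline.

import java.io.File;
import java.util.Set;

import org.apache.jena.query.Dataset;
import org.semanticweb.owlapi.model.OWLOntologyIRIMapper;

import fr.emse.gitlab.saref.entities.git.Repositories;
import fr.emse.gitlab.saref.entities.tests.TestSuites;
import fr.emse.gitlab.saref.jobs.CreateDataset;
import fr.emse.gitlab.saref.jobs.ReadRepositories;
import fr.emse.gitlab.saref.jobs.WriteRDFFiles;

public class PipelineSketch {

    public static void main(String[] args) {
        // working directory of the SAREF project (illustrative default)
        File dir = new File(args.length > 0 ? args[0] : ".");
        // collects the failures and errors reported by the jobs (assumed no-arg constructor)
        TestSuites testSuites = new TestSuites();

        // 1. parse .saref-repositories.yml and clone every declared repository
        Repositories repositories = new ReadRepositories(dir).doJob(testSuites);

        // 2. check out every version of every repository and load it into the TDB-backed dataset
        Dataset dataset = new CreateDataset(dir, repositories).doJob(testSuites);

        // 3. serialize every named graph of the dataset as RDF files under the site directory
        Set<OWLOntologyIRIMapper> mappers = new WriteRDFFiles(dir, dataset).doJob(testSuites);
        System.out.println(mappers.size() + " ontology IRI mappings collected");
    }
}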