// NOTE(review): removed non-Java artifact lines ("Newer" / "Older" — web-page
// pagination residue). The package declaration and the `public class Main {`
// header also appear to be missing from this chunk of the file.
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DIRECTORY;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DIRECTORY_DEFAULT;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_HELP;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_IGNORE_EXAMPLES;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_IGNORE_GIT;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_IGNORE_TERMS;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_INCLUDE_MASTER;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_NO_SITE;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_REMOTE_ONLY;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_VERBOSE;

import java.awt.Desktop;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.atlas.web.TypedInputStream;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingHashMap;
import org.apache.jena.sparql.util.Context;
import org.apache.jena.tdb.TDBFactory;
import org.apache.jena.vocabulary.RDF;
import org.apache.log4j.Layout;
import org.apache.log4j.PatternLayout;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRefNameException;
import org.eclipse.jgit.api.errors.RefAlreadyExistsException;
import org.eclipse.jgit.api.errors.RefNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import fr.emse.ci.sparqlext.SPARQLExt;
import fr.emse.ci.sparqlext.engine.PlanFactory;
import fr.emse.ci.sparqlext.engine.RootPlan;
import fr.emse.ci.sparqlext.stream.LocationMapperAccept;
import fr.emse.ci.sparqlext.stream.LocatorFileAccept;
import fr.emse.ci.sparqlext.stream.LookUpRequest;
import fr.emse.ci.sparqlext.stream.SPARQLExtStreamManager;
import fr.emse.ci.sparqlext.utils.ContextUtils;
import fr.emse.ci.sparqlext.utils.VarUtils;
import fr.emse.gitlab.saref.entities.git.Repositories;
import fr.emse.gitlab.saref.entities.git.Repository;
import fr.emse.gitlab.saref.entities.git.Version;
import fr.emse.gitlab.saref.entities.tests.TestSuites;
import fr.emse.gitlab.saref.jobs.CheckOWLProfile;
import fr.emse.gitlab.saref.jobs.CheckRepositoryStructure;
import fr.emse.gitlab.saref.jobs.ReadExamples;
import fr.emse.gitlab.saref.jobs.ReadOntology;
import fr.emse.gitlab.saref.jobs.ReadRepositories;
import fr.emse.gitlab.saref.utils.TestSuitesAppender;
import fr.emse.gitlab.saref.vocabs.EX;
// Logger for the pipeline entry point (child of the shared LOGGER_BASE hierarchy).
static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_BASE + ".Main");
// Base IRI under which the documentation SPARQL-Generate templates are resolved.
public final static String BASE_DOC = Constants.BASE + "documentation/";
// Matches Turtle files anywhere below a directory (used to discover examples).
public static final PathMatcher ttlMatcher = FileSystems.getDefault().getPathMatcher("glob:**/*.ttl");
// Aggregated JUnit-style results for the whole pipeline run.
private static TestSuites testSuites = new TestSuites("SAREF pipeline");
// Maps ontology/example IRIs to the name of the test suite that covers them.
private static final Map<String, String> testSuiteNames = new HashMap<>();
private static File directory; // canonical working directory of the pipeline
private static File target; // <directory>/target — location of the XML report
private static SPARQLExtStreamManager streamManager; // resolves documentation templates
private static boolean openBrowser = true; // open the HTML report when the run ends
private static Dataset dataset; // TDB dataset backing the whole run
private static Var VAR_TEST_SUITES = VarUtils.allocVar("testsuites");
// Command-line flags, set once in main() from the parsed CommandLine.
static boolean remoteOnly;
static boolean ignoreGit;
static boolean includeMaster;
static boolean generateSite;
static boolean ignoreExamples;
static boolean verbose;
// NOTE(review): the two members below are referenced later in this file
// (ignoreTerms at the ARG_IGNORE_TERMS parse and in the GeneratePortal call;
// repositories in main) but were missing from the original declarations.
static boolean ignoreTerms;
private static Repositories repositories;
public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException, JAXBException,
ParseException, RefAlreadyExistsException, RefNotFoundException, InvalidRefNameException,
CheckoutConflictException, GitAPIException {
CommandLine cl = CMDConfigurations.parseArguments(args);
if (cl.getOptions().length == 0 || cl.hasOption(ARG_HELP)) {
CMDConfigurations.displayHelp();
return;
}
String dirName = cl.getOptionValue(ARG_DIRECTORY, ARG_DIRECTORY_DEFAULT);
directory = new File(dirName).getCanonicalFile();
LOG.error("The directory does not exist " + directory);
System.exit(-1);
}
remoteOnly = cl.hasOption(ARG_REMOTE_ONLY);
ignoreGit = cl.hasOption(ARG_IGNORE_GIT);
includeMaster = cl.hasOption(ARG_INCLUDE_MASTER);
generateSite = !cl.hasOption(ARG_NO_SITE);
ignoreExamples = cl.hasOption(ARG_IGNORE_EXAMPLES);
ignoreTerms = cl.hasOption(ARG_IGNORE_TERMS);
setLogAppenders();
prepareDirectory();
streamManager = initializeStreamManager();
dataset = createFreshDataset();
LOG.info("Starting pipeline in " + directory);
repositories = new ReadRepositories("Fetching required repositories").readRepositories(directory,
remoteOnly, ignoreGit, includeMaster);
if (repositories.getDefaultRepository() != null) {
Repository repository = repositories.getDefaultRepository();
boolean checkExamples = !ignoreExamples;
testRepository(repository, checkExamples);
for (Repository repository : repositories.getNamedRepositories()) {
boolean checkExamples = !ignoreExamples && remoteOnly;
testRepository(repository, checkExamples);
checkOWLProfile();
new CheckConfig().doJob(dataset, ignoreExamples);
if (generateSite) {
new GeneratePortal("Generate static files for the portal", streamManager).doJob(dataset, directory, verbose,
ignoreExamples, ignoreTerms);
if (repositories.getDefaultRepository() != null && !ignoreGit) {
Repository repository = repositories.getDefaultRepository();
resetCheckout(repository);
for (Repository repository : repositories.getNamedRepositories()) {
reportAndExit((int) -Math.signum(testSuites.getErrors() + testSuites.getFailures()));
private static void setLogAppenders() throws IOException {
File logFile = new File(directory, Constants.LOG_FILE_NAME);
Layout layout = new PatternLayout("%d{mm:ss,SSS} %t %-5p %c:%L - %m%n");
org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger();
rootLogger.addAppender(new org.apache.log4j.RollingFileAppender(layout, logFile.getAbsolutePath(), false));
org.apache.log4j.Logger loggerBase = org.apache.log4j.Logger.getLogger(Constants.LOGGER_BASE);
TestSuitesAppender appender = new TestSuitesAppender(testSuites);
private static void prepareDirectory() throws IOException {
target = new File(directory, Constants.TARGET_DIR);
File site = new File(directory, Constants.SITE_DIR);
if (generateSite) {
try (Git git = Git.cloneRepository().setURI(Constants.SAREF_PORTAL_STATIC_GIT).setDirectory(site).call()) {
LOG.info(String.format("Cloning site repository %s", Constants.SAREF_PORTAL_STATIC_GIT));
} catch (Exception ex) {
try (Git git = Git.open(site)) {
git.pull().call();
LOG.info(String.format("Pulling site repository %s", Constants.SAREF_PORTAL_STATIC_GIT));
} catch (Exception e) {
throw new IOException(String.format("Failed to clone or pull site repository %s", Constants.SAREF_PORTAL_STATIC_GIT), e);
}
}
/**
 * (Re)creates the TDB dataset under {@code <directory>/<DATASET_DIR>}, wiping
 * anything a previous run left behind: the default graph is emptied, every
 * named graph is dropped, and a fresh (empty) configuration graph is added.
 *
 * @return the freshly initialized dataset (also stored in {@link #dataset})
 * @throws IOException if the dataset directory cannot be created
 */
private static Dataset createFreshDataset() throws IOException {
    final File datasetDir = new File(directory, Constants.DATASET_DIR);
    FileUtils.forceMkdir(datasetDir);
    dataset = TDBFactory.createDataset(datasetDir.getAbsolutePath());
    dataset.begin(ReadWrite.WRITE);
    dataset.getDefaultModel().removeAll();
    // Collect the graph names first so we do not remove while iterating.
    final List<String> graphNames = new ArrayList<>();
    dataset.listNames().forEachRemaining(graphNames::add);
    graphNames.forEach(dataset::removeNamedModel);
    // Start from an empty configuration graph.
    dataset.addNamedModel(Constants.CONFIG, ModelFactory.createDefaultModel());
    dataset.commit();
    return dataset;
}
private static void testRepository(Repository repository, boolean checkExamples) {
for (Version version : repository.getVersions()) {
try {
String versionTestSuiteName = version.toString() + " - testing repository structure";
new CheckoutJob(versionTestSuiteName).checkoutVersion(version);
new CheckRepositoryStructure(versionTestSuiteName).check(version);
} catch (SAREFPipelineException ex) {
LOG.error("Error while testing repository structure " + version, ex);
continue;
}
try {
String ontologyTestSuiteName = version.toString() + " - testing ontology file";
testSuiteNames.put(version.getUri(), ontologyTestSuiteName);
new ReadOntology(ontologyTestSuiteName).doJob(dataset, version);
} catch (SAREFPipelineException ex) {
LOG.error(version.toString() + " Found errors for " + version.toString()
+ ". This version and all the examples will be ignored.", ex);
continue;
}
File examplesDir = new File(version.getRepository().getDirectory(), "examples");
try {
for (Iterator<Path> it = Files.walk(examplesDir.toPath()).filter(p -> {
}).iterator();it.hasNext();) {
Path p = it.next();
Path rel = examplesDir.toPath().relativize(p);
String iri = version.getResource().getURI() + "example/"
+ rel.toString().substring(0, rel.toString().length() - 4);
String exampleTestSuiteName = version.toString() + " testing example " + rel;
testSuiteNames.put(iri, exampleTestSuiteName);
File exampleFile = p.toFile();
try {
new ReadExamples(exampleTestSuiteName).doJob(dataset, version, iri, exampleFile);
} catch (SAREFPipelineException ex) {
LOG.error(version.toString() + " Found errors for example " + rel
+ ". This example will be ignored.");
}
}
LOG.error(version.toString() + " Error while walking through the examples. They will be ignored.", ex);
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
}
/**
 * Validates the OWL profile of every registered ontology version and example.
 * Reads the configuration graph inside a READ transaction; failures are
 * logged as warnings and do not stop the pipeline.
 */
private static void checkOWLProfile() {
    dataset.begin(ReadWrite.READ);
    Model config = dataset.getNamedModel(Constants.CONFIG);
    checkProfileOfEach(config.listResourcesWithProperty(RDF.type, EX.OntologyVersion), "ontology");
    checkProfileOfEach(config.listResourcesWithProperty(RDF.type, EX.Example), "example");
    dataset.end();
}

/**
 * Runs the OWL-profile job on each resource of the iterator, looking up the
 * matching test-suite name; failures are logged with the given label.
 */
private static void checkProfileOfEach(ResIterator it, String label) {
    while (it.hasNext()) {
        String uri = it.next().getURI();
        String testSuiteName = testSuiteNames.get(uri);
        try {
            new CheckOWLProfile(testSuiteName).doJob(dataset, uri);
        } catch (SAREFPipelineException ex) {
            LOG.warn("Found errors for " + label + " " + uri, ex);
        }
    }
}
/**
 * Builds the SPARQL-Ext stream manager used to resolve documentation
 * templates: first maps the bundled classpath {@code documentation} resources
 * (handling both jar and exploded layouts), then overrides them with any local
 * files in {@code <directory>/documentation}.
 *
 * @throws IOException        if the documentation trees cannot be walked
 * @throws URISyntaxException if the classpath resource URL is malformed
 */
private static SPARQLExtStreamManager initializeStreamManager() throws IOException, URISyntaxException {
    SPARQLExtStreamManager sm = SPARQLExtStreamManager.makeStreamManager();
    LocationMapperAccept mapper = new LocationMapperAccept();
    // Locate the bundled src/documentation directory on the classpath.
    URI uri = Main.class.getClassLoader().getResource("documentation").toURI();
    boolean isJar = uri.getScheme().equals("jar");
    Path dirPath; // NOTE(review): declaration reconstructed — it was missing from the original
    if (isJar) {
        FileSystem fileSystem = FileSystems.newFileSystem(uri, Collections.<String, Object>emptyMap());
        dirPath = fileSystem.getPath("/documentation");
    } else {
        dirPath = Paths.get(uri);
    }
    walk(dirPath, isJar, mapper);
    // Override with local files in the working directory's documentation directory.
    dirPath = Paths.get(new File(directory, "documentation").toURI());
    walk(dirPath, false, mapper);
    // NOTE(review): reconstructed — the mapper was populated but never attached
    // to the stream manager in the original fragment; confirm the setter name.
    sm.setLocationMapper(mapper);
    return sm;
}
private static void walk(Path dirPath, boolean isJar, LocationMapperAccept mapper) throws IOException {
Files.walk(dirPath).forEach((p) -> {
String relativePath = dirPath.relativize(p).toString().replace("\\", "/");
mapper.addAltEntry(fileurl, p.toString().substring(1));
} else {
mapper.addAltEntry(fileurl, p.toString());
}
});
}
private static void resetCheckout(Repository repository) {
try (Git git = Git.open(repository.getDirectory())) {
String currentBranch = repository.getCurrentBranch();
git.checkout().setName(currentBranch).call();
}
} catch (IOException | GitAPIException ex) {
LOG.warn("Error while reseting repository " + repository, ex);
private static void reportAndExit(int code) {
try {
File report = new File(target, "report_output.xml");
JAXBContext jaxbContext = JAXBContext.newInstance(TestSuites.class);
Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
jaxbMarshaller.marshal(testSuites, report);
final StringWriter sw = new StringWriter();
jaxbMarshaller.marshal(testSuites, sw);
// generate the report.html
File reportHTML = new File(directory, Constants.SITE_DIR + File.separator + "report.html");
try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(reportHTML));) {
Context context = ContextUtils.build(writer).setBase(Constants.BASE).setDebugTemplate(verbose)
.setStreamManager(streamManager).build();
BindingHashMap binding = new BindingHashMap();
binding.add(VAR_TEST_SUITES, NodeFactory.createLiteral(sw.toString()));
List<Binding> bindings = new ArrayList<>();
bindings.add(binding);
String query = IOUtils.toString(
streamManager
.open(new LookUpRequest("documentation/report/main.rqg", SPARQLExt.MEDIA_TYPE)),
StandardCharsets.UTF_8);
RootPlan reportPlan = PlanFactory.create(query, BASE_DOC);
reportPlan.execTemplateStream(bindings, context);
}
if (openBrowser) {
if (Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) {
Desktop.getDesktop().browse(reportHTML.toURI());
} else {
System.out.println("\n\n\nURL to the SAREF pipeline report:\n" + reportHTML.toURI());
}
LOG.error("Exception:", ex);
ex.printStackTrace();
}