// NOTE(review): the lines "Newer" / "Older" were pagination artifacts from the
// web code viewer this file was copied from, not source code; replaced with this note.
import java.awt.Desktop;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingHashMap;
import org.apache.jena.sparql.util.Context;
import org.apache.jena.tdb.TDBFactory;
import org.apache.jena.vocabulary.RDF;
import org.apache.log4j.Layout;
import org.apache.log4j.PatternLayout;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRefNameException;
import org.eclipse.jgit.api.errors.RefAlreadyExistsException;
import org.eclipse.jgit.api.errors.RefNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import fr.emse.ci.sparqlext.SPARQLExt;
import fr.emse.ci.sparqlext.engine.PlanFactory;
import fr.emse.ci.sparqlext.engine.RootPlan;
import fr.emse.ci.sparqlext.stream.LocationMapperAccept;
import fr.emse.ci.sparqlext.stream.LocatorFileAccept;
import fr.emse.ci.sparqlext.stream.LookUpRequest;
import fr.emse.ci.sparqlext.stream.SPARQLExtStreamManager;
import fr.emse.ci.sparqlext.utils.ContextUtils;
import fr.emse.ci.sparqlext.utils.VarUtils;
import fr.emse.gitlab.saref.entities.git.Repositories;
import fr.emse.gitlab.saref.entities.git.Repository;
import fr.emse.gitlab.saref.entities.git.Version;
import fr.emse.gitlab.saref.entities.tests.TestSuites;
import fr.emse.gitlab.saref.jobs.CheckOWLProfile;
import fr.emse.gitlab.saref.jobs.CheckRepositoryStructure;
import fr.emse.gitlab.saref.jobs.ReadExamples;
import fr.emse.gitlab.saref.jobs.ReadOntology;
import fr.emse.gitlab.saref.jobs.ReadRepositories;
import fr.emse.gitlab.saref.utils.TestSuitesAppender;
import fr.emse.gitlab.saref.vocabs.EX;
// Logger for this entry point, nested under the project-wide logger base.
static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_BASE + ".Main");
// Base IRI under which generated documentation resources are addressed.
public final static String BASE_DOC = Constants.BASE + "documentation/";
// Matches Turtle files anywhere under a directory tree (used to discover example files).
public static final PathMatcher ttlMatcher = FileSystems.getDefault().getPathMatcher("glob:**/*.ttl");
// Accumulates every test result of the run; serialized to XML/HTML in reportAndExit().
private static TestSuites testSuites = new TestSuites("SAREF pipeline");
// Maps ontology/example IRIs to the name of the test suite that covers them.
private static final Map<String, String> testSuiteNames = new HashMap<>();
private static File directory; // canonical working directory of the pipeline
private static File target; // <directory>/<TARGET_DIR>, receives reports and artifacts
private static SPARQLExtStreamManager streamManager; // resolves documentation templates
private static boolean openBrowser = true; // open the HTML report when the desktop supports it
private static Dataset dataset; // TDB dataset backing the whole run
private static Var VAR_TEST_SUITES = VarUtils.allocVar("testsuites"); // binding var for the report template
// Flags parsed from the command line in main().
static boolean remoteOnly;
static boolean ignoreGit;
static boolean includeMaster;
static boolean generateSite;
static boolean ignoreExamples;
// FIX: ignoreTerms is assigned in main() and passed to GeneratePortal but was never declared.
static boolean ignoreTerms;
static boolean verbose;
throws IOException, InterruptedException, URISyntaxException, JAXBException, ParseException, RefAlreadyExistsException, RefNotFoundException, InvalidRefNameException, CheckoutConflictException, GitAPIException {
CommandLine cl = CMDConfigurations.parseArguments(args);
if (cl.getOptions().length == 0 || cl.hasOption(ARG_HELP)) {
CMDConfigurations.displayHelp();
return;
}
String dirName = cl.getOptionValue(ARG_DIRECTORY, ARG_DIRECTORY_DEFAULT);
directory = new File(dirName).getCanonicalFile();
remoteOnly = cl.hasOption(ARG_REMOTE_ONLY);
ignoreGit = cl.hasOption(ARG_IGNORE_GIT);
includeMaster = cl.hasOption(ARG_INCLUDE_MASTER);
generateSite = !cl.hasOption(ARG_NO_SITE);
ignoreExamples = cl.hasOption(ARG_IGNORE_EXAMPLES);
ignoreTerms = cl.hasOption(ARG_IGNORE_TERMS);
setLogAppenders();
prepareDirectory();
streamManager = initializeStreamManager();
dataset = createFreshDataset();
LOG.info("Starting pipeline in " + directory);
Repositories repositories;
try {
repositories = new ReadRepositories("Fetching required repositories").readRepositories(directory, remoteOnly, ignoreGit, includeMaster);
} catch (Exception ex) {
reportAndExit(-1);
if (repositories.getDefaultRepository() != null) {
Repository repository = repositories.getDefaultRepository();
boolean checkExamples = !ignoreExamples;
testRepository(repository, checkExamples);
for(Repository repository : repositories.getNamedRepositories()) {
boolean checkExamples = !ignoreExamples && remoteOnly;
testRepository(repository, checkExamples);
checkOWLProfile();
new CheckConfig().doJob(dataset, ignoreExamples);
if (generateSite) {
new GeneratePortal("Generate static files for the portal", streamManager).doJob(dataset, directory, verbose, ignoreExamples, ignoreTerms);
if (repositories.getDefaultRepository() != null && !ignoreGit) {
Repository repository = repositories.getDefaultRepository();
resetCheckout(repository);
for(Repository repository : repositories.getNamedRepositories()) {
resetCheckout(repository);
testSuites.clean();
reportAndExit((int) -Math.signum(testSuites.getErrors()));
}
/**
 * Routes log output: the root logger writes to the pipeline log file, and the
 * project logger feeds the in-memory test-suite report.
 *
 * @throws IOException if the log file appender cannot be created
 */
private static void setLogAppenders() throws IOException {
	File logFile = new File(directory, Constants.LOG_FILE_NAME);
	Layout layout = new PatternLayout("%d{mm:ss,SSS} %t %-5p %c:%L - %m%n");
	org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger();
	// append=false: each pipeline run starts a fresh log file
	rootLogger.addAppender(new org.apache.log4j.RollingFileAppender(layout, logFile.getAbsolutePath(), false));
	org.apache.log4j.Logger loggerBase = org.apache.log4j.Logger.getLogger(Constants.LOGGER_BASE);
	TestSuitesAppender appender = new TestSuitesAppender(testSuites);
	// FIX: the appender was created but never attached, so pipeline log events
	// never reached the test-suite report.
	loggerBase.addAppender(appender);
}
private static void prepareDirectory() throws IOException {
target = new File(directory, Constants.TARGET_DIR);
FileUtils.forceMkdir(target);
if(generateSite) {
File staticTargetDir = new File(directory, Constants.STATIC_TARGET_DIR);
File staticTargetZip = new File(directory, Constants.STATIC_TARGET_DIR + ".zip");
URL url = Main.class.getClassLoader().getResource("static.zip");
FileUtils.copyURLToFile(url, staticTargetZip);
UnzipFile.unzip(staticTargetZip, staticTargetDir);
FileUtils.deleteQuietly(staticTargetZip);
}
/**
 * Opens (or creates) the TDB dataset in the working directory and wipes it:
 * clears the default model, drops every named model, and installs a fresh,
 * empty CONFIG model.
 *
 * @return the emptied dataset, committed and ready for use
 * @throws IOException if the dataset directory cannot be created
 */
private static Dataset createFreshDataset() throws IOException {
	File datasetDir = new File(directory, Constants.DATASET_DIR);
	FileUtils.forceMkdir(datasetDir);
	dataset = TDBFactory.createDataset(datasetDir.getAbsolutePath());
	dataset.begin(ReadWrite.WRITE);
	try {
		dataset.getDefaultModel().removeAll();
		// Collect the names first: removing models while iterating listNames()
		// would invalidate the iterator.
		List<String> toRemove = new ArrayList<>();
		Iterator<String> it = dataset.listNames();
		while (it.hasNext()) {
			toRemove.add(it.next());
		}
		for (String name : toRemove) {
			dataset.removeNamedModel(name);
		}
		dataset.addNamedModel(Constants.CONFIG, ModelFactory.createDefaultModel());
		dataset.commit();
	} finally {
		// FIX: end() was missing — without it an exception between begin() and
		// commit() would leave the write transaction open. end() after a commit is a no-op.
		dataset.end();
	}
	return dataset;
}
private static void testRepository(Repository repository, boolean checkExamples) {
for (Version version : repository.getVersions()) {
try {
String versionTestSuiteName = version.toString() + " - testing repository structure";
new CheckoutJob(versionTestSuiteName).checkoutVersion(version);
new CheckRepositoryStructure(versionTestSuiteName).check(version);
} catch(SAREFPipelineException ex) {
LOG.error("Error while testing repository structure " + version, ex);
continue;
}
try {
String ontologyTestSuiteName = version.toString() + " - testing ontology file";
testSuiteNames.put(version.getUri(), ontologyTestSuiteName);
new ReadOntology(ontologyTestSuiteName).doJob(dataset, version);
} catch(SAREFPipelineException ex) {
LOG.error(version.toString() + " Found errors for " + version.toString()
+ ". This version and all the examples will be ignored.", ex);
continue;
}
File examplesDir = new File(version.getRepository().getDirectory(), "examples");
try {
for (Iterator<Path> it = Files.walk(examplesDir.toPath()).filter(p -> {
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
}).iterator();it.hasNext();) {
Path p = it.next();
Path rel = examplesDir.toPath().relativize(p);
String iri = version.getResource().getURI() + "example/"
+ rel.toString().substring(0, rel.toString().length() - 4);
String exampleTestSuiteName = version.toString() + " testing example " + rel;
testSuiteNames.put(iri, exampleTestSuiteName);
File exampleFile = p.toFile();
try {
new ReadExamples(exampleTestSuiteName).doJob(dataset, version, iri, exampleFile);
} catch (SAREFPipelineException ex) {
LOG.error(version.toString() + " Found errors for example " + rel
+ ". This example will be ignored.");
}
}
} catch(Exception ex) {
LOG.error(version.toString() + " Error while walking through the examples. They will be ignored.", ex);
continue;
}
}
}
/**
 * Runs the OWL-profile check on every ontology version and every example
 * registered in the CONFIG model, inside a single read transaction.
 */
private static void checkOWLProfile() {
	dataset.begin(ReadWrite.READ);
	try {
		Model config = dataset.getNamedModel(Constants.CONFIG);
		checkOWLProfileFor(config, EX.OntologyVersion, "ontology");
		checkOWLProfileFor(config, EX.Example, "example");
	} finally {
		// FIX: release the read transaction even if a check throws unexpectedly.
		dataset.end();
	}
}

/**
 * Checks the OWL profile of every resource of the given RDF type.
 *
 * @param config the CONFIG model listing the resources
 * @param type   the RDF type to select (EX.OntologyVersion or EX.Example)
 * @param kind   human-readable kind used in the warning message ("ontology" or "example")
 */
private static void checkOWLProfileFor(Model config, Resource type, String kind) {
	for (ResIterator it = config.listResourcesWithProperty(RDF.type, type); it.hasNext();) {
		String ontology = it.next().getURI();
		String testSuiteName = testSuiteNames.get(ontology);
		try {
			new CheckOWLProfile(testSuiteName).doJob(dataset, ontology);
		} catch (SAREFPipelineException ex) {
			LOG.warn("Found errors for " + kind + " " + ontology, ex);
		}
	}
}
/**
 * Builds the SPARQL-Generate stream manager that resolves documentation
 * template IRIs (under BASE_DOC) to the bundled "documentation" resources,
 * whether the application runs from a jar or from exploded classes.
 *
 * @return the configured stream manager
 * @throws IOException        if the documentation tree cannot be walked
 * @throws URISyntaxException if the classpath resource URL is malformed
 */
private static SPARQLExtStreamManager initializeStreamManager() throws IOException, URISyntaxException {
	URI uri = Main.class.getClassLoader().getResource("documentation").toURI();
	Path dirPath;
	if (uri.getScheme().equals("jar")) {
		// Running from a jar: mount it as a FileSystem. Deliberately not closed
		// here — the mapper entries point into it and are resolved later.
		FileSystem fileSystem = FileSystems.newFileSystem(uri, Collections.<String, Object>emptyMap());
		dirPath = fileSystem.getPath("/documentation");
	} else {
		dirPath = Paths.get(uri);
	}
	LocatorFileAccept locator = new LocatorFileAccept(uri.getPath());
	LocationMapperAccept mapper = new LocationMapperAccept();
	SPARQLExtStreamManager sm = SPARQLExtStreamManager.makeStreamManager(locator);
	sm.setLocationMapper(mapper);
	// FIX: Files.walk must be closed to release directory handles.
	try (Stream<Path> walk = Files.walk(dirPath)) {
		walk.forEach((p) -> {
			String relativePath = dirPath.relativize(p).toString().replace("\\", "/");
			// FIX: `fileurl` was undefined in the recovered source. Reconstructed as
			// the documentation IRI of the walked file — TODO confirm against the original.
			String fileurl = BASE_DOC + relativePath;
			if (uri.getScheme().equals("jar")) {
				// strip the leading "/" of the in-jar path
				mapper.addAltEntry(fileurl, p.toString().substring(1));
			} else {
				mapper.addAltEntry(fileurl, p.toString());
			}
		});
	}
	return sm;
}
private static void resetCheckout(Repository repository) {
try(Git git = Git.open(repository.getDirectory())) {
String currentBranch = repository.getCurrentBranch();
if(currentBranch != null) {
git.checkout().setName(currentBranch).call();
}
} catch (IOException | GitAPIException ex) {
LOG.warn("Error while reseting repository " + repository, ex);
private static void reportAndExit(int code) {
try {
File report = new File(target, "report_output.xml");
JAXBContext jaxbContext = JAXBContext.newInstance(TestSuites.class);
Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
jaxbMarshaller.marshal(testSuites, report);
final StringWriter sw = new StringWriter();
jaxbMarshaller.marshal(testSuites, sw);
if(generateSite) {
// generate the report.html
File reportHTML = new File(directory, Constants.SITE_DIR + File.separator + "report.html");
try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(reportHTML));) {
Context context = ContextUtils.build(writer).setBase(Constants.BASE)
.setDebugTemplate(verbose).setStreamManager(streamManager).build();
BindingHashMap binding = new BindingHashMap();
binding.add(VAR_TEST_SUITES, NodeFactory.createLiteral(sw.toString()));
List<Binding> bindings = new ArrayList<>();
bindings.add(binding);
String query = IOUtils.toString(
streamManager.open(new LookUpRequest("documentation/report/main.rqg", SPARQLExt.MEDIA_TYPE)),
StandardCharsets.UTF_8);
RootPlan reportPlan = PlanFactory.create(query, BASE_DOC);
reportPlan.execTemplateStream(bindings, context);
}
if (openBrowser) {
if (Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) {
Desktop.getDesktop().browse(reportHTML.toURI());
} else {
System.out.println("\n\n\nURL to the SAREF pipeline report:\n" + reportHTML.toURI());
}
LOG.error("Exception:", ex);
ex.printStackTrace();
}