package fr.emse.gitlab.saref;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DEBUG_TEMPLATE;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DIRECTORY;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DIRECTORY_DEFAULT;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_HELP;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_INCLUDE_ALL;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_INCLUDE_MASTER;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_PRODUCTION;
import java.awt.Desktop;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Stream;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingHashMap;
import org.apache.jena.sparql.util.Context;
import org.apache.jena.tdb.TDBFactory;
import org.apache.jena.vocabulary.RDF;
import org.apache.log4j.Layout;
import org.apache.log4j.PatternLayout;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRefNameException;
import org.eclipse.jgit.api.errors.RefAlreadyExistsException;
import org.eclipse.jgit.api.errors.RefNotFoundException;
import org.eclipse.jgit.revwalk.RevCommit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import fr.emse.ci.sparqlext.SPARQLExt;
import fr.emse.ci.sparqlext.engine.PlanFactory;
import fr.emse.ci.sparqlext.engine.RootPlan;
import fr.emse.ci.sparqlext.stream.LocationMapperAccept;
import fr.emse.ci.sparqlext.stream.LocatorFileAccept;
import fr.emse.ci.sparqlext.stream.LookUpRequest;
import fr.emse.ci.sparqlext.stream.SPARQLExtStreamManager;
import fr.emse.ci.sparqlext.utils.ContextUtils;
import fr.emse.ci.sparqlext.utils.VarUtils;
import fr.emse.gitlab.saref.entities.git.Repositories;
import fr.emse.gitlab.saref.entities.git.Repository;
import fr.emse.gitlab.saref.entities.git.Version;
import fr.emse.gitlab.saref.entities.tests.TestSuites;
import fr.emse.gitlab.saref.jobs.CheckConfig; // assumed to live in the jobs package like the other Check* jobs
import fr.emse.gitlab.saref.jobs.CheckOWLProfile;
import fr.emse.gitlab.saref.jobs.CheckRepositoryStructure;
import fr.emse.gitlab.saref.jobs.CheckoutJob;
import fr.emse.gitlab.saref.jobs.GeneratePortal;
import fr.emse.gitlab.saref.jobs.ReadExamples;
import fr.emse.gitlab.saref.jobs.ReadOntology;
import fr.emse.gitlab.saref.jobs.ReadRepositories;
import fr.emse.gitlab.saref.utils.TestSuitesAppender;
import fr.emse.gitlab.saref.utils.UnzipFile; // assumed location: alongside TestSuitesAppender in utils
import fr.emse.gitlab.saref.vocabs.EX;
public class Main {
static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_BASE + ".Main");
private static TestSuites testSuites = new TestSuites("SAREF pipeline");
private static final Map<String, String> testSuiteNames = new HashMap<>();
private static File directory;
private static File target;
private static SPARQLExtStreamManager streamManager;
private static boolean openBrowser = true;
private static Dataset dataset;
private static Var VAR_TEST_SUITES = VarUtils.allocVar("testsuites");
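/**
 * Entry point of the SAREF pipeline: parses the command line, prepares the
 * working directory and the TDB dataset, tests every repository version,
 * generates the portal, restores the git checkouts, and writes the report.
 */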
public static void main(String[] args)
throws IOException, InterruptedException, URISyntaxException, JAXBException, ParseException, RefAlreadyExistsException, RefNotFoundException, InvalidRefNameException, CheckoutConflictException, GitAPIException {
parseCommandArguments(args);
setLogAppenders();
prepareDirectory();
streamManager = initializeStreamManager();
dataset = createFreshDataset();
LOG.info("Starting pipeline in " + directory);
Repositories repositories = readRepositories();
accept(repositories, Main::testRepository);
checkOWLProfile();
new CheckConfig().doJob(dataset);
new GeneratePortal("Generate static files for the portal", streamManager).doJob(dataset, directory);
accept(repositories, Main::resetCheckout);
testSuites.prune();
reportAndExit(0);
}
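/**
 * Parses the command-line options, sets the corresponding flags in
 * {@code Constants}, and resolves the working directory (falling back to
 * {@code ARG_DIRECTORY_DEFAULT} when none is given).
 */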
private static void parseCommandArguments(String[] args) throws ParseException, IOException {
CommandLine cl = CMDConfigurations.parseArguments(args);
if (cl.getOptions().length == 0 || cl.hasOption(ARG_HELP)) {
CMDConfigurations.displayHelp();
System.exit(0);
}
if (cl.hasOption(ARG_INCLUDE_MASTER)) {
Constants.INCLUDE_MASTER = true;
}
if (cl.hasOption(ARG_INCLUDE_ALL)) {
Constants.INCLUDE_MASTER = true;
Constants.INCLUDE_ALL = true;
}
if (cl.hasOption(ARG_PRODUCTION)) {
Constants.PRODUCTION = true;
}
if (cl.hasOption(ARG_DEBUG_TEMPLATE)) {
Constants.DEBUG_TEMPLATE = true;
}
String dirName = cl.getOptionValue(ARG_DIRECTORY, ARG_DIRECTORY_DEFAULT);
if (dirName.isEmpty()) {
dirName = ARG_DIRECTORY_DEFAULT;
}
directory = new File(dirName).getCanonicalFile();
}
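/**
 * Configures log4j: a rolling file appender on the root logger, plus a
 * {@code TestSuitesAppender} that turns pipeline log events into entries
 * of the test-suite report.
 */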
private static void setLogAppenders() throws IOException {
File logFile = new File(directory, Constants.LOG_FILE_NAME);
Layout layout = new PatternLayout("%d{mm:ss,SSS} %t %-5p %c:%L - %m%n");
org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger();
rootLogger.addAppender(new org.apache.log4j.RollingFileAppender(layout, logFile.getAbsolutePath(), false));
org.apache.log4j.Logger loggerBase = org.apache.log4j.Logger.getLogger(Constants.LOGGER_BASE);
TestSuitesAppender appender = new TestSuitesAppender(testSuites);
loggerBase.addAppender(appender);
}
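/**
 * Creates the target directory and unpacks the bundled {@code static.zip}
 * resource (the static assets of the portal) into it.
 */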
private static void prepareDirectory() throws IOException {
target = new File(directory, Constants.TARGET_DIR);
FileUtils.forceMkdir(target);
File staticTargetDir = new File(directory, Constants.STATIC_TARGET_DIR);
File staticTargetZip = new File(directory, Constants.STATIC_TARGET_DIR + ".zip");
URL url = Main.class.getClassLoader().getResource("static.zip");
FileUtils.copyURLToFile(url, staticTargetZip);
UnzipFile.unzip(staticTargetZip, staticTargetDir);
FileUtils.deleteQuietly(staticTargetZip);
}
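/**
 * Opens the TDB dataset, clears the default model and all named models,
 * and installs an empty configuration model under {@code Constants.CONFIG}.
 */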
private static Dataset createFreshDataset() throws IOException {
File datasetDir = new File(directory, Constants.DATASET_DIR);
FileUtils.forceMkdir(datasetDir);
dataset = TDBFactory.createDataset(datasetDir.getPath());
dataset.begin(ReadWrite.WRITE);
dataset.getDefaultModel().removeAll();
List<String> toRemove = new ArrayList<>();
dataset.listNames().forEachRemaining(toRemove::add);
for(String name : toRemove) {
dataset.removeNamedModel(name);
}
dataset.addNamedModel(Constants.CONFIG, ModelFactory.createDefaultModel());
dataset.commit();
return dataset;
}
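/**
 * Applies {@code consumer} to the default repository, if any, then to every
 * named repository.
 */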
private static void accept(Repositories repositories, Consumer<Repository> consumer) {
Repository defaultRepository = repositories.getDefaultRepository();
if (defaultRepository != null) {
consumer.accept(defaultRepository);
}
for(Repository repository : repositories.getNamedRepositories()) {
consumer.accept(repository);
}
}
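/**
 * Reads the list of repositories to process; any failure writes the report
 * and exits with a non-zero status code.
 */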
private static Repositories readRepositories() {
try {
return new ReadRepositories("Fetching required repositories").readRepositories(directory);
} catch (Exception ex) {
LOG.error("Error while reading the repositories", ex);
reportAndExit(-1);
return null;
}
}
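/**
 * For each version of the repository: checks out the version, validates the
 * repository structure, loads the ontology, then loads every Turtle example
 * under {@code examples/}. A failing step skips the rest of that version.
 */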
private static void testRepository(Repository repository) {
for (Version version : repository.getVersions()) {
try {
String versionTestSuiteName = version.toString() + " - testing repository structure";
new CheckoutJob(versionTestSuiteName).checkoutVersion(version);
new CheckRepositoryStructure(versionTestSuiteName).check(version);
} catch(SAREFPipelineException ex) {
LOG.error("Error while testing repository structure " + version, ex);
continue;
}
try {
String ontologyTestSuiteName = version.toString() + " - testing ontology file";
testSuiteNames.put(version.getUri(), ontologyTestSuiteName);
new ReadOntology(ontologyTestSuiteName).doJob(dataset, version);
} catch(SAREFPipelineException ex) {
LOG.error("Found errors for " + version + ". This version and all its examples will be ignored.", ex);
continue;
}
File examplesDir = new File(version.getRepository().getDirectory(), "examples");
try (Stream<Path> walk = Files.walk(examplesDir.toPath())) {
for (Iterator<Path> it = walk.filter(Constants.ttlMatcher::matches).iterator(); it.hasNext();) {
Path p = it.next();
Path rel = examplesDir.toPath().relativize(p);
String iri = version.getResource().getURI() + "example/"
+ rel.toString().substring(0, rel.toString().length() - 4);
String exampleTestSuiteName = version.toString() + " - testing example " + rel;
testSuiteNames.put(iri, exampleTestSuiteName);
File exampleFile = p.toFile();
try {
new ReadExamples(exampleTestSuiteName).doJob(dataset, version, iri, exampleFile);
} catch (SAREFPipelineException ex) {
LOG.error(version.toString() + " Found errors for example " + rel
+ ". This example will be ignored.", ex);
}
}
} catch(Exception ex) {
LOG.error(version.toString() + " Error while walking through the examples. They will be ignored.", ex);
continue;
}
}
}
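/**
 * Checks the OWL profile of every registered ontology version and example,
 * inside a read transaction on the dataset.
 */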
private static void checkOWLProfile() {
dataset.begin(ReadWrite.READ);
Model config = dataset.getNamedModel(Constants.CONFIG);
for (ResIterator it = config.listResourcesWithProperty(RDF.type, EX.OntologyVersion); it.hasNext();) {
String ontology = it.next().getURI();
String testSuiteName = testSuiteNames.get(ontology);
try {
new CheckOWLProfile(testSuiteName).doJob(dataset, ontology);
} catch (SAREFPipelineException ex) {
LOG.warn("Found errors for ontology " + ontology, ex);
}
}
for (ResIterator it = config.listResourcesWithProperty(RDF.type, EX.Example); it.hasNext();) {
String example = it.next().getURI();
String testSuiteName = testSuiteNames.get(example);
try {
new CheckOWLProfile(testSuiteName).doJob(dataset, example);
} catch (SAREFPipelineException ex) {
LOG.warn("Found errors for example " + example, ex);
}
}
dataset.end();
}
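/**
 * Builds the SPARQL-Generate stream manager: every file of the bundled
 * {@code documentation} resource (on disk or inside the jar) is mapped to
 * its URL under {@code Constants.BASE_DOC}.
 */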
private static SPARQLExtStreamManager initializeStreamManager() throws IOException, URISyntaxException {
URI uri = Main.class.getClassLoader().getResource("documentation").toURI();
Path dirPath;
if(uri.getScheme().equals("jar")) {
FileSystem fileSystem = FileSystems.newFileSystem(uri, Collections.<String, Object>emptyMap());
dirPath = fileSystem.getPath("/documentation");
} else {
dirPath = Paths.get(uri);
}
LocatorFileAccept locator = new LocatorFileAccept(uri.getPath());
LocationMapperAccept mapper = new LocationMapperAccept();
SPARQLExtStreamManager sm = SPARQLExtStreamManager.makeStreamManager(locator);
sm.setLocationMapper(mapper);
try (Stream<Path> walk = Files.walk(dirPath)) {
walk.forEach((p) -> {
String relativePath = dirPath.relativize(p).toString().replace("\\", "/");
String fileurl = Constants.BASE_DOC + relativePath;
LOG.debug("add alt entry " + fileurl + " -> " + p.toString());
if (uri.getScheme().equals("jar")) {
// strip the leading "/" of jar file-system paths
mapper.addAltEntry(fileurl, p.toString().substring(1));
} else {
mapper.addAltEntry(fileurl, p.toString());
}
});
}
return sm;
}
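/**
 * Restores the repository to the branch that was checked out before the
 * pipeline ran.
 */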
private static void resetCheckout(Repository repository) {
try(Git git = Git.open(repository.getDirectory())) {
String currentBranch = repository.getCurrentBranch();
if(currentBranch != null) {
git.checkout().setStartPoint(currentBranch).call();
}
} catch (IOException | GitAPIException ex) {
LOG.warn("Error while reseting repository " + repository, ex);
}
}
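/**
 * Marshals the test suites to {@code report_output.xml}, renders
 * {@code report.html} with the SPARQL-Generate template
 * {@code documentation/report/main.rqg}, optionally opens it in a browser,
 * and terminates the JVM with {@code code}.
 */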
private static void reportAndExit(int code) {
try {
File report = new File(target, "report_output.xml");
JAXBContext jaxbContext = JAXBContext.newInstance(TestSuites.class);
Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
jaxbMarshaller.marshal(testSuites, report);
final StringWriter sw = new StringWriter();
jaxbMarshaller.marshal(testSuites, sw);
// generate the report.html
File reportHTML = new File(directory, Constants.SITE_DIR + File.separator + "report.html");
try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(reportHTML))) {
Context context = ContextUtils.build(writer).setBase(Constants.BASE)
.setDebugTemplate(!Constants.PRODUCTION).setStreamManager(streamManager).build();
BindingHashMap binding = new BindingHashMap();
binding.add(VAR_TEST_SUITES, NodeFactory.createLiteral(sw.toString()));
List<Binding> bindings = new ArrayList<>();
bindings.add(binding);
String query = IOUtils.toString(
streamManager.open(new LookUpRequest("documentation/report/main.rqg", SPARQLExt.MEDIA_TYPE)),
StandardCharsets.UTF_8);
RootPlan reportPlan = PlanFactory.create(query, Constants.BASE_DOC);
reportPlan.execTemplateStream(bindings, context);
}
if (openBrowser) {
if (Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) {
// assumed from the surrounding checks: open the generated report in the default browser
Desktop.getDesktop().browse(reportHTML.toURI());
} else {
System.out.println("\n\n\nURL to the SAREF pipeline report:\n" + reportHTML.toURI());
}
}
} catch (Exception ex) {
LOG.error("Exception:", ex);
ex.printStackTrace();
}
System.exit(code);
}
}