// NOTE(review): removed "Newer"/"Older" page-navigation artifacts left over from web extraction.
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DEBUG_TEMPLATE;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DIRECTORY;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_DIRECTORY_DEFAULT;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_HELP;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_INCLUDE_ALL;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_INCLUDE_MASTER;
import static fr.emse.gitlab.saref.CMDConfigurations.ARG_PRODUCTION;

import java.awt.Desktop;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Stream;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingHashMap;
import org.apache.jena.sparql.util.Context;
import org.apache.jena.tdb.TDBFactory;
import org.apache.jena.vocabulary.RDF;
import org.apache.log4j.Layout;
import org.apache.log4j.PatternLayout;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRefNameException;
import org.eclipse.jgit.api.errors.RefAlreadyExistsException;
import org.eclipse.jgit.api.errors.RefNotFoundException;
import org.eclipse.jgit.revwalk.RevCommit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import fr.emse.ci.sparqlext.SPARQLExt;
import fr.emse.ci.sparqlext.engine.PlanFactory;
import fr.emse.ci.sparqlext.engine.RootPlan;
import fr.emse.ci.sparqlext.stream.LocationMapperAccept;
import fr.emse.ci.sparqlext.stream.LocatorFileAccept;
import fr.emse.ci.sparqlext.stream.LookUpRequest;
import fr.emse.ci.sparqlext.stream.SPARQLExtStreamManager;
import fr.emse.ci.sparqlext.utils.ContextUtils;
import fr.emse.ci.sparqlext.utils.VarUtils;
import fr.emse.gitlab.saref.entities.git.Repositories;
import fr.emse.gitlab.saref.entities.git.Repository;
import fr.emse.gitlab.saref.entities.git.Version;
import fr.emse.gitlab.saref.entities.tests.TestSuites;
import fr.emse.gitlab.saref.jobs.CheckOWLProfile;
import fr.emse.gitlab.saref.jobs.CheckRepositoryStructure;
import fr.emse.gitlab.saref.jobs.CheckoutJob;
import fr.emse.gitlab.saref.jobs.GeneratePortal;
import fr.emse.gitlab.saref.jobs.ReadExamples;
import fr.emse.gitlab.saref.jobs.ReadOntology;
import fr.emse.gitlab.saref.jobs.ReadRepositories;
import fr.emse.gitlab.saref.utils.TestSuitesAppender;
import fr.emse.gitlab.saref.vocabs.EX;
// Logger rooted under the pipeline's base logger so TestSuitesAppender picks it up.
static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_BASE + ".Main");
// Accumulates the JUnit-style test results reported at the end of the run.
private static TestSuites testSuites = new TestSuites("SAREF pipeline");
// Maps an ontology/example IRI to the name of the test suite that covers it
// (populated in testRepository, read back in checkOWLProfile).
private static final Map<String, String> testSuiteNames = new HashMap<>();
// Working directory of the pipeline (set from the -d command-line option).
private static File directory;
// directory/TARGET_DIR — where the XML report is written.
private static File target;
// Stream manager resolving "documentation" resources for SPARQL-Generate.
private static SPARQLExtStreamManager streamManager;
// Whether to open the HTML report in the desktop browser at the end.
private static boolean openBrowser = true;
// TDB-backed dataset holding the config graph and the read ontologies/examples.
private static Dataset dataset;
// SPARQL variable carrying the serialized test-suites XML into the report
// template. Constant — made final.
private static final Var VAR_TEST_SUITES = VarUtils.allocVar("testsuites");
/**
 * Entry point of the SAREF pipeline. Runs, in order: argument parsing, log
 * configuration, workspace preparation, dataset reset, repository fetching,
 * per-repository testing, OWL-profile checks, portal generation, repository
 * reset, and finally report generation.
 *
 * @param args command-line arguments, see {@code CMDConfigurations}
 */
public static void main(String[] args)
throws IOException, InterruptedException, URISyntaxException, JAXBException, ParseException, RefAlreadyExistsException, RefNotFoundException, InvalidRefNameException, CheckoutConflictException, GitAPIException {
parseCommandArguments(args);
setLogAppenders();
prepareDirectory();
streamManager = initializeStreamManager();
dataset = createFreshDataset();
LOG.info("Starting pipeline in " + directory);
// Clone/fetch the repositories listed for the pipeline; exits on failure.
Repositories repositories = readRepositories();
// Check out each version and load its ontology + examples into the dataset.
accept(repositories, Main::testRepository);
checkOWLProfile();
new CheckConfig().doJob(dataset);
new GeneratePortal("Generate static files for the portal", streamManager).doJob(dataset, directory);
// Restore each repository to the commit it was on before the pipeline ran.
accept(repositories, Main::resetCheckout);
// Drop empty test suites before serializing the report.
testSuites.prune();
reportAndExit(0);
}
/**
 * Parses the command-line arguments, sets the global {@code Constants} flags,
 * and resolves the working {@link #directory}.
 *
 * @param args the raw command-line arguments
 * @throws ParseException if the arguments cannot be parsed
 * @throws IOException if the directory path cannot be canonicalized
 */
private static void parseCommandArguments(String[] args) throws ParseException, IOException {
    CommandLine cl = CMDConfigurations.parseArguments(args);
    if (cl.getOptions().length == 0 || cl.hasOption(ARG_HELP)) {
        CMDConfigurations.displayHelp();
        // FIX: previously this only returned, leaving `directory` null so the
        // pipeline carried on and crashed with a NullPointerException. After
        // printing the help there is nothing more to do.
        System.exit(0);
    }
    if (cl.hasOption(ARG_INCLUDE_MASTER)) {
        Constants.INCLUDE_MASTER = true;
    }
    if (cl.hasOption(ARG_INCLUDE_ALL)) {
        // --all implies --master.
        Constants.INCLUDE_MASTER = true;
        Constants.INCLUDE_ALL = true;
    }
    if (cl.hasOption(ARG_PRODUCTION)) {
        Constants.PRODUCTION = true;
    }
    if (cl.hasOption(ARG_DEBUG_TEMPLATE)) {
        Constants.DEBUG_TEMPLATE = true;
    }
    String dirName = cl.getOptionValue(ARG_DIRECTORY, ARG_DIRECTORY_DEFAULT);
    if (dirName.isEmpty()) {
        dirName = ARG_DIRECTORY_DEFAULT;
    }
    directory = new File(dirName).getCanonicalFile();
}
/**
 * Configures log4j: a rolling file appender on the root logger writing to the
 * log file in the working directory, and a {@link TestSuitesAppender} on the
 * pipeline's base logger feeding log events into the test-suite report.
 *
 * @throws IOException if the file appender cannot be created
 */
private static void setLogAppenders() throws IOException {
    final File logFile = new File(directory, Constants.LOG_FILE_NAME);
    final Layout patternLayout = new PatternLayout("%d{mm:ss,SSS} %t %-5p %c:%L - %m%n");
    org.apache.log4j.Logger
            .getRootLogger()
            .addAppender(new org.apache.log4j.RollingFileAppender(
                    patternLayout, logFile.getAbsolutePath(), false));
    org.apache.log4j.Logger
            .getLogger(Constants.LOGGER_BASE)
            .addAppender(new TestSuitesAppender(testSuites));
}
/**
 * Creates the target, site, and static-target directories under the working
 * directory, and copies the bundled {@code static} classpath resources into
 * the static-target directory.
 *
 * @throws IOException if a directory cannot be created, the resource is
 *         missing, or the copy fails
 */
private static void prepareDirectory() throws IOException {
    target = new File(directory, Constants.TARGET_DIR);
    FileUtils.forceMkdir(target);
    File siteDir = new File(directory, Constants.SITE_DIR);
    File staticTargetDir = new File(directory, Constants.STATIC_TARGET_DIR);
    FileUtils.forceMkdir(siteDir);
    FileUtils.forceMkdir(staticTargetDir);
    URL staticUrl = Main.class.getClassLoader().getResource("static");
    if (staticUrl == null) {
        // FIX: previously a missing resource caused a bare NullPointerException.
        throw new IOException("Classpath resource 'static' not found");
    }
    // NOTE(review): URL#getFile only yields a usable path when resources live
    // on the file system; this breaks if the app is packaged as a JAR — confirm
    // the intended packaging.
    File staticDir = new File(staticUrl.getFile());
    FileUtils.copyDirectory(staticDir, staticTargetDir);
}
/**
 * Opens (and wipes) the TDB dataset under the working directory: removes the
 * default model and every named model, then installs an empty configuration
 * model under {@code Constants.CONFIG}.
 *
 * @return the freshly-reset dataset
 * @throws IOException if the dataset directory cannot be created
 */
private static Dataset createFreshDataset() throws IOException {
    File datasetDir = new File(directory, Constants.DATASET_DIR);
    FileUtils.forceMkdir(datasetDir);
    // FIX: the directory was created under `directory` but the dataset was
    // opened at the CWD-relative path Constants.DATASET_DIR — the two diverge
    // whenever -d points somewhere other than the current directory. Open the
    // dataset at the directory that was actually created.
    dataset = TDBFactory.createDataset(datasetDir.getPath());
    dataset.begin(ReadWrite.WRITE);
    dataset.getDefaultModel().removeAll();
    for (Iterator<String> it = dataset.listNames(); it.hasNext();) {
        String name = it.next();
        dataset.removeNamedModel(name);
    }
    dataset.addNamedModel(Constants.CONFIG, ModelFactory.createDefaultModel());
    dataset.commit();
    return dataset;
}
private static void accept(Repositories repositories, Consumer<Repository> consumer) {
if (repositories.getDefaultRepository() != null) {
Repository repository = repositories.getDefaultRepository();
consumer.accept(repository);
}
for(Repository repository : repositories.getNamedRepositories()) {
consumer.accept(repository);
}
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
/**
 * Fetches the repositories required by the pipeline. On any failure the
 * report is written and the JVM exits with a non-zero status.
 *
 * @return the repositories, or {@code null} only if {@code reportAndExit}
 *         unexpectedly returns
 */
private static Repositories readRepositories() {
    try {
        return new ReadRepositories("Fetching required repositories").readRepositories(directory);
    } catch (Exception ex) {
        // FIX: the exception was silently discarded; log it so the failure
        // cause appears in the log file and the report.
        LOG.error("Error while fetching the required repositories", ex);
        reportAndExit(-1);
        return null;
    }
}
/**
 * For every version of {@code repository}: checks out the version, validates
 * the repository structure, loads the ontology into the dataset, then walks
 * the {@code examples} directory and loads every {@code .ttl} example. A
 * failure at any stage is logged and skips the rest of that stage (and, for
 * structure/ontology failures, the whole version).
 *
 * @param repository the repository whose versions are tested
 */
private static void testRepository(Repository repository) {
    for (Version version : repository.getVersions()) {
        try {
            String versionTestSuiteName = version.toString() + " - testing repository structure";
            new CheckoutJob(versionTestSuiteName).checkoutVersion(version);
            new CheckRepositoryStructure(versionTestSuiteName).check(version);
        } catch (SAREFPipelineException ex) {
            LOG.error("Error while testing repository structure " + version, ex);
            continue;
        }
        try {
            String ontologyTestSuiteName = version.toString() + " - testing ontology file";
            testSuiteNames.put(version.getUri(), ontologyTestSuiteName);
            new ReadOntology(ontologyTestSuiteName).doJob(dataset, version);
        } catch (SAREFPipelineException ex) {
            LOG.error(version.toString() + " Found errors for " + version.toString()
                    + ". This version and all the examples will be ignored.", ex);
            continue;
        }
        File examplesDir = new File(version.getRepository().getDirectory(), "examples");
        // FIX: the Files.walk stream was never closed (resource leak on the
        // underlying directory handles) — close it with try-with-resources.
        try (Stream<Path> walk = Files.walk(examplesDir.toPath())) {
            for (Iterator<Path> it = walk.filter(p -> {
                return Constants.ttlMatcher.matches(p);
            }).iterator(); it.hasNext();) {
                Path p = it.next();
                Path rel = examplesDir.toPath().relativize(p);
                // Example IRI: version IRI + "example/" + relative path minus ".ttl".
                String iri = version.getResource().getURI() + "example/"
                        + rel.toString().substring(0, rel.toString().length() - 4);
                String exampleTestSuiteName = version.toString() + " testing example " + rel;
                testSuiteNames.put(iri, exampleTestSuiteName);
                File exampleFile = p.toFile();
                try {
                    new ReadExamples(exampleTestSuiteName).doJob(dataset, version, iri, exampleFile);
                } catch (SAREFPipelineException ex) {
                    // FIX: the caught exception was dropped — pass it to the logger.
                    LOG.error(version.toString() + " Found errors for example " + rel
                            + ". This example will be ignored.", ex);
                }
            }
        } catch (Exception ex) {
            LOG.error(version.toString() + " Error while walking through the examples. They will be ignored.", ex);
            continue;
        }
    }
}
/**
 * Runs the OWL-profile check over every ontology version and every example
 * registered in the configuration graph. Failures are logged as warnings and
 * do not stop the iteration.
 */
private static void checkOWLProfile() {
    dataset.begin(ReadWrite.READ);
    Model configModel = dataset.getNamedModel(Constants.CONFIG);
    ResIterator ontologies = configModel.listResourcesWithProperty(RDF.type, EX.OntologyVersion);
    while (ontologies.hasNext()) {
        String uri = ontologies.next().getURI();
        try {
            new CheckOWLProfile(testSuiteNames.get(uri)).doJob(dataset, uri);
        } catch (SAREFPipelineException ex) {
            LOG.warn("Found errors for ontology " + uri, ex);
        }
    }
    ResIterator examples = configModel.listResourcesWithProperty(RDF.type, EX.Example);
    while (examples.hasNext()) {
        String uri = examples.next().getURI();
        try {
            new CheckOWLProfile(testSuiteNames.get(uri)).doJob(dataset, uri);
        } catch (SAREFPipelineException ex) {
            LOG.warn("Found errors for example " + uri, ex);
        }
    }
    dataset.end();
}
/**
 * Builds a SPARQL-Generate stream manager rooted at the bundled
 * {@code documentation} resources, mapping each file's
 * {@code Constants.BASE_DOC}-relative URL to its local path.
 *
 * @return the configured stream manager
 * @throws IOException if the resource tree cannot be walked or is missing
 */
private static SPARQLExtStreamManager initializeStreamManager() throws IOException {
    URL documentationUrl = Main.class.getClassLoader().getResource("documentation");
    if (documentationUrl == null) {
        // FIX: previously a missing resource caused a bare NullPointerException.
        throw new IOException("Classpath resource 'documentation' not found");
    }
    // NOTE(review): URL#getFile only works for file-system resources, not
    // JAR-packed ones — confirm the intended packaging.
    File documentationDir = new File(documentationUrl.getFile());
    Path dirPath = Paths.get(documentationDir.toURI());
    LocatorFileAccept locator = new LocatorFileAccept(documentationDir.toURI().getPath());
    LocationMapperAccept mapper = new LocationMapperAccept();
    SPARQLExtStreamManager sm = SPARQLExtStreamManager.makeStreamManager(locator);
    sm.setLocationMapper(mapper);
    // FIX: the Files.walk stream was never closed (resource leak) — close it
    // with try-with-resources.
    try (Stream<Path> walk = Files.walk(dirPath)) {
        walk.filter((p) -> {
            return p.toFile().isFile();
        }).forEach((p) -> {
            String relativePath = dirPath.relativize(p).toString();
            // Normalize Windows separators so the mapped URL is well-formed.
            String fileurl = Constants.BASE_DOC + relativePath.replace("\\", "/");
            mapper.addAltEntry(fileurl, p.toString());
        });
    }
    return sm;
}
/**
 * Restores {@code repository} to the commit it was on before the pipeline
 * checked out versions. Failures are logged as warnings only — the pipeline
 * result is unaffected.
 *
 * @param repository the repository to restore
 */
private static void resetCheckout(Repository repository) {
    try (Git git = Git.open(repository.getDirectory())) {
        RevCommit currentRevCommit = repository.getCurrentRevCommit();
        if (currentRevCommit != null) {
            git.checkout().setStartPoint(currentRevCommit).call();
        }
    } catch (IOException | GitAPIException ex) {
        // FIX: corrected "reseting" -> "resetting" in the log message.
        LOG.warn("Error while resetting repository " + repository, ex);
    }
}
private static void reportAndExit(int code) {
try {
File report = new File(target, "report_output.xml");
JAXBContext jaxbContext = JAXBContext.newInstance(TestSuites.class);
Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
jaxbMarshaller.marshal(testSuites, report);
final StringWriter sw = new StringWriter();
jaxbMarshaller.marshal(testSuites, sw);
// generate the report.html
File reportHTML = new File(directory, Constants.SITE_DIR + File.separator + "report.html");
try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(reportHTML));) {
Context context = ContextUtils.build(writer).setBase(Constants.BASE)
.setDebugTemplate(!Constants.PRODUCTION).setStreamManager(streamManager).build();
BindingHashMap binding = new BindingHashMap();
binding.add(VAR_TEST_SUITES, NodeFactory.createLiteral(sw.toString()));
List<Binding> bindings = new ArrayList<>();
bindings.add(binding);
String query = IOUtils.toString(
streamManager.open(new LookUpRequest("report/main.rqg", SPARQLExt.MEDIA_TYPE)),
StandardCharsets.UTF_8);
RootPlan reportPlan = PlanFactory.create(query, Constants.BASE_DOC);
reportPlan.execTemplateStream(bindings, context);
}
if (openBrowser) {
if (Desktop.isDesktopSupported() && Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) {
Desktop.getDesktop().browse(reportHTML.toURI());
// Desktop.getDesktop().browse(new URI(
// Constants.BASE + "report.html?report=" + URLEncoder.encode(sw.toString(), "UTF-8")));
// } catch (JAXBException | URISyntaxException | IOException ex) {
} catch (JAXBException | IOException ex) {
LOG.error("Exception:", ex);
ex.printStackTrace();
}