/*
 * Copyright 2020 ETSI
 * 
 * Redistribution and use in source and binary forms, with or without 
 * modification, are permitted provided that the following conditions are met:
 * 1. Redistributions of source code must retain the above copyright notice, 
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice, 
 *    this list of conditions and the following disclaimer in the documentation 
 *    and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holder nor the names of its contributors 
 *    may be used to endorse or promote products derived from this software without 
 *    specific prior written permission.
 * 
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED 
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package fr.emse.gitlab.saref.managers;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.Dataset;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.apache.jena.sparql.engine.binding.BindingHashMap;
import org.apache.jena.sparql.util.Context;
import org.eclipse.jgit.api.Git;
import org.slf4j.Logger;

import fr.emse.ci.sparqlext.SPARQLExt;
import fr.emse.ci.sparqlext.engine.PlanFactory;
import fr.emse.ci.sparqlext.engine.RootPlan;
import fr.emse.ci.sparqlext.stream.LookUpRequest;
import fr.emse.ci.sparqlext.stream.SPARQLExtStreamManager;
import fr.emse.ci.sparqlext.utils.ContextUtils;
import fr.emse.ci.sparqlext.utils.VarUtils;
import fr.emse.gitlab.saref.SAREF;
import fr.emse.gitlab.saref.SAREFErrorLogger;
import fr.emse.gitlab.saref.SAREFPipeline;
import fr.emse.gitlab.saref.SAREFPipeline.Mode;
import fr.emse.gitlab.saref.SAREFPipelineException;
import fr.emse.gitlab.saref.entities.SAREFExample;
import fr.emse.gitlab.saref.entities.SAREFProject;
import fr.emse.gitlab.saref.entities.SAREFRepository;
import fr.emse.gitlab.saref.entities.SAREFTerm;
import fr.emse.gitlab.saref.entities.SAREFVersion;
import fr.emse.gitlab.saref.utils.StreamManagerFactory;

public class SiteManager extends SAREFErrorLogger {

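	// Locations of the SPARQL-Generate (.rqg) queries used to render the documentation
	// pages, and the variables these queries expect to be bound.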
	private static final String DOC_BASE = SAREF.BASE + "documentation/";
	private static final String TERM_QUERY = DOC_BASE + "term/main.rqg";
	private static final Var VAR_TERM = VarUtils.allocVar("term");
	private static final String ONTO_QUERY = DOC_BASE + "ontology/main.rqg";
	private static final Var VAR_VERSION_IRI = VarUtils.allocVar("versionIRI");
	private static final String EXAMPLE_QUERY = DOC_BASE + "example/main.rqg";
	private static final Var VAR_EXAMPLE = VarUtils.allocVar("example");

	private static final String NAME_REPORT_HTML = "report.html";

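	// RewriteCond prepended before each RewriteRule below; it captures the directory
	// part of the requested URI so it can be referenced as %1 in the rule target.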
	private static final String HTACCESS_PATH = "RewriteCond %{REQUEST_URI} ^(.*/)?[^/]*$\n";
	private static final SPARQLExtStreamManager STREAM_MANAGER_BASE = StreamManagerFactory.get();
	public final File siteDir;
	public final File sourcesDir;
	private final File reportFileHTML;
	private final RootPlan planForTerm;
	private final RootPlan planForOntologyVersion;
	private final RootPlan planForExample;

	private static enum MESSAGE {
		prepare_target_error, prepare_portal_error;
	}

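	/**
	 * Creates the site manager: resolves the site and sources directories under the
	 * pipeline target directory, and pre-compiles the SPARQL-Generate plans used to
	 * render the documentation of terms, ontology versions, and examples.
	 */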
	public SiteManager(SAREFPipeline pipeline, Logger errorLogger) throws IOException {
		super(pipeline, errorLogger);
		siteDir = new File(pipeline.targetDir, SAREF.NAME_SITE);
		sourcesDir = new File(pipeline.targetDir, SAREF.NAME_SOURCES);
		
		reportFileHTML = new File(siteDir, NAME_REPORT_HTML);

		String query = IOUtils.toString(STREAM_MANAGER_BASE.open(new LookUpRequest(TERM_QUERY, SPARQLExt.MEDIA_TYPE)),
				StandardCharsets.UTF_8);
		planForTerm = PlanFactory.create(query, DOC_BASE);

		query = IOUtils.toString(STREAM_MANAGER_BASE.open(new LookUpRequest(ONTO_QUERY, SPARQLExt.MEDIA_TYPE)),
				StandardCharsets.UTF_8);
		planForOntologyVersion = PlanFactory.create(query, DOC_BASE);

		query = IOUtils.toString(STREAM_MANAGER_BASE.open(new LookUpRequest(EXAMPLE_QUERY, SPARQLExt.MEDIA_TYPE)),
				StandardCharsets.UTF_8);
		planForExample = PlanFactory.create(query, DOC_BASE);
	}

	/**
	 * Prepares the site folder. In DEVELOP or RELEASE mode, clones (or pulls) the
	 * saref-portal-static repository and copies its static content into the site
	 * folder; in other modes, copies the portal folder from the pipeline directory.
	 */
	public void prepareSite() throws SAREFPipelineException {
		if(pipeline.mode == Mode.DEVELOP || pipeline.mode == Mode.RELEASE) {
			File portalSourcesDir = new File(sourcesDir, SAREF.NAME_SOURCES_PORTAL);
			try (Git git = Git.cloneRepository().setURI(SAREF.SAREF_PORTAL_STATIC_GIT).setDirectory(portalSourcesDir).call()) {
				File portal = new File(portalSourcesDir, SAREF.NAME_SOURCES_PORTAL);
				FileUtils.copyDirectory(portal, siteDir);
			} catch (Exception ex) {
				try (Git git = Git.open(portalSourcesDir)) {
					git.pull().call();
					File portal = new File(portalSourcesDir, SAREF.NAME_SOURCES_PORTAL);
					FileUtils.copyDirectory(portal, siteDir);
				} catch (Exception e) {
					String msg = getMessage(MESSAGE.prepare_target_error, SAREF.SAREF_PORTAL_STATIC_GIT);
					logError(msg, e);
					throw new SAREFPipelineException(msg, e);
				}
			}
		} else {
			try {
				File portal = new File(pipeline.directory, SAREF.NAME_SOURCES_PORTAL);
				FileUtils.copyDirectory(portal, siteDir);
			} catch (Exception e) {
				// the pipeline directory is expected to contain the portal folder
				String msg = getMessage(MESSAGE.prepare_target_error, SAREF.SAREF_PORTAL_STATIC_GIT);
				throw new SAREFPipelineException(msg, e);
			}
		}
	}

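	/**
	 * Renders the HTML report of the pipeline run into report.html, passing the
	 * content of the given writer (presumably the serialized test suites) to the
	 * documentation/report/main.rqg query through the ?testsuites variable.
	 */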
	public void writeReport(StringWriter sw) throws IOException {
		try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(reportFileHTML));) {
			boolean debugTemplate = pipeline.mode == Mode.DEVELOP;
			Context context = ContextUtils.build(writer).setBase(SAREF.BASE).setDebugTemplate(debugTemplate)
					.setStreamManager(STREAM_MANAGER_BASE).build();
			BindingHashMap binding = new BindingHashMap();
			Var varTestSuites = VarUtils.allocVar("testsuites");
			binding.add(varTestSuites, NodeFactory.createLiteral(sw.toString()));
			List<Binding> bindings = new ArrayList<>();
			bindings.add(binding);
			String query = IOUtils.toString(
					STREAM_MANAGER_BASE.open(new LookUpRequest("documentation/report/main.rqg", SPARQLExt.MEDIA_TYPE)),
					StandardCharsets.UTF_8);
			RootPlan reportPlan = PlanFactory.create(query, DOC_BASE);
			reportPlan.execTemplateStream(bindings, context);
		} catch (IOException ex) {
			throw ex;
		}
	}

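	/**
	 * Generates the HTML documentation for an ontology version, binding its
	 * resource to ?versionIRI in the ontology documentation query.
	 */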
	public void generateOntologyDocumentation(SAREFVersion version, IndentedWriter writer,
			SPARQLExtStreamManager streamManager, Dataset dataset) {
		Resource resource = version.getResource();
		generateHTML(planForOntologyVersion, VAR_VERSION_IRI, resource, writer, streamManager, dataset);
	}

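	/**
	 * Generates the HTML documentation for an example, binding its resource to
	 * ?example in the example documentation query.
	 */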
	public void generateExampleDocumentation(SAREFExample example, IndentedWriter writer,
			SPARQLExtStreamManager streamManager, Dataset dataset) {
		Resource resource = example.getResource();
		generateHTML(planForExample, VAR_EXAMPLE, resource, writer, streamManager, dataset);
	}

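	/**
	 * Generates the HTML documentation for a term, binding its resource to ?term
	 * in the term documentation query.
	 */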
	public void generateTermDocumentation(SAREFTerm term, IndentedWriter writer, Dataset dataset) {
		Resource resource = term.getResource();
		generateHTML(planForTerm, VAR_TERM, resource, writer, STREAM_MANAGER_BASE, dataset);
	}

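	/**
	 * Shared helper: builds a SPARQL-Generate execution context over the given
	 * dataset and stream manager, binds the resource to the given variable, and
	 * streams the generated HTML to the writer.
	 */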
	private void generateHTML(RootPlan plan, Var var, Resource resource, IndentedWriter writer,
			SPARQLExtStreamManager streamManager, Dataset dataset) {
		Context context = ContextUtils.build(writer).setBase(SAREF.BASE).setDebugTemplate(pipeline.mode == Mode.DEVELOP)
				.setInputDataset(dataset).setStreamManager(streamManager).build();
		BindingHashMap binding = new BindingHashMap();
		binding.add(var, resource.asNode());
		List<Binding> bindings = new ArrayList<>();
		bindings.add(binding);
		plan.execTemplateStream(bindings, context);
	}

	public static SPARQLExtStreamManager getStreamManagerBase() {
		return STREAM_MANAGER_BASE;
	}

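	/**
	 * Writes the .htaccess file at the root of the site: redirects source requests
	 * to the ETSI Forge, sets Content-Disposition headers for RDF files, and appends
	 * the per-repository rewrite rules.
	 */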
	public void generateHtaccess() {
		File htaccess = new File(siteDir, ".htaccess");
		try (FileWriter writer = new FileWriter(htaccess)) {
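			// requests under sources/ are redirected to the ETSI Forge repository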
			writer.write("RewriteRule ^sources/(.*)$ https://forge.etsi.org/rep/SAREF/$1 [R=302]\n\n");
			writer.write(
					"Header set Content-disposition \"expr=attachement; filename=%{resp:Content-Location}\" \"expr=%{resp:Content-Location} =~ /.(rdf|nt|n3)$/\"\n");
			writer.write(
					"Header set Content-disposition \"expr=inline; filename=%{resp:Content-Location}\" \"expr=  %{resp:Content-Location} =~ /.+/  && !( %{resp:Content-Location} =~ /.(rdf|nt|n3)$/ )\"\n\n");
			if (pipeline.getSourcesManager().getTargetRepositoryManager() != null) {
				writeHtaccess(writer, pipeline.getSourcesManager().getTargetRepositoryManager());
			}
			for (RepositoryManager repositoryManager : pipeline.getSourcesManager().getSourceRepositoryManagers()) {
				writeHtaccess(writer, repositoryManager);
			}
		} catch (IOException ex) {
			ex.printStackTrace();
		}
	}

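	/**
	 * Appends the .htaccess rewrite rules for one repository: the project path and
	 * bare ontology file names redirect to the latest version, and each version
	 * (and its examples) redirects to the corresponding document.
	 */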
	private void writeHtaccess(FileWriter writer, RepositoryManager repositoryManager) throws IOException {
		SAREFRepository repository = repositoryManager.getRepository();
		SAREFProject project = repository.getProject();
		if(repository.getVersions().isEmpty()) {
			return;
		}
		SAREFVersion lastVersion = repository.getVersions().lastEntry().getValue();
		// redirects core/ to core/v3.1.1/
		writer.write(HTACCESS_PATH);
		writer.write(
				String.format("RewriteRule ^%s/$ %%1%s/ [R=302]\n", project.getPath(), lastVersion.getVersionName()));
		// redirects core.ttl to core/v3.1.1/saref.ttl
		writer.write(HTACCESS_PATH);
		writer.write(String.format("RewriteRule ^%s\\.([^\\./]+)$ %%1%s/%s.$1 [R=302]\n\n", project.getPath(),
				lastVersion.getVersionPath(), project.getOntologyFileName()));
		for (SAREFVersion version : repository.getVersions().values()) {
			// redirects core/v3.1.1/ to core/v3.1.1/saref.conneg
			writer.write(HTACCESS_PATH);
			writer.write(String.format("RewriteRule ^%s/$ %%1%s\n", version.getVersionPath(),
					project.getOntologyFileName()));
			// redirects core/v3.1.1.ttl to core/v3.1.1/saref.ttl
			writer.write(HTACCESS_PATH);
			writer.write(String.format("RewriteRule ^%s\\.([^\\./]+)$ %%1%s/%s.$1  [R=302]\n", version.getVersionPath(),
					version.getVersionName(), project.getOntologyFileName()));
			writer.write("\n");
			
			for(SAREFExample example : version.getExamples().values()) {
				writer.write("RewriteCond %{REQUEST_URI} ^(.*)?/[^/]*$\n");
				writer.write(String.format("RewriteRule ^%s/example/%s.([^/]+)/(.*)$ %%1 [R=303]\n", version.getVersionPath(), example.getName()));
				writer.write("\n");
			}
		}
		writer.write("\n");
	}