Commit 853318e9 authored by Gradl, Tobias's avatar Gradl, Tobias
Browse files

Merged build.gradle

parents e523bf10 b404d1c1
Pipeline #25462 passed with stages
in 4 minutes and 18 seconds
......@@ -9,7 +9,7 @@ allprojects {
apply plugin: 'eclipse'
group = 'eu.dariah.de.minfba'
version = '4.1.3-SNAPSHOT'
version = '4.3-SNAPSHOT'
repositories {
mavenLocal()
......@@ -18,10 +18,10 @@ allprojects {
}
}
ext {
coreVersion = "6.4-SNAPSHOT"
gtfVersion = "2.2.1-SNAPSHOT"
processingVersion = "4.3-SNAPSHOT"
colregModelVersion = "4.3.4-RELEASE"
coreVersion = "6.5.1-SNAPSHOT"
gtfVersion = "2.3.2-SNAPSHOT"
processingVersion = "4.3.3-SNAPSHOT"
colregModelVersion = "4.4.1-SNAPSHOT"
dariahSpVersion = "2.1.7-RELEASE"
jsonAssertVersion = "1.5.0"
......@@ -44,7 +44,8 @@ allprojects {
repoPass = project.hasProperty('nexuspass') ? project.getProperty('nexuspass') : ''
// Filled dynamically by packaging tasks
debFile = ""
primaryDebFile = ""
alternativeDebFile = ""
rpmFile = ""
aptRepo = ""
}
......
......@@ -11,6 +11,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import de.unibamberg.minf.core.web.localization.LocaleConverter;
......@@ -68,7 +69,7 @@ public class CollectionSyncClient extends BaseApiClientImpl<CollectionApiPojo, E
String result = restTemplate.getForObject(String.format(this.getFetchDetailsUrl(), id), String.class);
CollectionApiResultPojo<ExtendedCollectionApiPojo> rPojo = apiObjectMapper.readValue(result, new TypeReference<CollectionApiResultPojo<ExtendedCollectionApiPojo>>() {});
if (rPojo.getContent()!=null && rPojo.getContent().size()>0) {
if (rPojo.getContent()!=null && !rPojo.getContent().isEmpty()) {
return rPojo.getContent().iterator().next();
}
return null;
......@@ -162,6 +163,7 @@ public class CollectionSyncClient extends BaseApiClientImpl<CollectionApiPojo, E
cCurrent.setNames(cImported.getNames());
cCurrent.setModified(cImported.getModified());
cCurrent.setUpdatePeriod(cImported.getUpdatePeriod());
cCurrent.setCollectionMetadata(cImported.getCollectionMetadata());
cCurrent.setUpdate(true);
this.mergeEndpoints(cCurrent, cImported);
......@@ -257,6 +259,13 @@ public class CollectionSyncClient extends BaseApiClientImpl<CollectionApiPojo, E
ExtendedCollectionApiPojo fetchedCollection = this.fetchDetails(foreignEntityId);
Collection convertedCollection = new Collection();
try {
convertedCollection.setCollectionMetadata(apiObjectMapper.writeValueAsString(fetchedCollection));
} catch (JsonProcessingException e) {
logger.error("Failed to JSON serialize collection metadata");
}
if (fetchedCollection.getTitles()!=null) {
convertedCollection.setNames(new HashMap<String, String>());
String languageCode;
......
......@@ -12,8 +12,10 @@ import org.springframework.web.bind.annotation.ResponseBody;
import de.unibamberg.minf.core.web.controller.BaseTranslationController;
import de.unibamberg.minf.core.web.pojo.ModelActionPojo;
import eu.dariah.de.search.crawling.CrawlManager;
import eu.dariah.de.search.model.Collection;
import eu.dariah.de.search.model.Crawl;
import eu.dariah.de.search.model.Endpoint;
import eu.dariah.de.search.service.CollectionService;
import eu.dariah.de.search.service.CrawlService;
import eu.dariah.de.search.service.EndpointService;
import eu.dariah.de.search.service.DatamodelService;
......@@ -25,6 +27,7 @@ public class CrawlController extends BaseTranslationController {
@Autowired private CrawlService crawlService;
@Autowired private CrawlManager crawlManager;
@Autowired private EndpointService endpointService;
@Autowired private CollectionService collectionService;
public CrawlController() {
super("crawls");
......@@ -54,9 +57,10 @@ public class CrawlController extends BaseTranslationController {
ModelActionPojo result = new ModelActionPojo(true);
Crawl cBase = crawlService.findById(crawlId);
Endpoint ep = endpointService.findById(cBase.getCollectionId(), cBase.getEndpointId());
Collection c = collectionService.findById(cBase.getCollectionId());
if (cBase!=null) {
crawlManager.performOfflineCrawl(ep, schemaService.findById(cBase.getDatamodelId()), cBase.getId());
crawlManager.performOfflineCrawl(c, ep, schemaService.findById(cBase.getDatamodelId()), cBase.getId());
}
return result;
}
......
package eu.dariah.de.search.controller.search;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dariah.de.search.exceptions.QueryExecutionException;
import eu.dariah.de.search.query.ExtendedQueryImpl;
import eu.dariah.de.search.query.Query;
import eu.dariah.de.search.query.SimpleQueryImpl;
import eu.dariah.de.search.query.execution.QueryExecutionServiceImpl;
import eu.dariah.de.search.query.meta.SruQueryExecutionServiceImpl;
/**
 * Common base for query-execution controllers. Provides the shared,
 * autowired collaborators and the JSON-to-{@link Query} deserialization
 * used by the concrete controller endpoints.
 */
public abstract class BaseQueryExecutionController {
	protected final Logger logger = LoggerFactory.getLogger(this.getClass());

	@Autowired protected ObjectMapper objMapper;
	@Autowired protected SruQueryExecutionServiceImpl sruQueryExecutionService;
	@Autowired protected QueryExecutionServiceImpl queryExecutionService;

	/**
	 * Deserializes a JSON query payload into a concrete {@link Query}.
	 * <p>
	 * Payloads wrapped in a top-level {@code "query"} object are unwrapped
	 * first. A {@code "queryString"} property selects the simple query
	 * flavor; any other shape is treated as an extended query.
	 *
	 * @param jsonQuery the raw JSON query string as received from the client
	 * @return the deserialized query instance
	 * @throws QueryExecutionException if the payload cannot be parsed or mapped
	 */
	protected Query deserializeQuery(String jsonQuery) throws QueryExecutionException {
		try {
			JsonNode queryNode = objMapper.readValue(jsonQuery, JsonNode.class);
			// Unwrap queries that arrive nested inside a "query" envelope
			if (queryNode.has("query")) {
				queryNode = queryNode.get("query");
			}
			// "queryString" marks the simple flavor; everything else maps to the extended one
			Class<? extends Query> targetType = queryNode.has("queryString")
					? SimpleQueryImpl.class
					: ExtendedQueryImpl.class;
			return objMapper.treeToValue(queryNode, targetType);
		} catch (IOException e) {
			throw new QueryExecutionException("Failed to deserialize provided query", e);
		}
	}
}
......@@ -5,6 +5,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
......@@ -15,6 +16,9 @@ import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
......@@ -34,9 +38,14 @@ import de.unibamberg.minf.processing.output.xml.XmlOutputService;
import eu.dariah.de.search.Constants.ResultElementRelationTypes;
import eu.dariah.de.search.Constants.RootElementKeys;
import eu.dariah.de.search.controller.BaseController;
import eu.dariah.de.search.exceptions.QueryExecutionException;
import eu.dariah.de.search.model.Collection;
import eu.dariah.de.search.model.ExtendedDatamodelContainer;
import eu.dariah.de.search.query.Query;
import eu.dariah.de.search.query.execution.ItemService;
import eu.dariah.de.search.query.execution.QueryExecutionServiceImpl;
import eu.dariah.de.search.query.results.FieldHighlight;
import eu.dariah.de.search.query.results.QueryResult;
import eu.dariah.de.search.query.results.ResultElement;
import eu.dariah.de.search.service.CollectionService;
import eu.dariah.de.search.service.DatamodelService;
......@@ -49,6 +58,7 @@ public class ItemController extends BaseController {
@Autowired private CollectionService collectionService;
@Autowired private DatamodelService datamodelService;
@Autowired private ItemService itemService;
@Autowired protected QueryExecutionServiceImpl queryExecutionService;
@Value("${datamodels.integration}")
protected String integrationModelEntityId;
......@@ -62,6 +72,11 @@ public class ItemController extends BaseController {
@GetMapping(value = "/")
public String getItem(@PathVariable String type, @PathVariable String itemId, Model model, Locale locale, HttpServletResponse response) throws IOException {
return this.postItem(type, itemId, null, model, locale, response);
}
@PostMapping(value = "/")
public String postItem(@PathVariable String type, @PathVariable String itemId, @RequestParam(required= false, name="query") String jsonQuery, Model model, Locale locale, HttpServletResponse response) throws IOException {
model.addAttribute("itemId", itemId);
if (itemId==null || type==null) {
return ITEM_404_VIEW;
......@@ -71,11 +86,30 @@ public class ItemController extends BaseController {
return ITEM_404_VIEW;
}
JsonNode itemSource = itemService.getItem(datamodel.getIndexName(), itemId, locale, false, false);
Query query = null;
if (jsonQuery!=null && !jsonQuery.isBlank()) {
try {
query = queryExecutionService.deserializeQuery(jsonQuery);
} catch (QueryExecutionException e) {
logger.error("Failed to deserialize query", e);
}
}
JsonNode itemSource = null;
ResultElement item = null;
if (itemSource!=null) {
if (query!=null) {
query.setItemId(itemId);
QueryResult qr = queryExecutionService.executeQuery(query, locale);
item = qr.getResultElements().get(0);
itemSource = item.getSource();
}
if (item==null) {
itemSource = itemService.getItem(datamodel.getIndexName(), itemId, locale, false, false);
item = itemService.renderResultElement(itemSource, datamodel.getIndexName(), itemId, locale);
}
}
if (item==null) {
return ITEM_404_VIEW;
}
......@@ -114,6 +148,14 @@ public class ItemController extends BaseController {
model.addAttribute("endpointId", item.getEndpointId());
model.addAttribute("datamodel", datamodel);
model.addAttribute("item", item);
if (item.getFieldHighlights()!=null) {
model.addAttribute("highlights", item.getFieldHighlights().stream()
.flatMap(f -> f.getHighlightTexts().stream())
.distinct()
.collect(Collectors.toList()));
}
return "item/view";
}
......@@ -129,7 +171,7 @@ public class ItemController extends BaseController {
@GetMapping(value = "/data")
public @ResponseBody String getItem(@PathVariable String type, @PathVariable String itemId, @RequestParam(name="type") String dataType, Model model, Locale locale, HttpServletResponse response) throws IOException {
public @ResponseBody String getData(@PathVariable String type, @PathVariable String itemId, @RequestParam(name="type") String dataType, Model model, Locale locale, HttpServletResponse response) throws IOException {
ExtendedDatamodelContainer datamodel = datamodelService.findById(type);
JsonNode itemSource = itemService.getItem(datamodel.getIndexName(), itemId, locale, dataType.equals(RootElementKeys.INTEGRATIONS.toString()), dataType.equals(RootElementKeys.CONTENT.toString()));
ResultElement item = itemService.renderResultElement(itemSource, datamodel.getIndexName(), itemId, locale);
......
......@@ -36,7 +36,7 @@ public class MetaqueryExecutionController extends BaseQueryExecutionController {
@PostMapping
public @ResponseBody List<QueryResult> queryAsPost(@RequestBody String q, HttpServletResponse response, Locale locale) throws QueryExecutionException {
Query query = this.deserializeQuery(q);
Query query = queryExecutionService.deserializeQuery(q);
List<QueryResult> results = new ArrayList<>();
//results.addAll(sruQueryExecutionService.executeQuery(query, locale));
......
......@@ -16,6 +16,7 @@ import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
......@@ -32,13 +33,11 @@ import eu.dariah.de.search.service.DatamodelService;
@Controller
@RequestMapping("/query")
public class QueryExecutionController extends BaseQueryExecutionController {
@Autowired private QueryExecutionServiceImpl queryExecutionService;
public class QueryExecutionController extends BaseQueryExecutionController {
@Autowired private AggregationService aggregationService;
@Autowired private CustomSearchService customSearchService;
@Autowired protected DatamodelService datamodelService;
@Autowired protected ObjectMapper objMapper;
@GetMapping
public @ResponseBody List<QueryResult> queryAsGet(@RequestParam String q, HttpServletResponse response, Locale locale) {
......@@ -56,7 +55,7 @@ public class QueryExecutionController extends BaseQueryExecutionController {
@PostMapping
public @ResponseBody List<QueryResult> queryAsPost(@RequestBody String q, HttpServletResponse response, Locale locale) throws QueryExecutionException {
Query query = this.deserializeQuery(q);
Query query = queryExecutionService.deserializeQuery(q);
List<QueryResult> results = new ArrayList<>();
if ((query.getSourceIds()==null || query.getSourceIds().isEmpty()) && query.getCustomSearch()!=null && !query.getCustomSearch().isEmpty()) {
List<CustomSearch> cs = customSearchService.findByPrefix(query.getCustomSearch());
......@@ -80,7 +79,7 @@ public class QueryExecutionController extends BaseQueryExecutionController {
@PostMapping(value="/tags/")
public @ResponseBody JsonNode getTags(@RequestBody String q, HttpServletResponse response) throws QueryExecutionException {
Query query = this.deserializeQuery(q);
Query query = queryExecutionService.deserializeQuery(q);
ObjectNode result = objMapper.createObjectNode();
ArrayNode sourceIds = objMapper.createArrayNode();
for (String sourceId : query.getSourceIds()) {
......
......@@ -8,7 +8,7 @@ import eu.dariah.de.search.model.Endpoint;
import eu.dariah.de.search.model.ExtendedDatamodelContainer;
public interface CrawlManager extends ProcessingListener {
public void performOfflineCrawl(Endpoint ep, ExtendedDatamodelContainer sc, String baseCrawlId);
public void performOfflineCrawl(Collection collection, Endpoint ep, ExtendedDatamodelContainer sc, String baseCrawlId);
public void performOnlineCrawl(Collection ds, Endpoint ep, ExtendedDatamodelContainer sc);
public CrawlState getCrawlState(String crawlId);
......
......@@ -20,6 +20,11 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.MissingNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import de.unibamberg.minf.processing.exception.GenericProcessingException;
import de.unibamberg.minf.processing.exception.ProcessingConfigException;
import de.unibamberg.minf.processing.service.base.ProcessingService.ProcessingServiceStates;
......@@ -43,7 +48,7 @@ public class CrawlManagerImpl implements CrawlManager, ApplicationContextAware,
@Autowired protected CrawlService crawlService;
@Autowired protected CollectionService collectionService;
@Autowired private ObjectMapper objectMapper;
private String baseDownloadPath;
......@@ -122,13 +127,26 @@ public class CrawlManagerImpl implements CrawlManager, ApplicationContextAware,
if (modified) {
collectionService.saveCollection(collection);
}
this.performCrawl(crawlService.createOnlineCrawl(collection.getId(), endpoint.getId(), datamodel.getId()), endpoint, datamodel);
this.performCrawl(crawlService.createOnlineCrawl(collection.getId(), endpoint.getId(), datamodel.getId()), endpoint, datamodel, this.getSessionData(collection));
}
@Override
public void performOfflineCrawl(Endpoint endpoint, ExtendedDatamodelContainer datamodel, String baseCrawlId) {
this.performCrawl(crawlService.createOfflineCrawl(baseCrawlId), endpoint, datamodel);
public void performOfflineCrawl(Collection collection, Endpoint endpoint, ExtendedDatamodelContainer datamodel, String baseCrawlId) {
this.performCrawl(crawlService.createOfflineCrawl(baseCrawlId), endpoint, datamodel, this.getSessionData(collection));
}
/*
 * Builds the crawl session-data node from the collection's stored metadata
 * JSON: the parsed metadata is attached under the "collection" key. Returns
 * MissingNode when no metadata is present or when parsing fails — this is
 * best-effort; failures are logged and never propagated to the crawl.
 */
private JsonNode getSessionData(Collection collection) {
// No metadata stored for this collection -> nothing to expose to the crawl
if (collection.getCollectionMetadata()==null || collection.getCollectionMetadata().isBlank()) {
return MissingNode.getInstance();
}
try {
ObjectNode cNode = objectMapper.createObjectNode();
// Wrap the parsed metadata under "collection" so scripts address it by a stable key
cNode.set("collection", objectMapper.readTree(collection.getCollectionMetadata()));
return cNode;
// Broad catch is deliberate: malformed stored metadata must not abort the crawl
} catch (Exception e) {
logger.error("Failed to read collection metadata to session data", e);
return MissingNode.getInstance();
}
}
@Override
......@@ -192,14 +210,14 @@ public class CrawlManagerImpl implements CrawlManager, ApplicationContextAware,
}
}
private void performCrawl(Crawl crawl, Endpoint endpoint, ExtendedDatamodelContainer datamodel) {
private void performCrawl(Crawl crawl, Endpoint endpoint, ExtendedDatamodelContainer datamodel, JsonNode sessionData) {
if (crawl==null || endpoint==null || datamodel==null) {
logger.warn("Could not create crawl pipeline. Either crawl, endpoint or datamodel were unset");
return;
}
try {
MDC.put("uid", crawl.getId());
CrawlPipeline pipeline = this.createPipeline(endpoint, datamodel, crawl);
CrawlPipeline pipeline = this.createPipeline(endpoint, datamodel, crawl, sessionData);
if (pipeline!=null) {
this.enqueue(pipeline, crawl);
}
......@@ -213,7 +231,7 @@ public class CrawlManagerImpl implements CrawlManager, ApplicationContextAware,
}
}
private CrawlPipeline createPipeline(Endpoint ep, ExtendedDatamodelContainer sc, Crawl c) throws ProcessingConfigException, GenericProcessingException, IOException {
private CrawlPipeline createPipeline(Endpoint ep, ExtendedDatamodelContainer sc, Crawl c, JsonNode sessionData) throws ProcessingConfigException, GenericProcessingException, IOException {
String access = null;
String file = null;
for (AccessMethods mAv : AccessMethods.values()) {
......@@ -240,7 +258,7 @@ public class CrawlManagerImpl implements CrawlManager, ApplicationContextAware,
return null;
}
CrawlingExecutionContext ctx = new CrawlingExecutionContext(this.baseDownloadPath, c);
CrawlingExecutionContext ctx = new CrawlingExecutionContext(this.baseDownloadPath, c, sessionData);
List<Crawler> crawlers = this.getCrawlers(access, file, c.getBaseCrawlId()==null);
ResourceIndexingServiceImpl indexer;
for (Crawler crawler : crawlers) {
......
......@@ -10,11 +10,13 @@ import java.util.concurrent.locks.ReentrantLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.unibamberg.minf.gtf.context.ExecutionContext;
import de.unibamberg.minf.processing.exception.GenericProcessingException;
import de.unibamberg.minf.processing.exception.ProcessingConfigException;
import de.unibamberg.minf.processing.listener.ProcessingListener;
import eu.dariah.de.search.crawling.crawler.Crawler;
import net.bytebuddy.asm.Advice.This;
public class CrawlPipelineImpl implements CrawlPipeline {
protected static final Logger logger = LoggerFactory.getLogger(CrawlPipelineImpl.class);
......@@ -25,6 +27,7 @@ public class CrawlPipelineImpl implements CrawlPipeline {
private String crawlId;
private ExecutorService pipelineExecutor = Executors.newSingleThreadExecutor();
private ReentrantLock lock = new ReentrantLock();
private ExecutionContext executionContext;
private ProcessingServiceStates state = ProcessingServiceStates.WAITING;
private long stageSize = 0;
......@@ -47,6 +50,9 @@ public class CrawlPipelineImpl implements CrawlPipeline {
@Override public ProcessingListener getListener() { return this.listener; }
@Override public void setListener(ProcessingListener listener) { this.listener = listener; }
@Override public ExecutionContext getExecutionContext() { return executionContext; }
@Override public void setExecutionContext(ExecutionContext executionContext) { this.executionContext = executionContext; }
@Override public boolean isCancellationRequested() { return cancellationRequested; }
public CrawlPipelineImpl(String crawlId, List<Crawler> runnables) throws GenericProcessingException {
......@@ -55,7 +61,7 @@ public class CrawlPipelineImpl implements CrawlPipeline {
}
this.uuid = UUID.randomUUID();
this.runnablesMap = new LinkedHashMap<UUID, Crawler>();
this.runnablesMap = new LinkedHashMap<>();
this.crawlId = crawlId;
try {
for (Crawler svc : runnables) {
......
......@@ -229,7 +229,7 @@ public class TimedCrawlManagerImpl extends CrawlManagerImpl implements TimedCraw
if (dataset.getId().equals(datamodel.getId())) {
String baseCrawlId = this.getCompletedOnlineCrawlId(endpoint.getId(), dataset.getId());
if (baseCrawlId!=null) {
this.performOfflineCrawl(endpoint, datamodel, baseCrawlId);
this.performOfflineCrawl(collection, endpoint, datamodel, baseCrawlId);
if (this.debugging) {
logger.debug("");
}
......
......@@ -6,6 +6,8 @@ import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.springframework.util.Assert;
import com.fasterxml.jackson.databind.JsonNode;
import de.unibamberg.minf.gtf.context.ExecutionContext;
import eu.dariah.de.search.model.Crawl;
......@@ -14,17 +16,18 @@ public class CrawlingExecutionContext implements ExecutionContext {
private final String collectionId;
private final String endpointId;
private final String datasetId;
private final JsonNode sessionData;
private final String workingDir;
public String getPathPrefix() { return pathPrefix; }
public String getCollectionId() { return collectionId; }
public String getEndpointId() { return endpointId; }
public String getDatasetId() { return datasetId; }
@Override public JsonNode getSessionData() { return this.sessionData; }
@Override public String getWorkingDir() { return this.workingDir; }
public CrawlingExecutionContext(String pathPrefix, String collectionId, String endpointId, String datasetId) throws IOException {
public CrawlingExecutionContext(String pathPrefix, String collectionId, String endpointId, String datasetId, JsonNode sessionData) throws IOException {
Assert.notNull(pathPrefix);
Assert.notNull(collectionId);
Assert.notNull(endpointId);
......@@ -33,7 +36,7 @@ public class CrawlingExecutionContext implements ExecutionContext {
this.collectionId = collectionId;
this.endpointId = endpointId;
this.datasetId = datasetId;
this.sessionData = sessionData;
this.workingDir = pathPrefix + File.separator + this.getCollectionId() + File.separator + this.getEndpointId() + File.separator + this.getDatasetId() + File.separator;
File workingDir = new File(this.workingDir);
......@@ -41,7 +44,7 @@ public class CrawlingExecutionContext implements ExecutionContext {
FileUtils.forceMkdir(new File(this.workingDir));
}
}
public CrawlingExecutionContext(String string, Crawl c) throws IOException {
this(string, c.getCollectionId(), c.getEndpointId(), c.getDatamodelId());
public CrawlingExecutionContext(String string, Crawl c, JsonNode sessionData) throws IOException {
this(string, c.getCollectionId(), c.getEndpointId(), c.getDatamodelId(), sessionData);
}
}
\ No newline at end of file
......@@ -17,7 +17,7 @@ public class UserDaoImpl extends BaseMongoDaoImpl<User> implements UserDao {
public User findByUsername(String domain, String username) {
Query q = new Query();
q.addCriteria(Criteria.where("username").is(username));
q.addCriteria(Criteria.where("endpointId").is(domain));
q.addCriteria(Criteria.where("issuer").is(domain));
return this.findOne(q);
}
}
......@@ -4,12 +4,17 @@ import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.EnumMap;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Component;
import de.unibamberg.minf.dme.model.base.Grammar;
import de.unibamberg.minf.dme.model.grammar.AuxiliaryFile;
import de.unibamberg.minf.dme.model.grammar.GrammarContainer;
import de.unibamberg.minf.dme.model.grammar.AuxiliaryFile.FileTypes;
import de.unibamberg.minf.gtf.compilation.GrammarCompiler;
import de.unibamberg.minf.gtf.exceptions.GrammarProcessingException;
import lombok.extern.slf4j.Slf4j;
......@@ -32,6 +37,9 @@ public class GrammarDaoImpl extends BaseFsDao implements GrammarDao, Initializin
FileUtils.deleteDirectory(new File(this.getGrammarDirectory(grammarId)));
}
/*
* TODO: refactor to a common method somewhere in gtf (@see de.unibamberg.minf.dme.service.GrammarServiceImpl)
*/
@Override
public boolean saveGrammar(GrammarContainer gc) throws IOException {
if (gc.getParserGrammar()==null || gc.getParserGrammar().trim().isEmpty()) {
......@@ -51,20 +59,38 @@ public class GrammarDaoImpl extends BaseFsDao implements GrammarDao, Initializin
}
Files.createDirectories(Paths.get(dirPath));
if (gc.getLexerGrammar()!=null && !gc.getLexerGrammar().trim().isEmpty()) {
gc.setLexerGrammar("lexer grammar " + gc.getId() + "Lexer;\n\n" + gc.getLexerGrammar());
Files.write(Paths.get(filePathPrefix + "Lexer.g4"), gc.getLexerGrammar().getBytes());
String lexerGrammar = gc.getLexerGrammar();
String parserGrammar = gc.getParserGrammar();
Map<FileTypes, String> fileTypeNameMap = new EnumMap<>(FileTypes.class);
if (gc.getAuxiliaryFiles()!=null) {
String content;
for (AuxiliaryFile f : gc.getAuxiliaryFiles()) {
content = f.getContent().replace("{LEXER}", gc.getId() + "Lexer").replace("{PARSER}", gc.getId() + "Parser");
Files.write(Paths.get(dirPath + f.getFileType().getFileName()), content.getBytes());
fileTypeNameMap.put(f.getFileType(), f.getFileType().getFileName().substring(0, f.getFileType().getFileName().indexOf('.')));
}
}
if (lexerGrammar!=null && !lexerGrammar.trim().isEmpty()) {
if (fileTypeNameMap.containsKey(FileTypes.LEXER_SUPERCLASS)) {
lexerGrammar = "options { superClass= " + fileTypeNameMap.get(FileTypes.LEXER_SUPERCLASS) + "; }\n\n" + lexerGrammar;
}
lexerGrammar = "lexer grammar " + gc.getId() + "Lexer;\n\n" + lexerGrammar;