diff --git a/FdpSettings.json b/FdpSettings.json
new file mode 100644
index 0000000..c21349c
--- /dev/null
+++ b/FdpSettings.json
@@ -0,0 +1,9 @@
+{
+ "forms": {
+ "autocomplete": {
+ "searchNamespace": true,
+ "sources": [
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/Properties.yaml b/Properties.yaml
index 6f0801e..40d0664 100644
--- a/Properties.yaml
+++ b/Properties.yaml
@@ -67,4 +67,4 @@ outputRoot: "C:\\Users\\PatrickDekker(Health\\IdeaProjects\\health-ri-metadata\\
Core\\"
piecesDir: "PiecesShape"
fairDataPointDir: "FairDataPointShape"
-validationDir: "ValidationShape"
+validationDir: "ValidationShape"
\ No newline at end of file
diff --git a/README.md b/README.md
index 460b300..a4e5427 100644
--- a/README.md
+++ b/README.md
@@ -49,7 +49,7 @@ Run the tool with the required configuration file:
-i path to Properties.yaml, you can use relative location (default is ./Properties.yaml) works if the property file is
located at the some location as the jar file.
--u fdp admin user (default: albert.einstein@example.com)
+-u fdpClient admin user (default: albert.einstein@example.com)
-p password (default: password)
diff --git a/pom.xml b/pom.xml
index bb9bc08..7610b29 100644
--- a/pom.xml
+++ b/pom.xml
@@ -72,7 +72,7 @@
ch.qos.logback
logback-core
- 1.5.18
+ 1.5.19
ch.qos.logback
@@ -117,8 +117,6 @@
fr.sparna.rdf.xls2rdf
xls2rdf-pom
3.2.1
-
-
org.apache.poi
@@ -135,6 +133,25 @@
poi-ooxml-schemas
4.1.0
+
+
+ org.mockito
+ mockito-core
+ 5.20.0
+ test
+
+
+
+ org.springframework.boot
+ spring-boot-starter
+ 3.5.0
+
+
+
+ org.modelmapper
+ modelmapper
+ 3.2.6
+
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/FDP.java b/src/main/java/nl/healthri/fdp/uploadschema/FDP.java
deleted file mode 100644
index 7f148c0..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/FDP.java
+++ /dev/null
@@ -1,188 +0,0 @@
-package nl.healthri.fdp.uploadschema;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import nl.healthri.fdp.uploadschema.requestbodies.*;
-import nl.healthri.fdp.uploadschema.requestresponses.*;
-import nl.healthri.fdp.uploadschema.tasks.ResourceUpdateInsertTask;
-import nl.healthri.fdp.uploadschema.tasks.ShapeUpdateInsertTask;
-import nl.healthri.fdp.uploadschema.utils.RequestBuilder;
-import nl.healthri.fdp.uploadschema.utils.ResourceMap;
-import nl.healthri.fdp.uploadschema.utils.ShapesMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.helpers.MessageFormatter;
-
-import java.net.URI;
-import java.net.http.HttpClient;
-import java.time.Duration;
-import java.time.temporal.ChronoUnit;
-import java.util.*;
-import java.util.stream.Collectors;
-
-public class FDP implements AutoCloseable {
- private static final Logger logger = LoggerFactory.getLogger(FDP.class);
-
- private final String url;
- private final ObjectMapper mapper;
- private final HttpClient client;
- private TokenResponse token;
-
- private FDP(URI url) {
- this.url = url.toString();
- this.mapper = new ObjectMapper(new JsonFactory());
- this.client = HttpClient.newBuilder()
- .followRedirects(HttpClient.Redirect.NORMAL)
- .connectTimeout(Duration.of(500, ChronoUnit.MILLIS))
- .build();
- }
-
- public static FDP connectToFdp(URI url, String username, String password) {
- FDP fdp = new FDP(url);
- var info = fdp.request().setUri(fdp.url("actuator/info")).get(FDPInfoResponse.class);
- logger.info("FDP info: {}", info.toString());
-
- var mp = new loginParms(username, password);
- var token = fdp.request().setUri(fdp.url("tokens")).setBody(mp).post(TokenResponse.class);
-
- logger.info("Token received: {}", token);
- fdp.token = token;
-
- return fdp;
- }
-
- private String url(String path) {
- return MessageFormatter.format("{}/{}", url, path).getMessage();
- }
-
- private String url(String path, String uuid) {
- return MessageFormatter.arrayFormat("{}/{}/{}", new Object[]{url, path, uuid}).getMessage();
- }
-
- private Set getParentUID(Set shapes) {
- if (shapes.isEmpty()) {
- return Collections.emptySet();
- }
- var shapesOnFdp = fetchSchemaFromFDP();
- return shapes.stream()
- .map(shapesOnFdp::getUUID).flatMap(Optional::stream)
- .collect(Collectors.toSet());
- }
-
- public RequestBuilder request() {
- return new RequestBuilder(mapper, client);
-
- }
-
- public ShapesMap fetchSchemaFromFDP() {
- logger.info("Fetch schema info from fdp");
- var sp = new SchemaParms(false, true);
- SchemaDataResponse[] schemas = request().setUri(url + "/metadata-schemas", sp)
- .setBody(sp).setToken(token).get(SchemaDataResponse[].class);
- return new ShapesMap(schemas);
- }
-
- public ResourceMap fetchResourceFromFDP() {
- logger.info("Fetch resource info from fdp");
- ResourceResponse[] resources = request().setUri(url + "/resource-definitions")
- .setToken(token).get(ResourceResponse[].class);
-
- return new ResourceMap(resources);
- }
-
- public void insertResource(ResourceUpdateInsertTask task) {
- logger.info("Insert {} resource into the fdp", task.resource);
- ResourceParms RP = new ResourceParms(
- task.resource,
- task.url(),
- new ArrayList<>(List.of(task.shapeUUUID)),
- new ArrayList<>(),
- new ArrayList<>(),
- new ArrayList<>());
- ResourceResponse rer = request().setUri(url("resource-definitions"))
- .setBody(RP)
- .setToken(token).post(ResourceResponse.class);
- task.UUID = rer.uuid();
- }
-
- public void updateResource(ResourceUpdateInsertTask task) {
- logger.info("fetch resource {} from fdp", task.resource);
- var rr = request()
- .setToken(token)
- .setUri(url("resource-definitions", task.UUID))
- .get(ResourceResponse.class);
-
- if (rr.children().stream().anyMatch(c -> c.resourceDefinitionUuid().equals(task.childUUuid))) {
- logger.warn("resource {} already has link to child {}", rr.name(), task.childName);
- } else {
- //FIXME TagsURI is hardcoded..
- var child = new ResourceResponse.Child(task.childUUuid, task.childRelationIri,
- new ResourceResponse.ListView(task.pluralName(), "http://www.w3.org/ns/dcat#themeTaxonomy", new ArrayList<>()));
- rr.children().add(child);
- }
-
- logger.info("update resource {} on the fdp", task.resource);
- request().setToken(token)
- .setUri(url("resource-definitions", task.UUID))
- .setBody(rr)
- .put(ResourceResponse.class);
- }
-
- /**
- * @param t task, with info about the shape to create,
- * when the shapes are created it will update this parameter by setting the UUID!
- */
- public void insertSchema(ShapeUpdateInsertTask t) {
- logger.info("Insert {} shape into the fdp", t.shape);
- EditSchemaParms esp = new EditSchemaParms(t.shape,
- t.description(), false,
- t.model, getParentUID(t.parents),
- t.shape,
- t.url());
-
- SchemaEdit se = request().setUri(url + "/metadata-schemas")
- .setBody(esp)
- .setToken(token)
- .post(SchemaEdit.class);
- t.uuid = se.uuid();
- }
-
- /**
- * update exiting shape on the fdp
- *
- * @param t task, with shape information
- */
-
- public void updateSchema(ShapeUpdateInsertTask t) {
- logger.info("Update {} into the fdp", t.shape);
- EditSchemaParms esp = new EditSchemaParms(t.shape,
- t.description(), false,
- t.model, getParentUID(t.parents),
- t.shape,
- t.url());
-
- //result of request is not needed.
- request().setUri(url + "/metadata-schemas/" + t.uuid + "/draft")
- .setBody(esp)
- .setToken(token)
- .put(SchemaEdit.class);
- }
-
- public void releaseSchema(ShapeUpdateInsertTask t) {
- logger.info("Release {} into the fdp", t.shape);
- ReleaseSchemaParms rsp = ReleaseSchemaParms.of(t.shape,
- false, t.version);
-// result of request is not needed.
- request().setUri(url + "/metadata-schemas/" + t.uuid + "/versions")
- .setBody(rsp)
- .setToken(token)
- .post(SchemaDataResponse.class);
- }
-
- @Override
- public void close() {
- if (client != null) {
- client.close();
- }
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/SchemaTools.java b/src/main/java/nl/healthri/fdp/uploadschema/SchemaTools.java
index 7822524..3518daa 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/SchemaTools.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/SchemaTools.java
@@ -1,10 +1,15 @@
package nl.healthri.fdp.uploadschema;
-import nl.healthri.fdp.uploadschema.tasks.ResourceUpdateInsertTask;
-import nl.healthri.fdp.uploadschema.tasks.ShapeUpdateInsertTask;
-import nl.healthri.fdp.uploadschema.utils.Properties;
-import nl.healthri.fdp.uploadschema.utils.RdfUtils;
-import org.eclipse.rdf4j.model.Model;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import nl.healthri.fdp.uploadschema.config.fdp.Settings;
+import nl.healthri.fdp.uploadschema.integrations.FdpClient;
+import nl.healthri.fdp.uploadschema.integrations.exceptions.FdpClientException;
+import nl.healthri.fdp.uploadschema.services.FdpService;
+import nl.healthri.fdp.uploadschema.services.ResourceTaskService;
+import nl.healthri.fdp.uploadschema.services.SchemaToolService;
+import nl.healthri.fdp.uploadschema.services.ShapeTaskService;
+import nl.healthri.fdp.uploadschema.utils.FileHandler;
+import nl.healthri.fdp.uploadschema.config.fdp.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
@@ -13,11 +18,8 @@
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-
-import static java.util.function.Predicate.not;
+import java.net.http.HttpClient;
+import java.time.Duration;
@CommandLine.Command(name = "SchemaTools utility that create FDP ready Shacls and upload them the the FDP.",
mixinStandardHelpOptions = true, version = "SchemaTool v1.0")
@@ -25,6 +27,9 @@ public class SchemaTools implements Runnable {
private static final Logger logger = LoggerFactory.getLogger(SchemaTools.class);
+ @CommandLine.Option(names = {"-s", "--settings"}, defaultValue = "./FdpSettings.json", description = "location of the FdpSettings.json file (default: ${DEFAULT-VALUE})")
+ File settingsFile;
+
@CommandLine.Option(names = {"-i", "--input"}, defaultValue = "./Properties.yaml", description = "location of the Property.yaml file (default: ${DEFAULT-VALUE})")
File propertyFile;
@@ -41,6 +46,9 @@ public class SchemaTools implements Runnable {
@CommandLine.Option(names = {"-c", "--command"}, defaultValue = "both", description = "Valid values: ${COMPLETION-CANDIDATES} (default: ${DEFAULT-VALUE})", converter = CommandEnumConverter.class)
CommandEnum command;
+ @CommandLine.Option(names = {"-f", "--force"}, defaultValue = "false", description = "Force upload even if schema has not changed")
+ boolean force;
+
public static void main(String... args) {
var cmd = new CommandLine(new SchemaTools());
System.exit(cmd.execute(args));
@@ -49,64 +57,44 @@ public static void main(String... args) {
@Override
public void run() {
try {
- final Properties p = Properties.load(propertyFile);
- if (command == CommandEnum.TEMPLATE) {
- //1 read all(!) excel files from folder, write shapes in Pieces directory
- logger.info("reading templates from {} ", p.templateDir);
- for (var e : XlsToRdfUtils.getTemplateFiles(p.templateDir).entrySet()) {
- logger.info(" converting {} ", e.getValue());
- Path path = p.getPiecesDir().resolve(e.getKey() + ".ttl");
- String shacl = XlsToRdfUtils.createShacl(e.getValue());
- Files.write(path, shacl.getBytes());
- }
- //2 merge piece to FairDataPoint dir.
- logger.info("Writing files: {}", p.getFiles().keySet());
- for (var e : p.getFiles(p.getPiecesDir()).entrySet()) {
- Path path = p.getFairDataPointDir().resolve(RdfUtils.schemaToFile(e.getKey()));
- Model m = RdfUtils.readFiles(e.getValue());
- RdfUtils.safeModel(path, m);
- }
- //3 merge all pieces into validation dir.
+ HttpClient client = HttpClient.newBuilder()
+ .connectTimeout(Duration.ofSeconds(10))
+ .build();
- Path path = p.getValidationDir().resolve("HRI-Datamodel-shapes.ttl");
- logger.info("Write validation file {} combining {} files", path, p.getAllFiles().size());
- Model m = RdfUtils.readFiles(new ArrayList<>(p.getAllFiles()));
- RdfUtils.safeModel(path, m);
- } else {
- logger.info("Connecting to FDP at {} as {} ", hostname, username);
+ final ObjectMapper objectMapper = new ObjectMapper();
+ final FdpClient fdpClient = new FdpClient(client, this.hostname, objectMapper);
+ final FdpService fdpService = new FdpService(fdpClient);
- final FDP fdp = FDP.connectToFdp(hostname, username, password);
+ final Properties properties = Properties.load(propertyFile);
- if (command == CommandEnum.SCHEMA || command == CommandEnum.BOTH) {
- //Shapes we want to update/insert
- var shapeTasks = ShapeUpdateInsertTask.createTasks(p, fdp);
+ final ResourceTaskService resourceTaskService = new ResourceTaskService(fdpService, properties);
-// insert new schemas and keep the UUID, this is needed for the "release step"
- shapeTasks.stream().filter(ShapeUpdateInsertTask::isInsert).forEach(fdp::insertSchema);
+ final FileHandler fileHandler = new FileHandler();
+ final ShapeTaskService shapeTaskService = new ShapeTaskService(fdpService, fileHandler, properties);
+ final SchemaToolService schemaToolService = new SchemaToolService(fdpService, resourceTaskService, shapeTaskService, properties, fileHandler);
-// update existing shape, will get status draft.
- shapeTasks.stream().filter(not(ShapeUpdateInsertTask::isInsert)).forEach(fdp::updateSchema);
+ fdpService.authenticate(this.username, this.password);
- //the draft-schema are released,
- shapeTasks.forEach(fdp::releaseSchema);
- }
-
- if (command == CommandEnum.RESOURCE || command == CommandEnum.BOTH) {
- //add resource-descriptions
- var resourceTasks = ResourceUpdateInsertTask.createTask(p, fdp);
- resourceTasks.stream().filter(ResourceUpdateInsertTask::isInsert).forEach(fdp::insertResource);
+ final Settings newFdpSettings = Settings.GetSettings(settingsFile);
+ fdpService.updateSettings(newFdpSettings);
- if (resourceTasks.stream().noneMatch(not(ResourceUpdateInsertTask::isInsert))) {
- logger.warn("Updating resources is not supported yet, but will try to add children if needed)");
- }
-
- //add the previous resources as child to parent.
- var resourceTasksParents = ResourceUpdateInsertTask.createParentTask(p, fdp);
- resourceTasksParents.stream().filter(ResourceUpdateInsertTask::hasChild).forEach(fdp::updateResource);
+ switch (command) {
+ case TEMPLATE -> {
+ schemaToolService.convertTemplatesToShaclShapes();
+ schemaToolService.mergeShapesToFdpSchemas();
+ schemaToolService.mergeShapesForValidation();
+ }
+ case BOTH -> {
+ schemaToolService.createOrUpdateSchemas(force);
+ schemaToolService.addResourceDescriptions();
}
+ case SCHEMA -> schemaToolService.createOrUpdateSchemas(force);
+ case RESOURCE -> schemaToolService.addResourceDescriptions();
}
- } catch (IOException io) {
- throw new RuntimeException(io);
+ } catch (IOException e) {
+ logger.error("Unexpected error: {}", e.getMessage());
+ } catch (FdpClientException e){
+ logger.error("FDP Connection Error: {}", e.getMessage());
}
}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/Properties.java b/src/main/java/nl/healthri/fdp/uploadschema/config/fdp/Properties.java
similarity index 81%
rename from src/main/java/nl/healthri/fdp/uploadschema/utils/Properties.java
rename to src/main/java/nl/healthri/fdp/uploadschema/config/fdp/Properties.java
index 921f5a8..3d9eb31 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/utils/Properties.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/config/fdp/Properties.java
@@ -1,12 +1,14 @@
-package nl.healthri.fdp.uploadschema.utils;
+package nl.healthri.fdp.uploadschema.config.fdp;
import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.exc.StreamReadException;
+import com.fasterxml.jackson.databind.DatabindException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
-import nl.healthri.fdp.uploadschema.Version;
+import nl.healthri.fdp.uploadschema.domain.Version;
import java.io.File;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
@@ -22,16 +24,11 @@ public class Properties {
public final Map resources = new HashMap<>();
public List schemasToPublish;
public String schemaVersion;
-
public String inputDir;
-
public String templateDir;
public String outputRoot;
- @JsonProperty("piecesDir")
public String piecesDir;
- @JsonProperty("fairDataPointDir")
public String fairDataPointDir;
- @JsonProperty("validationDir")
public String validationDir;
public record ResourceProperties(
@@ -41,8 +38,23 @@ public record ResourceProperties(
}
public static Properties load(File file) throws IOException {
- var mapper = new ObjectMapper(new YAMLFactory());
- return mapper.readValue(file, Properties.class);
+ if (!file.exists() || !file.isFile()) {
+ throw new FileNotFoundException("Properties file not found: " + file.getAbsolutePath());
+ }
+
+ try {
+ var mapper = new ObjectMapper(new YAMLFactory());
+ return mapper.readValue(file, Properties.class);
+ } catch (StreamReadException e) {
+ // Malformed YAML (e.g., bad indentation, syntax error)
+ throw new IOException("Failed to read the YAML contents from file: " + file.getAbsolutePath(), e);
+ } catch (DatabindException e) {
+ // Valid YAML, but doesn't match the Properties structure
+ throw new IOException("Failed to bind YAML content to Properties object from file: " + file.getAbsolutePath(), e);
+ } catch (IOException e) {
+ // Other IO issues
+ throw new IOException("Error while reading properties from file: " + file.getAbsolutePath(), e);
+ }
}
/**
@@ -99,6 +111,10 @@ public Version getVersion() {
return new Version(schemaVersion);
}
+ public List getSchemasToPublish() {
+ return this.schemasToPublish;
+ }
+
@JsonIgnore
public Path getPiecesDir() {
return Path.of(outputRoot, piecesDir);
@@ -172,5 +188,8 @@ public Set getParents(String child) {
.map(Map.Entry::getKey).collect(Collectors.toSet());
}
+ public Map getResourceProperties() {
+ return this.resources;
+ }
}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/config/fdp/Settings.java b/src/main/java/nl/healthri/fdp/uploadschema/config/fdp/Settings.java
new file mode 100644
index 0000000..008d276
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/config/fdp/Settings.java
@@ -0,0 +1,127 @@
+package nl.healthri.fdp.uploadschema.config.fdp;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsResponseDto;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import nl.healthri.fdp.uploadschema.config.fdp.Settings.Forms.Autocomplete.Source;
+
+public class Settings {
+ private static Settings settings;
+
+ public String clientUrl;
+ public String persistentUrl;
+ public String appTitle;
+ public String appSubtitle;
+ public String appTitleFromConfig;
+ public String appSubtitleFromConfig;
+ public List metadataMetrics;
+ public Ping ping;
+ public Repository repository;
+ public Search search;
+ public Forms forms;
+
+ public static class MetadataMetric {
+ public String metricUri;
+ public String resourceUri;
+
+ }
+
+ public static class Ping {
+ public boolean enabled;
+ public List endpoints;
+ public List endpointsFromConfig;
+ public String interval;
+ }
+
+ public static class Repository {
+ public String type;
+ }
+
+ public static class Search {
+ public List filters;
+ }
+
+ public static class Forms {
+ public Autocomplete autocomplete;
+
+ public static class Autocomplete {
+ public boolean searchNamespace;
+ public List sources;
+
+ public static class Source {
+ public String rdfType;
+ public String sparqlEndpoint;
+ public String sparqlQuery;
+
+ public String getRdfType() {
+ return rdfType;
+ }
+ }
+ }
+ }
+
+ public Settings() {}
+
+ // Always returns the existing settings if already initialized.
+ public static Settings GetSettings(){
+ if(settings == null){
+ throw new NullPointerException("Settings instance is not set");
+ }
+
+ return settings;
+ }
+
+ // Always returns the existing settings if already initialized.
+ public static Settings GetSettings(File file) throws IOException {
+ if(settings == null){
+ if (!file.exists() || !file.isFile()) {
+ throw new FileNotFoundException("Settings file not found: " + file.getAbsolutePath());
+ }
+ ObjectMapper mapper = new ObjectMapper();
+ settings = mapper.readValue(file, Settings.class);
+ }
+
+ return settings;
+ }
+
+
+ // Merges missing sources from new settings into current settings
+ public Settings Merge(Settings newSettings){
+ // Early return if new settings are invalid
+ if (newSettings == null ||
+ newSettings.forms == null ||
+ newSettings.forms.autocomplete == null ||
+ newSettings.forms.autocomplete.sources == null) {
+ return this;
+ }
+
+ List currentSources = this.forms.autocomplete.sources;
+ List newSources = newSettings.forms.autocomplete.sources;
+
+ // Collect existing rdfTypes into a Set for simpler lookup
+ Set existingTypes = currentSources.stream()
+ .map(Source::getRdfType)
+ .filter(Objects::nonNull)
+ .collect(Collectors.toSet());
+
+ // Add sources with rdfTypes not located in currentSources
+ newSources.stream()
+ .filter(s -> s.getRdfType() != null && !existingTypes.contains(s.getRdfType()))
+ .forEach(currentSources::add);
+
+ return this;
+ }
+
+ public static Settings convertToEntity(SettingsResponseDto settingsResponseDTO) {
+ ObjectMapper mapper = new ObjectMapper();
+ return mapper.convertValue(settingsResponseDTO, Settings.class);
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/domain/ResourceTask.java b/src/main/java/nl/healthri/fdp/uploadschema/domain/ResourceTask.java
new file mode 100644
index 0000000..0db6047
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/domain/ResourceTask.java
@@ -0,0 +1,86 @@
+package nl.healthri.fdp.uploadschema.domain;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.regex.Pattern;
+
+public class ResourceTask {
+ public final String resource;
+ public String UUID;
+ public final String shapeUUUID;
+ public String childUUuid;
+ public String childRelationIri;
+ public String childName;
+ public final boolean exists;
+
+ private static final Logger logger = LoggerFactory.getLogger(ResourceTask.class);
+
+ public ResourceTask(String resource, String uuid, String shapeUUUID, boolean exists) {
+ this.resource = resource;
+ this.UUID = uuid;
+ this.shapeUUUID = shapeUUUID;
+ this.exists = exists;
+ }
+
+ public ResourceTask(String resource, String uuid, String shapeUUUID, String childUUuid, String childRelationIri, String childName, boolean exists) {
+ this.resource = resource;
+ this.UUID = uuid;
+ this.shapeUUUID = shapeUUUID;
+ this.childUUuid = childUUuid;
+ this.childRelationIri = childRelationIri;
+ this.childName = childName;
+ this.exists = exists;
+ }
+
+ public void validate() {
+ if (this.resource == null || this.resource.isEmpty()) {
+ logger.error("Invalid: resource is required");
+ }
+ if (this.UUID == null || this.UUID.isEmpty()) {
+ logger.error("Invalid: UUID is required");
+ }
+ if (this.shapeUUUID == null || this.shapeUUUID.isEmpty()) {
+ logger.error("Invalid: shapeUUUID is required");
+ }
+ if (this.childUUuid != null && (this.childRelationIri == null || this.childRelationIri.isEmpty())) {
+ logger.error("Invalid: childRelationIri should be set if childUUuid is provided");
+ }
+ if (this.childRelationIri != null && (this.childUUuid == null || this.childUUuid.isEmpty())) {
+ logger.error("Invalid: childUUuid should be set if childRelationIri is provided");
+ }
+ if (this.childName != null && this.childName.isEmpty()) {
+ logger.error("Invalid: childName is empty");
+ }
+ }
+
+ public String pluralName() {
+ //FIXME shape datasetSeries is already plural form.
+ if (this.childName.toLowerCase().endsWith("ies")) return this.childName;
+
+ // Rule 1: Words ending in consonant + "y" -> replace "y" with "ies"
+ if (Pattern.matches(".*[^aeiou]y$", this.childName)) {
+ return this.childName.replaceAll("y$", "ies");
+ }
+ // Rule 2: Words ending in "s", "x", "z", "ch", or "sh" -> add "es"
+ else if (Pattern.matches(".*(s|x|z|ch|sh)$", this.childName)) {
+ return this.childName + "es";
+ }
+ // Default rule: Just add "s"
+ else {
+ return this.childName + "s";
+ }
+ }
+
+ public String url() {
+ return this.resource.toLowerCase().replaceAll(" ", "");
+ }
+
+ public boolean isInsert() {
+ return !this.exists;
+ }
+
+ public boolean hasChild() {
+ return this.childUUuid != null;
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/domain/ShapeTask.java b/src/main/java/nl/healthri/fdp/uploadschema/domain/ShapeTask.java
new file mode 100644
index 0000000..93d1d8c
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/domain/ShapeTask.java
@@ -0,0 +1,55 @@
+package nl.healthri.fdp.uploadschema.domain;
+
+import nl.healthri.fdp.uploadschema.domain.enums.ShapeStatus;
+import nl.healthri.fdp.uploadschema.utils.SchemaInfo;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public class ShapeTask {
+ public final String shape;
+ public Version version;
+ public String uuid;
+ public Set parents; // Names of parents for this schema
+ public String model;
+ public ShapeStatus status;
+
+ private static final Logger logger = LoggerFactory.getLogger(ShapeTask.class);
+
+ public ShapeTask(String shape, Version version, String uuid, Set parents, String model, ShapeStatus status) {
+ this.shape = shape;
+ this.version = version;
+ this.uuid = uuid;
+ this.parents = parents;
+ this.model = model;
+ this.status = status;
+ }
+
+ public Set getParentUID(Map schemaMap) {
+ if (this.parents.isEmpty()) {
+ return Collections.emptySet();
+ }
+
+ return this.parents.stream()
+ .map(schemaMap::get) // SchemaInfo
+ .map(SchemaInfo::uuid) // SchemaInfo.UUID
+ .collect(Collectors.toSet());
+ }
+
+ public String description() {
+ return shape;
+ }
+
+ public String url() {
+ return shape.toLowerCase().replaceAll(" ", "");
+ }
+
+ public ShapeStatus status() {
+ return status;
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/Version.java b/src/main/java/nl/healthri/fdp/uploadschema/domain/Version.java
similarity index 97%
rename from src/main/java/nl/healthri/fdp/uploadschema/Version.java
rename to src/main/java/nl/healthri/fdp/uploadschema/domain/Version.java
index f267ee2..787d9a9 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/Version.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/domain/Version.java
@@ -1,4 +1,4 @@
-package nl.healthri.fdp.uploadschema;
+package nl.healthri.fdp.uploadschema.domain;
import java.util.Comparator;
import java.util.Objects;
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/domain/enums/ShapeStatus.java b/src/main/java/nl/healthri/fdp/uploadschema/domain/enums/ShapeStatus.java
new file mode 100644
index 0000000..59f1fe1
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/domain/enums/ShapeStatus.java
@@ -0,0 +1,5 @@
+package nl.healthri.fdp.uploadschema.domain.enums;
+
+public enum ShapeStatus {
+ INSERT, UPDATE, SAME
+}
\ No newline at end of file
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/dto/auth/LoginRequestDto.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/auth/LoginRequestDto.java
new file mode 100644
index 0000000..7e9e331
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/auth/LoginRequestDto.java
@@ -0,0 +1,5 @@
+package nl.healthri.fdp.uploadschema.dto.auth;
+
+public record LoginRequestDto(String email,
+ String password) {
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/dto/auth/LoginResponseDto.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/auth/LoginResponseDto.java
new file mode 100644
index 0000000..0f948fd
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/auth/LoginResponseDto.java
@@ -0,0 +1,7 @@
+package nl.healthri.fdp.uploadschema.dto.auth;
+
+public record LoginResponseDto(String token) {
+ public String asHeaderString() {
+ return "Bearer " + token;
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/dto/resource/ResourceRequestDto.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/resource/ResourceRequestDto.java
new file mode 100644
index 0000000..0efc488
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/resource/ResourceRequestDto.java
@@ -0,0 +1,17 @@
+package nl.healthri.fdp.uploadschema.dto.resource;
+
+import java.util.ArrayList;
+
+public record ResourceRequestDto(String name,
+ String urlPrefix,
+ ArrayList metadataSchemaUuids,
+ ArrayList targetClassUris,
+ ArrayList children,
+ ArrayList externalLinks) {
+
+ public record ResourceChild(String UUID) {
+ }
+
+ public record ResourceLink(String title, String propertyUri) {
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/ResourceResponse.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/resource/ResourceResponseDto.java
similarity index 74%
rename from src/main/java/nl/healthri/fdp/uploadschema/requestresponses/ResourceResponse.java
rename to src/main/java/nl/healthri/fdp/uploadschema/dto/resource/ResourceResponseDto.java
index 92b0251..87b1bfd 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/ResourceResponse.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/resource/ResourceResponseDto.java
@@ -1,8 +1,11 @@
-package nl.healthri.fdp.uploadschema.requestresponses;
+package nl.healthri.fdp.uploadschema.dto.resource;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.ArrayList;
-public record ResourceResponse(
+@JsonIgnoreProperties(ignoreUnknown = true) // Description, abstractschema, definition are ignored.
+public record ResourceResponseDto(
String uuid,
String name,
String urlPrefix,
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/dto/schema/ReleaseSchemaRequestDto.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/schema/ReleaseSchemaRequestDto.java
new file mode 100644
index 0000000..ce253b0
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/schema/ReleaseSchemaRequestDto.java
@@ -0,0 +1,20 @@
+package nl.healthri.fdp.uploadschema.dto.schema;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import nl.healthri.fdp.uploadschema.domain.Version;
+
+public record ReleaseSchemaRequestDto(
+ @JsonProperty("description")
+ String resourceName,
+ boolean published,
+ String version,
+ String major,
+ String minor,
+ String patch) {
+
+ @JsonIgnore
+ public static ReleaseSchemaRequestDto of(String resourceName, boolean published, Version v) {
+ return new ReleaseSchemaRequestDto(resourceName, published, v.toString(), "" + v.major(), "" + v.minor(), "" + v.patch());
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/SchemaDataResponse.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/schema/SchemaDataResponseDto.java
similarity index 90%
rename from src/main/java/nl/healthri/fdp/uploadschema/requestresponses/SchemaDataResponse.java
rename to src/main/java/nl/healthri/fdp/uploadschema/dto/schema/SchemaDataResponseDto.java
index ab2f6a9..6f35960 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/SchemaDataResponse.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/schema/SchemaDataResponseDto.java
@@ -1,8 +1,8 @@
-package nl.healthri.fdp.uploadschema.requestresponses;
+package nl.healthri.fdp.uploadschema.dto.schema;
import java.util.ArrayList;
-public record SchemaDataResponse(
+public record SchemaDataResponseDto(
String uuid,
String name,
Latest latest,
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/EditSchemaParms.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/schema/UpdateSchemaRequestDto.java
similarity index 74%
rename from src/main/java/nl/healthri/fdp/uploadschema/requestbodies/EditSchemaParms.java
rename to src/main/java/nl/healthri/fdp/uploadschema/dto/schema/UpdateSchemaRequestDto.java
index d7fa5e1..6b441f3 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/EditSchemaParms.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/schema/UpdateSchemaRequestDto.java
@@ -1,8 +1,8 @@
-package nl.healthri.fdp.uploadschema.requestbodies;
+package nl.healthri.fdp.uploadschema.dto.schema;
import java.util.Set;
-public record EditSchemaParms(
+public record UpdateSchemaRequestDto(
String name,
String description,
boolean abstractSchema,
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/dto/settings/SettingsRequestDto.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/settings/SettingsRequestDto.java
new file mode 100644
index 0000000..17fa6c5
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/settings/SettingsRequestDto.java
@@ -0,0 +1,37 @@
+package nl.healthri.fdp.uploadschema.dto.settings;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import nl.healthri.fdp.uploadschema.config.fdp.Settings;
+
+import java.util.List;
+
+public record SettingsRequestDto(
+ String clientUrl,
+ String persistentUrl,
+ String appTitle,
+ String appSubtitle,
+ String appTitleFromConfig,
+ String appSubtitleFromConfig,
+ List<MetadataMetric> metadataMetrics,
+ Ping ping,
+ Repository repository,
+ Search search,
+ Forms forms
+) {
+ public record MetadataMetric(String metricUri, String resourceUri) {}
+ public record Ping(boolean enabled, List<String> endpoints, List<String> endpointsFromConfig, String interval) {}
+ public record Repository(String type) {}
+ public record Search(List filters) {}
+ public record Forms(Autocomplete autocomplete) {
+ public record Autocomplete(boolean searchNamespace, List<Source> sources) {
+ public record Source(String rdfType, String sparqlEndpoint, String sparqlQuery) {}
+ }
+ }
+
+ public static SettingsRequestDto convertToDto(Settings settings) {
+ ObjectMapper mapper = new ObjectMapper();
+ return mapper.convertValue(settings, SettingsRequestDto.class);
+ }
+}
+
+
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/dto/settings/SettingsResponseDto.java b/src/main/java/nl/healthri/fdp/uploadschema/dto/settings/SettingsResponseDto.java
new file mode 100644
index 0000000..69e67ed
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/dto/settings/SettingsResponseDto.java
@@ -0,0 +1,58 @@
+package nl.healthri.fdp.uploadschema.dto.settings;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import nl.healthri.fdp.uploadschema.config.fdp.Settings;
+
+import java.util.List;
+
+public record SettingsResponseDto(
+ String clientUrl,
+ String persistentUrl,
+ String appTitle,
+ String appSubtitle,
+ String appTitleFromConfig,
+ String appSubtitleFromConfig,
+ List<MetadataMetric> metadataMetrics,
+ Ping ping,
+ Repository repository,
+ Search search,
+ Forms forms
+) {
+ public record MetadataMetric(
+ String metricUri,
+ String resourceUri
+ ) {}
+
+ public record Ping(
+ boolean enabled,
+ List<String> endpoints,
+ List<String> endpointsFromConfig,
+ String interval
+ ) {}
+
+ public record Repository(
+ String type
+ ) {}
+
+ public record Search(
+ List filters
+ ) {}
+
+ public record Forms(
+ Autocomplete autocomplete
+ ) {
+ public record Autocomplete(
+ boolean searchNamespace,
+ List<Source> sources
+ ) {
+ public record Source(
+ String rdfType,
+ String sparqlEndpoint,
+ String sparqlQuery
+ ) {}
+ }
+ }
+
+}
+
+
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/integrations/FdpClient.java b/src/main/java/nl/healthri/fdp/uploadschema/integrations/FdpClient.java
new file mode 100644
index 0000000..376b4f0
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/integrations/FdpClient.java
@@ -0,0 +1,436 @@
+package nl.healthri.fdp.uploadschema.integrations;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.ReleaseSchemaRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.UpdateSchemaRequestDto;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsRequestDto;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsResponseDto;
+import nl.healthri.fdp.uploadschema.dto.auth.LoginRequestDto;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceResponseDto;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.dto.auth.LoginResponseDto;
+import nl.healthri.fdp.uploadschema.integrations.exceptions.FdpClientException;
+import nl.healthri.fdp.uploadschema.utils.HttpRequestUtils;
+
+import org.apache.http.HttpHeaders;
+import org.apache.http.entity.ContentType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.util.*;
+
+@Component
+public class FdpClient implements FdpClientInterface {
+ private final HttpClient client;
+ private final URI hostname;
+ private final ObjectMapper objectMapper;
+ private String authToken;
+
+ private static final Logger logger = LoggerFactory.getLogger(FdpClient.class);
+
+ public FdpClient(HttpClient client, URI hostname, ObjectMapper objectMapper) {
+ this.client = Objects.requireNonNull(client, "HttpClient must not be null");
+ this.hostname = Objects.requireNonNull(hostname, "URL must not be null");
+ this.objectMapper = Objects.requireNonNull(objectMapper, "ObjectMapper must not be null");
+ }
+
+ public void setAuthToken(LoginResponseDto loginResponse) {
+ this.authToken = loginResponse.asHeaderString();
+ }
+
+ private void isAuthenticated() {
+ if (this.authToken == null || this.authToken.isBlank()) {
+ throw new IllegalStateException("FdpClient is not authenticated, authorization token is null or empty.");
+ }
+ }
+
+ public LoginResponseDto getAuthToken(LoginRequestDto loginRequest) {
+ logger.info("Connecting to FDP at {} as {} ", hostname, loginRequest.email());
+
+ try {
+ URI uri = new URI(this.hostname + "/tokens");
+
+ HttpRequest.BodyPublisher body = HttpRequest.BodyPublishers.ofString(
+ this.objectMapper.writeValueAsString(loginRequest)
+ );
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .POST(body)
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .build();
+
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ // Maps response body to object
+ return this.objectMapper.readValue(response.body(), LoginResponseDto.class);
+
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP during authentication", e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Authentication process was interrupted", e);
+ }
+ }
+
+ public List<SchemaDataResponseDto> fetchSchemas() {
+ logger.info("Fetching metadata schemas from FDP");
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/metadata-schemas");
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .GET()
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request created through the client
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ // Maps response body to object
+ return List.of(objectMapper.readValue(response.body(), SchemaDataResponseDto[].class));
+
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while fetching schemas", e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Request to fetch schemas was interrupted", e);
+ }
+ }
+
+ /**
+ * @param task task, with info about the shape to create,
+ * when the shapes are created it will update this parameter by setting the UUID!
+ */
+ public ResourceResponseDto insertSchema(ShapeTask task, UpdateSchemaRequestDto updateSchemaRequest) {
+ logger.info("Inserting {} schema into FDP", task.shape);
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/metadata-schemas");
+
+ HttpRequest.BodyPublisher body = HttpRequest.BodyPublishers.ofString(
+ this.objectMapper.writeValueAsString(updateSchemaRequest)
+ );
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .POST(body)
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ // Maps response body to object
+ return objectMapper.readValue(response.body(), ResourceResponseDto.class);
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while inserting schema for " + task.shape, e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Schema insertion was interrupted", e);
+ }
+ }
+
+
+
+
+ public void updateSchema(ShapeTask task, UpdateSchemaRequestDto updateSchemaRequest) {
+ logger.info("Updating shape {} in FDP", task.shape);
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/metadata-schemas/" + task.uuid + "/draft");
+
+ HttpRequest.BodyPublisher body = HttpRequest.BodyPublishers.ofString(
+ this.objectMapper.writeValueAsString(updateSchemaRequest)
+ );
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .PUT(body)
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while updating schema for " + task.shape, e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Schema update was interrupted", e);
+ }
+ }
+
+ public void releaseSchema(ShapeTask task, ReleaseSchemaRequestDto releaseSchemaRequest) {
+ logger.info("Releasing {} into FDP", task.shape);
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/metadata-schemas/" + task.uuid + "/versions");
+
+ HttpRequest.BodyPublisher body = HttpRequest.BodyPublishers.ofString(
+ this.objectMapper.writeValueAsString(releaseSchemaRequest)
+ );
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .POST(body)
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while releasing schema " + task.shape, e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Schema release was interrupted", e);
+ }
+ }
+
+ public List<ResourceResponseDto> fetchResources() {
+ logger.info("Fetching resources from fdp");
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/resource-definitions");
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .GET()
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ // Map response to body
+ return List.of(objectMapper.readValue(response.body(), ResourceResponseDto[].class));
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while fetching resources", e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Resource fetch was interrupted", e);
+ }
+ }
+
+ public ResourceResponseDto fetchResource(String resourceId){
+ logger.info("fetching resource {} from FDP", resourceId);
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/resource-definitions/" + resourceId);
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .GET()
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ // Maps response body to object
+ return objectMapper.readValue(response.body(), ResourceResponseDto.class);
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while fetching resource " + resourceId, e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Resource fetch was interrupted", e);
+ }
+ }
+
+ public ResourceResponseDto insertResource(ResourceTask task, ResourceRequestDto resourceRequest) {
+ logger.info("Inserting {} resources into FDP", task.resource);
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/resource-definitions");
+
+ HttpRequest.BodyPublisher body = HttpRequest.BodyPublishers.ofString(
+ this.objectMapper.writeValueAsString(resourceRequest)
+ );
+
+ // Creates request
+ HttpRequest request = HttpRequest.newBuilder()
+ .POST(body)
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ // Maps response body to object
+ return objectMapper.readValue(response.body(), ResourceResponseDto.class);
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while inserting resource " + task.resource, e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Resource insertion was interrupted", e);
+ }
+ }
+
+ public void updateResource(ResourceTask task, ResourceResponseDto resourceResponse) {
+ logger.info("updating resource {} in FDP", task.resource);
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/resource-definitions/" + task.UUID);
+
+ HttpRequest.BodyPublisher body = HttpRequest.BodyPublishers.ofString(
+ this.objectMapper.writeValueAsString(resourceResponse)
+ );
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .PUT(body)
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to reach FDP while updating resource " + task.resource, e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Resource update was interrupted", e);
+ }
+ }
+
+
+ public SettingsResponseDto getSettings() {
+ logger.info("getting settings from FDP");
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/settings");
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .GET()
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+
+ // Maps response body to object
+ return objectMapper.readValue(response.body(), SettingsResponseDto.class);
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to get FDP settings", e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Get settings was interrupted", e);
+ }
+ }
+
+ public void updateSettings(SettingsRequestDto settingsRequestDto) {
+ logger.info("updating settings in FDP");
+
+ try {
+ isAuthenticated();
+
+ URI uri = new URI(this.hostname + "/settings");
+
+ HttpRequest.BodyPublisher body = HttpRequest.BodyPublishers.ofString(
+ this.objectMapper.writeValueAsString(settingsRequestDto)
+ );
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .PUT(body)
+ .uri(uri)
+ .header(HttpHeaders.ACCEPT, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString())
+ .header(HttpHeaders.AUTHORIZATION, this.authToken)
+ .build();
+
+ // Sends request
+ HttpResponse<String> response = this.client.send(request, HttpResponse.BodyHandlers.ofString());
+
+ // Handle each response based on Fair Data Point (FDP) Swagger documentation.
+ HttpRequestUtils.handleResponseStatus(response);
+ } catch (IOException | URISyntaxException e) {
+ throw new FdpClientException("Failed to update settings", e);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new FdpClientException("Update settings was interrupted", e);
+ }
+ }
+
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/integrations/FdpClientInterface.java b/src/main/java/nl/healthri/fdp/uploadschema/integrations/FdpClientInterface.java
new file mode 100644
index 0000000..67d45b1
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/integrations/FdpClientInterface.java
@@ -0,0 +1,34 @@
+package nl.healthri.fdp.uploadschema.integrations;
+
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.ReleaseSchemaRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.UpdateSchemaRequestDto;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsRequestDto;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsResponseDto;
+import nl.healthri.fdp.uploadschema.dto.auth.LoginRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.dto.auth.LoginResponseDto;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceResponseDto;
+import nl.healthri.fdp.uploadschema.integrations.exceptions.FdpClientException;
+
+import java.util.List;
+
+public interface FdpClientInterface {
+ void setAuthToken(LoginResponseDto loginResponse);
+ LoginResponseDto getAuthToken(LoginRequestDto loginRequest) throws FdpClientException;
+
+ List<SchemaDataResponseDto> fetchSchemas() throws FdpClientException;
+ ResourceResponseDto insertSchema(ShapeTask task, UpdateSchemaRequestDto updateSchemaRequest) throws FdpClientException;
+ void updateSchema(ShapeTask task, UpdateSchemaRequestDto updateSchemaRequest) throws FdpClientException;
+ void releaseSchema(ShapeTask task, ReleaseSchemaRequestDto releaseSchemaRequest) throws FdpClientException;
+
+ List<ResourceResponseDto> fetchResources() throws FdpClientException;
+ ResourceResponseDto fetchResource(String resourceId) throws FdpClientException;
+ ResourceResponseDto insertResource(ResourceTask task, ResourceRequestDto resourceRequest) throws FdpClientException;
+ void updateResource(ResourceTask task, ResourceResponseDto resourceResponse) throws FdpClientException;
+
+ SettingsResponseDto getSettings() throws FdpClientException;
+ void updateSettings(SettingsRequestDto settingsRequestDto) throws FdpClientException;
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/integrations/exceptions/FdpClientException.java b/src/main/java/nl/healthri/fdp/uploadschema/integrations/exceptions/FdpClientException.java
new file mode 100644
index 0000000..0c1f471
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/integrations/exceptions/FdpClientException.java
@@ -0,0 +1,9 @@
+package nl.healthri.fdp.uploadschema.integrations.exceptions;
+
+// FdpClientException.java
+public class FdpClientException extends RuntimeException {
+ public FdpClientException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
+
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/ReleaseSchemaParms.java b/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/ReleaseSchemaParms.java
deleted file mode 100644
index f366899..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/ReleaseSchemaParms.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestbodies;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import nl.healthri.fdp.uploadschema.Version;
-
-public record ReleaseSchemaParms(
- @JsonProperty("description")
- String resourceName,
- boolean published,
- String version,
- String major,
- String minor,
- String patch) {
-
- @JsonIgnore
- public static ReleaseSchemaParms of(String resourceName, boolean published, Version v) {
- return new ReleaseSchemaParms(resourceName, published, v.toString(), "" + v.major(), "" + v.minor(), "" + v.patch());
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/ResourceParms.java b/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/ResourceParms.java
deleted file mode 100644
index 1831df6..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/ResourceParms.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestbodies;
-
-import java.util.ArrayList;
-
-public record ResourceParms(String name,
- String urlPrefix,
- ArrayList<String> metadataSchemaUuids,
- ArrayList<String> targetClassUris,
- ArrayList<ResourceChild> children,
- ArrayList<ResourceLink> externalLinks) {
-
- public record ResourceChild(String UUID) {
- }
-
- public record ResourceLink(String title, String propertyUri) {
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/SchemaParms.java b/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/SchemaParms.java
deleted file mode 100644
index a525c92..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/SchemaParms.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestbodies;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public record SchemaParms(boolean drafts,
- @JsonProperty("abstract") boolean abstractSchema) {
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/loginParms.java b/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/loginParms.java
deleted file mode 100644
index b420547..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestbodies/loginParms.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestbodies;
-
-public record loginParms(String email,
- String password) {
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/FDPInfoResponse.java b/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/FDPInfoResponse.java
deleted file mode 100644
index d6e3946..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/FDPInfoResponse.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestresponses;
-
-public record FDPInfoResponse(String name, String version, String builtAt) {
- @Override
-
- public String toString() {
- return name + " (" + version + ") build: " + builtAt;
- }
-}
-
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/ResourceEditResponse.java b/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/ResourceEditResponse.java
deleted file mode 100644
index 4448054..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/ResourceEditResponse.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestresponses;
-
-import nl.healthri.fdp.uploadschema.requestbodies.ResourceParms;
-
-import java.util.ArrayList;
-
-public record ResourceEditResponse(String uuid,
- String name,
- String urlPrefix,
- ArrayList<String> metadataSchemaUuids,
- ArrayList<String> targetClassUris,
- ArrayList<ResourceChild> children,
- ArrayList<ResourceLink> externalLinks) {
-
- public record ResourceChild(String UUID) {
- }
-
- public record ResourceLink(String title, String propertyUri) {
- }
-
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/SchemaEdit.java b/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/SchemaEdit.java
deleted file mode 100644
index 35de85d..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/SchemaEdit.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestresponses;
-
-import java.util.HashSet;
-
-public record SchemaEdit(
- String uuid,
- String name,
- String description,
- boolean abstractSchema,
- String definition,
- HashSet<String> extendsSchemaUuids,
- String suggestedResourceName,
- String suggestedUrlPrefix,
- String lastVersion
-) {
-
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/TokenResponse.java b/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/TokenResponse.java
deleted file mode 100644
index 5822266..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/requestresponses/TokenResponse.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package nl.healthri.fdp.uploadschema.requestresponses;
-
-public record TokenResponse(String token) {
- public String asHeaderString() {
- return "Bearer " + token;
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/FdpService.java b/src/main/java/nl/healthri/fdp/uploadschema/services/FdpService.java
new file mode 100644
index 0000000..cf8a5e5
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/FdpService.java
@@ -0,0 +1,142 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.config.fdp.Settings;
+import nl.healthri.fdp.uploadschema.domain.Version;
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.ReleaseSchemaRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.UpdateSchemaRequestDto;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsRequestDto;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsResponseDto;
+import nl.healthri.fdp.uploadschema.dto.auth.LoginRequestDto;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.dto.auth.LoginResponseDto;
+import nl.healthri.fdp.uploadschema.integrations.FdpClientInterface;
+import nl.healthri.fdp.uploadschema.integrations.exceptions.FdpClientException;
+import nl.healthri.fdp.uploadschema.utils.SchemaInfo;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceResponseDto;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.util.*;
+
+import static nl.healthri.fdp.uploadschema.config.fdp.Settings.*;
+
+
+@Service
+public class FdpService implements FdpServiceInterface {
+ private final FdpClientInterface fdpClient;
+
+ private static final Logger logger = LoggerFactory.getLogger(FdpService.class);
+
+ @Autowired
+ public FdpService(FdpClientInterface fdpClient) {
+ this.fdpClient = fdpClient;
+ }
+
+ public void authenticate(String username, String password) throws FdpClientException {
+ LoginRequestDto loginRequest = new LoginRequestDto(username, password);
+ LoginResponseDto loginResponse = fdpClient.getAuthToken(loginRequest);
+ fdpClient.setAuthToken(loginResponse);
+ }
+
+ public List<SchemaDataResponseDto> getAllSchemas() throws FdpClientException{
+ return fdpClient.fetchSchemas();
+ }
+
+ public void createSchema(ShapeTask task) throws FdpClientException {
+ List<SchemaDataResponseDto> schemaDataResponseList = getAllSchemas();
+
+ Map<String, SchemaInfo> schemaInfoMap = new HashMap<>();
+ for(SchemaDataResponseDto schemaDataResponse : schemaDataResponseList) {
+ Version version = new Version(schemaDataResponse.latest().version());
+ SchemaInfo schemaInfo = new SchemaInfo(version, schemaDataResponse.uuid(), schemaDataResponse.latest().definition());
+ schemaInfoMap.put(schemaDataResponse.name(), schemaInfo);
+ }
+
+
+ UpdateSchemaRequestDto updateSchemaRequest = new UpdateSchemaRequestDto(
+ task.shape,
+ task.description(), false,
+ task.model,
+ task.getParentUID(schemaInfoMap),
+ task.shape,
+ task.url());
+
+ ResourceResponseDto resourceResponse = fdpClient.insertSchema(task, updateSchemaRequest);
+ task.uuid = resourceResponse.uuid();
+ }
+
+
+ public void updateSchema(ShapeTask task) throws FdpClientException {
+ List<SchemaDataResponseDto> schemaDataResponseList = getAllSchemas();
+
+ Map<String, SchemaInfo> schemaInfoMap = new HashMap<>();
+ for(SchemaDataResponseDto schemaDataResponse : schemaDataResponseList) {
+ Version version = new Version(schemaDataResponse.latest().version());
+ SchemaInfo schemaInfo = new SchemaInfo(version, schemaDataResponse.uuid(), schemaDataResponse.latest().definition());
+ schemaInfoMap.put(schemaDataResponse.name(), schemaInfo);
+ }
+
+ UpdateSchemaRequestDto updateSchemaRequest = new UpdateSchemaRequestDto(
+ task.shape,
+ task.description(), false,
+ task.model,
+ task.getParentUID(schemaInfoMap),
+ task.shape,
+ task.url());
+
+ fdpClient.updateSchema(task, updateSchemaRequest);
+ }
+
+ public void releaseSchema(ShapeTask task) throws FdpClientException{
+ ReleaseSchemaRequestDto releaseSchemaRequest = ReleaseSchemaRequestDto.of(task.shape, false, task.version);
+
+ fdpClient.releaseSchema(task, releaseSchemaRequest);
+ }
+
+ public List<ResourceResponseDto> getAllResources() throws FdpClientException{
+ return fdpClient.fetchResources();
+ }
+
+ public void createResource(ResourceTask task) throws FdpClientException{
+ ResourceRequestDto resourceRequest = new ResourceRequestDto(
+ task.resource,
+ task.url(),
+ new ArrayList<>(List.of(task.shapeUUUID)),
+ new ArrayList<>(),
+ new ArrayList<>(),
+ new ArrayList<>());
+
+ ResourceResponseDto resourceResponse = fdpClient.insertResource(task, resourceRequest);
+ task.UUID = resourceResponse.uuid();
+ }
+
+ public void updateResource(ResourceTask task) throws FdpClientException{
+ ResourceResponseDto resourceResponse = fdpClient.fetchResource(task.UUID);
+
+ if (resourceResponse.children().stream().anyMatch(c -> c.resourceDefinitionUuid().equals(task.childUUuid))) {
+ logger.info("resource {} already has link to child {}", resourceResponse.name(), task.childName);
+ } else {
+ //FIXME TagsURI is hardcoded..
+ ResourceResponseDto.ListView listView = new ResourceResponseDto.ListView(task.pluralName(), "http://www.w3.org/ns/dcat#themeTaxonomy", new ArrayList<>());
+ ResourceResponseDto.Child child = new ResourceResponseDto.Child(task.childUUuid, task.childRelationIri, listView);
+ resourceResponse.children().add(child);
+ }
+
+ fdpClient.updateResource(task, resourceResponse);
+ }
+
+ public void updateSettings(Settings newSettings){
+ SettingsResponseDto fdpSettingsResponseDto = fdpClient.getSettings();
+
+ Settings fdpSettings = convertToEntity(fdpSettingsResponseDto);
+ Settings mergedSettings = fdpSettings.Merge(newSettings);
+
+ SettingsRequestDto settingsRequestDto = SettingsRequestDto.convertToDto(mergedSettings);
+ fdpClient.updateSettings(settingsRequestDto);
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/FdpServiceInterface.java b/src/main/java/nl/healthri/fdp/uploadschema/services/FdpServiceInterface.java
new file mode 100644
index 0000000..c8ef0a1
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/FdpServiceInterface.java
@@ -0,0 +1,25 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.config.fdp.Settings;
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceResponseDto;
+
+import java.util.List;
+
+public interface FdpServiceInterface {
+ void authenticate(String username, String password);
+
+ List getAllSchemas();
+
+ void createSchema(ShapeTask task);
+ void updateSchema(ShapeTask task);
+ void releaseSchema(ShapeTask task);
+
+ List getAllResources();
+ void createResource(ResourceTask task);
+ void updateResource(ResourceTask task);
+
+ void updateSettings(Settings settings);
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/ResourceTaskService.java b/src/main/java/nl/healthri/fdp/uploadschema/services/ResourceTaskService.java
new file mode 100644
index 0000000..4d69ef0
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/ResourceTaskService.java
@@ -0,0 +1,125 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceResponseDto;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.config.fdp.Properties;
+import nl.healthri.fdp.uploadschema.utils.ResourceInfo;
+import nl.healthri.fdp.uploadschema.utils.SchemaInfo;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+import java.util.Map;
+
+import static nl.healthri.fdp.uploadschema.utils.ResourceInfo.createResourceInfoMap;
+import static nl.healthri.fdp.uploadschema.utils.SchemaInfo.createSchemaInfoMap;
+
+@Service
+public class ResourceTaskService implements ResourceTaskServiceInterface {
+ public FdpServiceInterface fdpService;
+ public Properties properties;
+
+ public ResourceTaskService(FdpServiceInterface fdpService, Properties properties) {
+ this.fdpService = fdpService;
+ this.properties = properties;
+ }
+
+ protected record ResourceData(
+ String resourceUUID,
+ boolean exists) {}
+
+ protected record ParentResourceData(
+ String parentResourceName,
+ String parentResourceUuid,
+ String childUuid,
+ String childIri,
+ String childName,
+ boolean exists) {}
+
+
+ public List createTasks() {
+ List fdpResourceResponseList = this.fdpService.getAllResources();
+ Map fdpResourceInfoMap = createResourceInfoMap(fdpResourceResponseList);
+
+ List fdpSchemaDataResponseList = this.fdpService.getAllSchemas();
+ Map fdpSchemaInfoMap = createSchemaInfoMap(fdpSchemaDataResponseList);
+
+ return properties.resources.entrySet().stream().map(propertyResource -> {
+ String resourceName = propertyResource.getKey();
+
+ // Looks up the resource's UUID and existence flag in fdpResourceInfoMap for the new ResourceTask
+ ResourceData resourceData = getResourceInfo(resourceName, fdpResourceInfoMap);
+ String schemaUUID = getSchemaUUID(resourceName, propertyResource.getValue().schema(), fdpSchemaInfoMap);
+
+ return new ResourceTask(
+ resourceName,
+ resourceData.resourceUUID,
+ schemaUUID,
+ resourceData.exists
+ );
+ }).toList();
+ }
+
+ public List createParentTasks() {
+ List fdpResourceResponseList = this.fdpService.getAllResources();
+ Map fdpResourceInfoMap = createResourceInfoMap(fdpResourceResponseList);
+
+ return this.properties.resources.entrySet().stream().map(propertyResource -> {
+ // Looks up the parent resource's UUID and child-link info in fdpResourceInfoMap for the new ResourceTask
+ ParentResourceData parentResourceData = getParentResourceInfo(propertyResource, fdpResourceInfoMap);
+
+ return new ResourceTask(
+ parentResourceData.parentResourceName,
+ parentResourceData.parentResourceUuid,
+ null,
+ parentResourceData.childUuid,
+ parentResourceData.childIri,
+ parentResourceData.childName,
+ parentResourceData.exists
+ );
+ }).toList();
+ }
+
+ // Reads the parent resource name from the property entry and builds the parent/child link data.
+ protected ParentResourceData getParentResourceInfo(Map.Entry propertyResource, Map fdpResourceInfoMap) {
+ String propertyResourceParentName = propertyResource.getValue().parentResource();
+
+ ResourceInfo fdpResourceInfo = fdpResourceInfoMap.get(propertyResourceParentName);
+ if (fdpResourceInfo == null) {
+ return new ParentResourceData(propertyResourceParentName, null, null, null, null, false);
+ }
+
+ String parentResourceUuid = fdpResourceInfo.uuid();
+ String childName = propertyResource.getKey();
+ String childIri = propertyResource.getValue().parentRelationIri();
+ String childUuid = fdpResourceInfoMap.get(childName).uuid();
+ boolean exists = true;
+
+ return new ParentResourceData(
+ propertyResourceParentName,
+ parentResourceUuid,
+ childUuid,
+ childIri,
+ childName,
+ exists);
+ }
+
+ protected ResourceData getResourceInfo(String resourceName, Map fdpResourceInfoMap) {
+ ResourceInfo fdpResourceInfo = fdpResourceInfoMap.get(resourceName);
+ if (fdpResourceInfo == null) {
+ return new ResourceData("", false);
+ }
+
+ return new ResourceData(fdpResourceInfo.uuid(), true);
+ }
+
+ protected String getSchemaUUID(String resourceName, String schema, Map fdpSchemaInfoMap) {
+ String name = (schema == null || schema.isBlank()) ? resourceName : schema;
+ SchemaInfo schemaInfo = fdpSchemaInfoMap.get(name);
+ if (schemaInfo == null) {
+ return "";
+ }
+
+ return schemaInfo.uuid();
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/ResourceTaskServiceInterface.java b/src/main/java/nl/healthri/fdp/uploadschema/services/ResourceTaskServiceInterface.java
new file mode 100644
index 0000000..a3280b9
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/ResourceTaskServiceInterface.java
@@ -0,0 +1,10 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+
+import java.util.List;
+
+public interface ResourceTaskServiceInterface {
+ List createTasks();
+ List createParentTasks();
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/SchemaToolService.java b/src/main/java/nl/healthri/fdp/uploadschema/services/SchemaToolService.java
new file mode 100644
index 0000000..281d2f2
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/SchemaToolService.java
@@ -0,0 +1,112 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+import nl.healthri.fdp.uploadschema.utils.FileHandler;
+import nl.healthri.fdp.uploadschema.config.fdp.Properties;
+import nl.healthri.fdp.uploadschema.utils.RdfUtils;
+import nl.healthri.fdp.uploadschema.utils.XlsToRdfUtils;
+import org.eclipse.rdf4j.model.Model;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.List;
+
+import static java.util.function.Predicate.not;
+
+@Service
+public class SchemaToolService implements SchemaToolServiceInterface {
+ public FdpServiceInterface fdpService;
+ public ResourceTaskServiceInterface resourceTaskService;
+ public ShapeTaskServiceInterface shapeTaskService;
+ public Properties properties;
+ public FileHandler fileHandler;
+
+ private static final Logger logger = LoggerFactory.getLogger(SchemaToolService.class);
+
+ public SchemaToolService(FdpServiceInterface fdpService, ResourceTaskServiceInterface resourceTaskService, ShapeTaskServiceInterface shapeTaskService, Properties properties, FileHandler fileHandler) {
+ this.fdpService = fdpService;
+ this.resourceTaskService = resourceTaskService;
+ this.shapeTaskService = shapeTaskService;
+ this.properties = properties;
+ this.fileHandler = fileHandler;
+ }
+
+ public void createOrUpdateSchemas(boolean force) throws IOException {
+ logger.info("Creating/updating schemas from tasks to FDP");
+
+ List shapeTaskList = shapeTaskService.createTasks();
+ shapeTaskList.forEach(task -> {
+ switch (task.status()) {
+ case INSERT -> {
+ fdpService.createSchema(task);
+ fdpService.releaseSchema(task);
+ }
+ case SAME -> {
+ if (force) {
+ fdpService.updateSchema(task);
+ fdpService.releaseSchema(task);
+ logger.info("Schema {} is updated, it was the same but force was set", task.shape);
+ } else {
+ logger.warn("Schema {} is not updated because it's still the same", task.shape);
+ }
+ }
+ case UPDATE -> {
+ fdpService.updateSchema(task);
+ fdpService.releaseSchema(task);
+ }
+ }
+ });
+ }
+
+ public void convertTemplatesToShaclShapes() throws IOException {
+ logger.info("reading templates from {} ", properties.templateDir);
+
+ for (var e : XlsToRdfUtils.getTemplateFiles(properties.templateDir).entrySet()) {
+ logger.info(" converting {} ", e.getValue());
+ Path path = properties.getPiecesDir().resolve(e.getKey() + ".ttl");
+ String shacl = XlsToRdfUtils.createShacl(e.getValue());
+ Files.write(path, shacl.getBytes());
+ }
+ }
+
+ public void mergeShapesToFdpSchemas() throws IOException {
+ logger.info("Writing files: {}", properties.getFiles().keySet());
+
+ for (var e : properties.getFiles(properties.getPiecesDir()).entrySet()) {
+ Path path = properties.getFairDataPointDir().resolve(RdfUtils.schemaToFilename(e.getKey()));
+ Model m = fileHandler.readFiles(e.getValue());
+ fileHandler.safeModel(path, m);
+ }
+ }
+
+ public void mergeShapesForValidation() throws IOException {
+ logger.info("Merging files: {}", properties.getValidationDir());
+
+ Path path = properties.getValidationDir().resolve("HRI-Datamodel-shapes.ttl");
+ logger.info("Write validation file {} combining {} files", path, properties.getAllFiles().size());
+ Model m = fileHandler.readFiles(new ArrayList<>(properties.getAllFiles()));
+ fileHandler.safeModel(path, m);
+ }
+
+ public void addResourceDescriptions() {
+ logger.info("Adding resource descriptions from resource tasks to FDP");
+
+ List resourceTaskList = this.resourceTaskService.createTasks();
+ resourceTaskList.stream().filter(ResourceTask::isInsert).forEach(this.fdpService::createResource);
+
+ if (resourceTaskList.stream().noneMatch(not(ResourceTask::isInsert))) {
+ logger.warn("Updating resources is not supported yet, but will try to add children if needed)");
+ }
+
+ // Add the previously created resources as children to their parent resources.
+ List resourceTasksParents = resourceTaskService.createParentTasks();
+ resourceTasksParents.stream().filter(ResourceTask::hasChild).forEach(fdpService::updateResource);
+ }
+
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/SchemaToolServiceInterface.java b/src/main/java/nl/healthri/fdp/uploadschema/services/SchemaToolServiceInterface.java
new file mode 100644
index 0000000..8774eb7
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/SchemaToolServiceInterface.java
@@ -0,0 +1,11 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import java.io.IOException;
+
+public interface SchemaToolServiceInterface {
+ void createOrUpdateSchemas(boolean force) throws IOException;
+ void convertTemplatesToShaclShapes() throws IOException;
+ void mergeShapesToFdpSchemas() throws IOException;
+ void mergeShapesForValidation() throws IOException;
+ void addResourceDescriptions();
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/ShapeTaskService.java b/src/main/java/nl/healthri/fdp/uploadschema/services/ShapeTaskService.java
new file mode 100644
index 0000000..44749af
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/ShapeTaskService.java
@@ -0,0 +1,74 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.domain.Version;
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+import nl.healthri.fdp.uploadschema.domain.enums.ShapeStatus;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.utils.*;
+import nl.healthri.fdp.uploadschema.config.fdp.Properties;
+import org.eclipse.rdf4j.model.Model;
+import org.eclipse.rdf4j.model.util.Models;
+import org.springframework.stereotype.Service;
+
+import java.net.URI;
+import java.util.*;
+import java.util.List;
+
+import static nl.healthri.fdp.uploadschema.utils.SchemaInfo.createSchemaInfoMap;
+
+@Service
+public class ShapeTaskService implements ShapeTaskServiceInterface {
+ public FdpServiceInterface fdpService;
+ public FileHandler fileHandler;
+ public Properties properties;
+
+ public ShapeTaskService(FdpServiceInterface fdpServiceInterface, FileHandler fileHandler, Properties properties) {
+ this.fdpService = fdpServiceInterface;
+ this.fileHandler = fileHandler;
+ this.properties = properties;
+ }
+
+ public List createTasks() {
+ Map> files = this.properties.getFiles();
+ List schemaDataResponseList = this.fdpService.getAllSchemas();
+ Map shapesOnFdp = createSchemaInfoMap(schemaDataResponseList);
+
+ //list of the tasks we have to perform for inserting/updating SHACLs
+ return this.properties.getSchemasToPublish().stream().map(schemaTitle -> {
+ List ttlFiles = Optional.ofNullable(files.get(schemaTitle)).orElseThrow(() -> new NoSuchElementException(schemaTitle + " not present in schema section of yaml-file"));
+ Model newModel = fileHandler.readFiles(ttlFiles);
+ String model = RdfUtils.modelAsTurtleString(newModel);
+ Version requestedVersion = this.properties.getVersion();
+ Set parents = this.properties.getParents(schemaTitle);
+
+ // Builds a shape task for a schema already on the FDP (status SAME or UPDATE)
+ if (shapesOnFdp.containsKey(schemaTitle)) {
+ SchemaInfo matchingFdpSchema = shapesOnFdp.get(schemaTitle);
+ Version version = matchingFdpSchema.version().next(requestedVersion);
+ String uuid = matchingFdpSchema.uuid();
+ Model fdpSchemaModel = RdfUtils.fromTurtleString(matchingFdpSchema.definition());
+ ShapeStatus status = Models.isomorphic(fdpSchemaModel, newModel) ? ShapeStatus.SAME : ShapeStatus.UPDATE;
+
+ return new ShapeTask(
+ schemaTitle,
+ version,
+ uuid,
+ parents,
+ model,
+ status
+ );
+ }
+
+ // Builds a shape task to insert a new schema into the FDP
+ return new ShapeTask(
+ schemaTitle,
+ requestedVersion,
+ "",
+ parents,
+ model,
+ ShapeStatus.INSERT
+ );
+ }).toList();
+ }
+}
+
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/services/ShapeTaskServiceInterface.java b/src/main/java/nl/healthri/fdp/uploadschema/services/ShapeTaskServiceInterface.java
new file mode 100644
index 0000000..a16cffd
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/services/ShapeTaskServiceInterface.java
@@ -0,0 +1,9 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+
+import java.util.List;
+
+public interface ShapeTaskServiceInterface {
+ List createTasks();
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/tasks/ResourceUpdateInsertTask.java b/src/main/java/nl/healthri/fdp/uploadschema/tasks/ResourceUpdateInsertTask.java
deleted file mode 100644
index 934e617..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/tasks/ResourceUpdateInsertTask.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package nl.healthri.fdp.uploadschema.tasks;
-
-import nl.healthri.fdp.uploadschema.FDP;
-import nl.healthri.fdp.uploadschema.utils.Properties;
-import nl.healthri.fdp.uploadschema.utils.ResourceMap;
-import nl.healthri.fdp.uploadschema.utils.ShapesMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-import java.util.regex.Pattern;
-
-public class ResourceUpdateInsertTask {
-
- private static final Logger logger = LoggerFactory.getLogger(ResourceUpdateInsertTask.class);
-
- public final String resource;
- public String UUID;
- public String shapeUUUID;
- public String childUUuid;
- public String childRelationIri;
- public String childName;
- boolean exists = false;
-
- public ResourceUpdateInsertTask(String resource) {
- this.resource = resource;
- }
-
- public static List createParentTask(Properties p, FDP fdp) {
- var resourcesOnFdp = fdp.fetchResourceFromFDP();
-
- return p.resources.entrySet().stream().map(r -> {
- //now we to update the parent not the resource itself!
- var parentName = r.getValue().parentResource();
- var childName = r.getKey();
- var childIri = r.getValue().parentRelationIri();
- return new ResourceUpdateInsertTask(parentName)
- .addExistingInfo(resourcesOnFdp) //adds uuid
- .addChildInfo(childName, childIri, resourcesOnFdp);
- }).toList();
- }
-
- public static List createTask(Properties p, FDP fdp) {
- var resourcesOnFdp = fdp.fetchResourceFromFDP();
- var shapesOnFdp = fdp.fetchSchemaFromFDP();
- return p.resources.entrySet().stream().map(r -> new ResourceUpdateInsertTask(r.getKey())
- .addExistingInfo(resourcesOnFdp)
- .addShapeUUID(shapesOnFdp, r.getValue().schema())).toList();
- }
-
- public String pluralName() {
- //FIXME shape datasetSeries is already plural form.
- if (childName.toLowerCase().endsWith("ies")) return childName;
-
- // Rule 1: Words ending in consonant + "y" -> replace "y" with "ies"
- if (Pattern.matches(".*[^aeiou]y$", childName)) {
- return childName.replaceAll("y$", "ies");
- }
- // Rule 2: Words ending in "s", "x", "z", "ch", or "sh" -> add "es"
- else if (Pattern.matches(".*(s|x|z|ch|sh)$", childName)) {
- return childName + "es";
- }
- // Default rule: Just add "s"
- else {
- return childName + "s";
- }
- }
-
- public ResourceUpdateInsertTask addShapeUUID(ShapesMap shapes, String schema) {
- String name = schema.isBlank() ? resource : schema;
- var shape = shapes.getUUID(name);
- shape.ifPresentOrElse(s -> shapeUUUID = s,
- () -> logger.error("Can't find shape: {} ", resource));
- return this;
- }
-
- public String url() {
- return resource.toLowerCase().replaceAll(" ", "");
- }
-
- public boolean isInsert() {
- return !exists;
- }
-
- public boolean hasChild() {
- return childUUuid != null;
- }
-
- public ResourceUpdateInsertTask addExistingInfo(ResourceMap resourceOnFdp) {
- var uuid = resourceOnFdp.getUUID(resource);
- exists = uuid.isPresent();
- uuid.ifPresentOrElse(u -> this.UUID = u,
- () -> logger.warn("update of resource is not supported yet"));
- return this;
- }
-
- public ResourceUpdateInsertTask addChildInfo(String name, String relationIri, ResourceMap resourceOnFdp) {
- var uuid = resourceOnFdp.getUUID(name);
- if (uuid.isPresent()) {
- childName = name;
- childRelationIri = relationIri;
- childUUuid = uuid.get();
- } else {
- logger.error("Child resource is not found {} ", name);
- }
- return this;
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/tasks/ShapeUpdateInsertTask.java b/src/main/java/nl/healthri/fdp/uploadschema/tasks/ShapeUpdateInsertTask.java
deleted file mode 100644
index 653a136..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/tasks/ShapeUpdateInsertTask.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package nl.healthri.fdp.uploadschema.tasks;
-
-import nl.healthri.fdp.uploadschema.FDP;
-import nl.healthri.fdp.uploadschema.Version;
-import nl.healthri.fdp.uploadschema.utils.Properties;
-import nl.healthri.fdp.uploadschema.utils.RdfUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.net.URI;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-public class ShapeUpdateInsertTask {
- private static final Logger logger = LoggerFactory.getLogger(ShapeUpdateInsertTask.class);
-
- public final String shape;
- public Version version;
- public String uuid;
- //name of parents for this schema.
- public Set parents;
- public String model;
- public boolean exists = false;
-
- public ShapeUpdateInsertTask(String shape) {
- this.shape = shape;
- }
-
- public static List createTasks(Properties p, FDP fdp) {
- final List Shapes = p.schemasToPublish;
- final var files = p.getFiles();
- var shapesOnFdp = fdp.fetchSchemaFromFDP();
- logger.info("found following shapes on fdp: {}", shapesOnFdp.keySet());
-
- //list of the task we have to do for insert/updating shacls
- return Shapes.stream().map(r -> {
- var ShapeUpdateInsertTask = new ShapeUpdateInsertTask(r);
- var requestedVersion = p.getVersion();
- var ttlFiles = Optional.ofNullable(files.get(r)).orElseThrow(() -> new NoSuchElementException(r + " not present in schema section of yaml-file"));
-
- logger.debug("loading model {} using turtle files: {} ", r, ttlFiles.stream().map(URI::toString).collect(Collectors.joining(", ")));
-
- ShapeUpdateInsertTask.model = RdfUtils.modelAsTurtleString(RdfUtils.readFiles(ttlFiles));
- if (shapesOnFdp.isPresent(r)) {
- ShapeUpdateInsertTask.version = shapesOnFdp.getVersion(r).get().next(requestedVersion); //next patch version
- ShapeUpdateInsertTask.uuid = shapesOnFdp.getUUID(r).get();
- ShapeUpdateInsertTask.exists = true;
- } else {
- ShapeUpdateInsertTask.version = requestedVersion;
- ShapeUpdateInsertTask.uuid = "";
- ShapeUpdateInsertTask.exists = false;
- }
- ShapeUpdateInsertTask.parents = p.getParents(r);
- return ShapeUpdateInsertTask;
- }).toList();
- }
-
- public String description() {
- return shape;
- }
-
- public String url() {
- return shape.toLowerCase().replaceAll(" ", "");
- }
-
- public boolean isInsert() {
- return !exists;
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/FileHandler.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/FileHandler.java
new file mode 100644
index 0000000..85ce130
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/utils/FileHandler.java
@@ -0,0 +1,131 @@
+package nl.healthri.fdp.uploadschema.utils;
+
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Model;
+import org.eclipse.rdf4j.model.Namespace;
+import org.eclipse.rdf4j.model.Statement;
+import org.eclipse.rdf4j.model.impl.LinkedHashModel;
+import org.eclipse.rdf4j.rio.*;
+import org.eclipse.rdf4j.rio.helpers.StatementCollector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.Duration;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+// This class handles RDF file loading and saving. It should be
+// injected into your class when you need it. Having it in a separate class
+// simplifies testing.
+public class FileHandler {
+ private static final Logger logger = LoggerFactory.getLogger(FileHandler.class);
+
+ public void safeModel(Path p, Model m) throws IOException {
+ saveModelToStream(Files.newOutputStream(p), m);
+ }
+
+ private void saveModelToStream(OutputStream out, Model m) {
+ RDFWriter writer = Rio.createWriter(RDFFormat.TURTLE, out);
+ writer.startRDF();
+
+ for (Namespace ns : m.getNamespaces()) {
+ writer.handleNamespace(ns.getPrefix(), ns.getName());
+ }
+ for (Statement st : m) {
+ writer.handleStatement(st);
+ }
+ writer.endRDF();
+ }
+
+
+ public Model readFiles(List files){
+ logger.info("reading and parsing Shacl from {}", files.getFirst().toString());
+
+ try {
+ RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE);
+ Model model = new LinkedHashModel();
+ rdfParser.setRDFHandler(new StatementCollector(model));
+ for (URI u : files) {
+ readFile(u, rdfParser);
+ }
+ validateNamespaces(model);
+ return model;
+ } catch (IOException e) {
+ String fileList = files.stream()
+ .map(URI::toString)
+ .collect(Collectors.joining(", "));
+ throw new RuntimeException("Failed to read files: " + fileList, e);
+ }
+ }
+
+
+ private void readFile(URI uri, RDFParser parser) throws IOException {
+ try {
+ InputStream fis = getInputStream(uri);
+ parser.parse(fis);
+ } catch (IOException e) {
+ throw new IOException("Error while reading file: " + uri, e);
+ } catch (RDFParseException e) {
+ throw new IOException("Error parsing RDF file - invalid Turtle syntax: " + uri, e);
+ } catch (RDFHandlerException e) {
+ throw new IOException("Error while processing RDF content from file: " + uri, e);
+ }
+ }
+
+ private InputStream getInputStream(URI uri) throws IOException {
+ if (List.of("http", "https").contains(uri.getScheme().toLowerCase())) {
+ logger.trace("Fetch from github: {}", uri);
+ try (HttpClient client = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).followRedirects(HttpClient.Redirect.NORMAL).build()) {
+ HttpRequest request = HttpRequest.newBuilder().uri(uri).GET().build();
+
+ HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString());
+ if (response.statusCode() / 100 == 2) {
+ return new ByteArrayInputStream(response.body().getBytes());
+ } else {
+ throw new IOException("Failed to fetch file: " + response.statusCode());
+ }
+ } catch (IOException e) {
+ logger.error("Failed to get input stream: {}", e.getMessage());
+ throw new RuntimeException(e);
+ } catch (InterruptedException e){
+ logger.error("Thread was interrupted while getting input stream: {}", e.getMessage());
+ Thread.currentThread().interrupt();
+ throw new RuntimeException(e);
+ }
+ } else {
+ return new FileInputStream(Paths.get(uri).toFile());
+ }
+ }
+
+ private void validateNamespaces(Model model) {
+ // Collect all namespaces used in the model
+ final Set usedNamespaces = model.stream()
+ .flatMap(st -> Set.of(st.getSubject(), st.getPredicate(), st.getObject()).stream())
+ .filter(st -> st instanceof IRI)
+ .map(st -> ((IRI) st).getNamespace())
+ .collect(Collectors.toSet());
+ // Identify unused namespaces (they are only logged here, not removed)
+ Set namespacesToRemove = model.getNamespaces().stream()
+ .filter(ns -> !usedNamespaces.contains(ns.getName()))
+ .collect(Collectors.toSet());
+ logger.info("Following namespace are unused: {}", namespacesToRemove);
+
+ Set prefixes = model.getNamespaces().stream().map(Namespace::getPrefix).collect(Collectors.toSet());
+ if (prefixes.size() != model.getNamespaces().size()) {
+ logger.warn("Duplicate prefixes found.");
+ }
+ Set names = model.getNamespaces().stream().map(Namespace::getName).collect(Collectors.toSet());
+ if (names.size() != model.getNamespaces().size()) {
+ logger.warn("Duplicate namespace found.");
+ }
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/HttpRequestUtils.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/HttpRequestUtils.java
new file mode 100644
index 0000000..58f91f7
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/utils/HttpRequestUtils.java
@@ -0,0 +1,27 @@
+package nl.healthri.fdp.uploadschema.utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.net.URI;
+import java.net.http.HttpResponse;
+
+
+public class HttpRequestUtils {
+ private static final Logger logger = LoggerFactory.getLogger(HttpRequestUtils.class);
+
+ public static void handleResponseStatus(HttpResponse response) throws IOException {
+ String method = response.request().method();
+ int statusCode = response.statusCode();
+ URI uri = response.uri();
+
+ switch (statusCode) {
+ case 200 -> logger.info("[" + statusCode + "]" + " successfull request: " + method + " " + uri);
+ case 400 -> throw new IllegalArgumentException("[" + statusCode + "]" + " bad request: " + method + " " + uri);
+ case 401 -> throw new SecurityException("[" + statusCode + "]" + "Unauthorized: " + method + " " + uri);
+ case 403 -> throw new SecurityException("[" + statusCode + "]" + "Forbidden: " + method + " " + uri);
+ case 404 -> throw new IOException("[" + statusCode + "]" + "Resource Not Found: " + method + " " + uri);
+ case 500 -> throw new IOException("[" + statusCode + "]" + "Internal Server Error: " + method + " " + uri);
+ default -> throw new RuntimeException("[" + statusCode + "]" + "Unexpected HTTP status: " + method + " " + uri);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/ObjectMap.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/ObjectMap.java
deleted file mode 100644
index 9c409bc..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/utils/ObjectMap.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package nl.healthri.fdp.uploadschema.utils;
-
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-
-public abstract class ObjectMap {
- protected Map map;
-
- public boolean isPresent(String name) {
- return map.containsKey(name);
- }
-
- public Set keySet() {
- return map.keySet();
- }
-
- public Optional getValue(String name) {
- return Optional.ofNullable(map.get(name));
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/RdfUtils.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/RdfUtils.java
index 8027022..f9b2145 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/utils/RdfUtils.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/utils/RdfUtils.java
@@ -1,138 +1,29 @@
package nl.healthri.fdp.uploadschema.utils;
-import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Model;
-import org.eclipse.rdf4j.model.Namespace;
-import org.eclipse.rdf4j.model.Statement;
-import org.eclipse.rdf4j.model.impl.LinkedHashModel;
import org.eclipse.rdf4j.rio.RDFFormat;
-import org.eclipse.rdf4j.rio.RDFParser;
-import org.eclipse.rdf4j.rio.RDFWriter;
import org.eclipse.rdf4j.rio.Rio;
-import org.eclipse.rdf4j.rio.helpers.StatementCollector;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.io.*;
-import java.net.URI;
-import java.net.http.HttpClient;
-import java.net.http.HttpRequest;
-import java.net.http.HttpResponse;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.time.Duration;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
+import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
public class RdfUtils {
- private static final Logger logger = LoggerFactory.getLogger(RdfUtils.class);
-
- private RdfUtils() {
- //prevents instantiation
- }
-
- private static void validateNamespaces(Model model) {
- // Collect all namespaces used in the model
- final Set usedNamespaces = model.stream()
- .flatMap(st -> Set.of(st.getSubject(), st.getPredicate(), st.getObject()).stream())
- .filter(st -> st instanceof IRI)
- .map(st -> ((IRI) st).getNamespace())
- .collect(Collectors.toSet());
- // Remove unused namespaces
- Set namespacesToRemove = model.getNamespaces().stream()
- .filter(ns -> !usedNamespaces.contains(ns.getName()))
- .collect(Collectors.toSet());
- logger.info("Following namespace are unused: {}", namespacesToRemove);
-
- Set prefixes = model.getNamespaces().stream().map(Namespace::getPrefix).collect(Collectors.toSet());
- if (prefixes.size() != model.getNamespaces().size()) {
- logger.warn("Duplicate prefixes found.");
- }
- Set names = model.getNamespaces().stream().map(Namespace::getName).collect(Collectors.toSet());
- if (names.size() != model.getNamespaces().size()) {
- logger.warn("Duplicate namespace found.");
- }
- }
-
- private static void readFile(URI uri, RDFParser parser) throws IOException {
- logger.debug("reading {}", uri.getPath());
- try {
- InputStream fis = getInputStream(uri);
- parser.parse(fis);
- } catch (Exception e) {
- logger.error(e.toString());
- }
- }
-
- public static Model readFiles(List files) {
+ public static Model fromTurtleString(String s) {
try {
- logger.info("reading shacls from {}", files.getFirst().toString());
- RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE);
- Model model = new LinkedHashModel();
- rdfParser.setRDFHandler(new StatementCollector(model));
- for (URI u : files) {
- readFile(u, rdfParser);
- }
- validateNamespaces(model);
- return model;
+ return Rio.parse(new StringReader(s), RDFFormat.TURTLE);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
- @SuppressWarnings("unused")
- public static void printModelAsTurtle(Model m) {
- saveModelToStream(System.out, m);
- }
-
- public static String modelAsTurtleString(Model m) {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- saveModelToStream(out, m);
- return out.toString();
- }
-
- @SuppressWarnings("unused")
- public static void safeModel(Path p, Model m) throws IOException {
- saveModelToStream(Files.newOutputStream(p), m);
- }
-
- private static void saveModelToStream(OutputStream out, Model m) {
- RDFWriter writer = Rio.createWriter(RDFFormat.TURTLE, out);
- writer.startRDF();
-
- for (Namespace ns : m.getNamespaces()) {
- writer.handleNamespace(ns.getPrefix(), ns.getName());
- }
- for (Statement st : m) {
- writer.handleStatement(st);
- }
- writer.endRDF();
- }
-
- private static InputStream getInputStream(URI uri) throws IOException {
-
- if (List.of("http", "https").contains(uri.getScheme().toLowerCase())) {
- logger.trace("Fetch from github: {}", uri);
- try (HttpClient client = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).followRedirects(HttpClient.Redirect.NORMAL).build()) {
- HttpRequest request = HttpRequest.newBuilder().uri(uri).GET().build();
-
- HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString());
- if (response.statusCode() / 100 == 2) {
- return new ByteArrayInputStream(response.body().getBytes());
- } else {
- throw new IOException("Failed to fetch file: " + response.statusCode());
- }
- } catch (InterruptedException ie) {
- throw new RuntimeException(ie);
- }
- } else {
- return new FileInputStream(Paths.get(uri).toFile());
- }
+ public static String modelAsTurtleString(Model model) {
+ StringWriter sw = new StringWriter();
+ Rio.write(model, sw, RDFFormat.TURTLE);
+ return sw.toString();
}
- public static String schemaToFile(String name) {
+ public static String schemaToFilename(String name) {
return name.replaceAll(" ", "") + ".ttl";
}
}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/RequestBuilder.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/RequestBuilder.java
deleted file mode 100644
index 9ae9425..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/utils/RequestBuilder.java
+++ /dev/null
@@ -1,113 +0,0 @@
-package nl.healthri.fdp.uploadschema.utils;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import nl.healthri.fdp.uploadschema.requestresponses.TokenResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.http.HttpClient;
-import java.net.http.HttpRequest;
-import java.net.http.HttpResponse;
-
-public class RequestBuilder {
-
- private static final Logger logger = LoggerFactory.getLogger(RequestBuilder.class);
-
- private final ObjectMapper mapper;
- private final HttpClient client;
-
- private String body = "";
- private String bearer = "";
- private URI uri = null;
-
- public RequestBuilder(ObjectMapper mapper, HttpClient client) {
- this.mapper = mapper;
- this.client = client;
- }
-
- public RequestBuilder setUri(String uri) {
- this.uri = URI.create(uri);
- return this;
- }
-
- public RequestBuilder setUri(String uri, Object parms) {
- //FIXME, this is ugly
- try {
- String json = mapper.writeValueAsString(parms);
- String p = "?" + json.substring(1, json.length() - 1)
- .replaceAll(",", "&")
- .replaceAll(":", "=")
- .replaceAll("\"", "");
- this.uri = URI.create(uri + p);
- } catch (JsonProcessingException e) {
- logger.error(e.getMessage());
- throw new RuntimeException(e);
- }
- return this;
- }
-
-
- public RequestBuilder setBody(Object obj) {
- try {
- this.body = mapper.writeValueAsString(obj);
- } catch (JsonProcessingException e) {
- throw new RuntimeException(e);
- }
- return this;
- }
-
- public RequestBuilder setToken(TokenResponse token) {
- this.bearer = token == null ? "" : token.asHeaderString();
- return this;
- }
-
- public T post(Class clazz) {
- var b = HttpRequest.newBuilder(uri)
- .POST(HttpRequest.BodyPublishers.ofString(body));
- return run(b, clazz);
- }
-
- public T get(Class clazz) {
- var b = HttpRequest.newBuilder(uri)
- .GET();
- return run(b, clazz);
- }
-
- public T put(Class clazz) {
-
- var b = HttpRequest.newBuilder(uri).PUT(HttpRequest.BodyPublishers.ofString(body));
- return run(b, clazz);
- }
-
- private T run(HttpRequest.Builder builder, Class clazz) {
- try {
-
- var b = builder
- .header("accept", "application/json")
- .header("Content-Type", "application/json");
- var request = !bearer.isBlank() ?
- b.header("Authorization", bearer).build()
- : b.build();
-
- logger.debug("body: {}", body);
- logger.debug("url: {}", uri.toString());
- logger.debug("request: {}", request);
-
- HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString());
- if ((response.statusCode() / 100) == 2) {
- logger.debug("request success: {}", response.statusCode());
- return mapper.readValue(response.body(), clazz);
- }
- throw new RuntimeException("Invalid request: " + response.statusCode() + " -> " + response.body());
- } catch (IOException e) {
- logger.error("url: {}", uri.toString());
- throw new RuntimeException(e);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();//
- }
- return null; //you can't get here...
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/ResourceInfo.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/ResourceInfo.java
new file mode 100644
index 0000000..0b160fa
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/utils/ResourceInfo.java
@@ -0,0 +1,19 @@
+package nl.healthri.fdp.uploadschema.utils;
+
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceResponseDto;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public record ResourceInfo(String name, String uuid) {
+ public static Map<String, ResourceInfo> createResourceInfoMap(List<ResourceResponseDto> resourceResponseList){
+ Map<String, ResourceInfo> fdpResourceMap = new HashMap<>();
+ for(ResourceResponseDto resourceResponse : resourceResponseList) {
+ ResourceInfo resourceInfo = new ResourceInfo(resourceResponse.name(), resourceResponse.uuid());
+ fdpResourceMap.put(resourceResponse.name(), resourceInfo);
+ }
+
+ return fdpResourceMap;
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/ResourceMap.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/ResourceMap.java
deleted file mode 100644
index c3a9b24..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/utils/ResourceMap.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package nl.healthri.fdp.uploadschema.utils;
-
-import nl.healthri.fdp.uploadschema.requestresponses.ResourceResponse;
-
-import java.util.Arrays;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-public class ResourceMap extends ObjectMap {
-
- public ResourceMap(ResourceResponse[] resourceResponses) {
- map = Arrays.stream(resourceResponses).collect(Collectors.toMap(ResourceResponse::name, rr -> new ResourceInfo(rr.name(), rr.uuid())));
- }
-
- public Optional getUUID(String name) {
- return getValue(name).map(ResourceInfo::uuid);
- }
-
- public record ResourceInfo(String name, String uuid) {
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/SchemaInfo.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/SchemaInfo.java
new file mode 100644
index 0000000..a97afea
--- /dev/null
+++ b/src/main/java/nl/healthri/fdp/uploadschema/utils/SchemaInfo.java
@@ -0,0 +1,27 @@
+package nl.healthri.fdp.uploadschema.utils;
+
+import nl.healthri.fdp.uploadschema.domain.Version;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public record SchemaInfo(Version version, String uuid, String definition) {
+ public static Map<String, SchemaInfo> createSchemaInfoMap(List<SchemaDataResponseDto> schemaDataResponseList) {
+ Map<String, SchemaInfo> schemaInfoMap = new HashMap<>();
+ for (SchemaDataResponseDto schemaDataResponse : schemaDataResponseList) {
+ Version version = new Version(schemaDataResponse.latest().version());
+
+ SchemaInfo schemaInfo = new SchemaInfo(
+ version,
+ schemaDataResponse.uuid(),
+ schemaDataResponse.latest().definition()
+ );
+
+ schemaInfoMap.put(schemaDataResponse.name(), schemaInfo);
+ }
+
+ return schemaInfoMap;
+ }
+}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/utils/ShapesMap.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/ShapesMap.java
deleted file mode 100644
index bc455a3..0000000
--- a/src/main/java/nl/healthri/fdp/uploadschema/utils/ShapesMap.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package nl.healthri.fdp.uploadschema.utils;
-
-import nl.healthri.fdp.uploadschema.Version;
-import nl.healthri.fdp.uploadschema.requestresponses.SchemaDataResponse;
-
-import java.util.Arrays;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-public class ShapesMap extends ObjectMap {
-
- public ShapesMap(SchemaDataResponse[] repsonses) {
- this.map = Arrays
- .stream(repsonses)
- .collect(Collectors.toMap(SchemaDataResponse::name, sr -> new SchemaInfo(new Version(sr.latest().version()), sr.uuid())));
- }
-
- public Optional getVersion(String name) {
- return getValue(name).map(SchemaInfo::version);
- }
-
- public Optional getUUID(String name) {
- return getValue(name).map(SchemaInfo::uuid);
- }
-
- public record SchemaInfo(Version version, String uuid) {
- }
-}
diff --git a/src/main/java/nl/healthri/fdp/uploadschema/XlsToRdfUtils.java b/src/main/java/nl/healthri/fdp/uploadschema/utils/XlsToRdfUtils.java
similarity index 98%
rename from src/main/java/nl/healthri/fdp/uploadschema/XlsToRdfUtils.java
rename to src/main/java/nl/healthri/fdp/uploadschema/utils/XlsToRdfUtils.java
index 214ef7c..0ce3dbc 100644
--- a/src/main/java/nl/healthri/fdp/uploadschema/XlsToRdfUtils.java
+++ b/src/main/java/nl/healthri/fdp/uploadschema/utils/XlsToRdfUtils.java
@@ -1,4 +1,4 @@
-package nl.healthri.fdp.uploadschema;
+package nl.healthri.fdp.uploadschema.utils;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
diff --git a/src/test/java/nl/healthri/fdp/uploadschema/config/SettingsTest.java b/src/test/java/nl/healthri/fdp/uploadschema/config/SettingsTest.java
new file mode 100644
index 0000000..d9751d2
--- /dev/null
+++ b/src/test/java/nl/healthri/fdp/uploadschema/config/SettingsTest.java
@@ -0,0 +1,166 @@
+package nl.healthri.fdp.uploadschema.config;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import nl.healthri.fdp.uploadschema.config.fdp.Settings;
+import nl.healthri.fdp.uploadschema.dto.settings.SettingsResponseDto;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+public class SettingsTest {
+
+ @TempDir
+ File tempDir;
+
+ // Helper to reset the static 'settings' instance
+ @BeforeEach
+ void resetSettingsSingleton() throws Exception {
+ Field settingsField = Settings.class.getDeclaredField("settings");
+ settingsField.setAccessible(true);
+ settingsField.set(null, null);
+ }
+
+ // Create a minimal SettingsResponse for merging
+ private SettingsResponseDto createMinimalFdpSettingsResponse() {
+ return new SettingsResponseDto(
+ "clientUrl", "persistentUrl", "appTitle", "appSubtitle",
+ "appTitleConfig", "appSubtitleConfig",
+ List.of(),
+ new SettingsResponseDto.Ping(true, List.of("e1"), List.of("e1c"), "1h"),
+ new SettingsResponseDto.Repository("type"),
+ new SettingsResponseDto.Search(List.of("f1")),
+ new SettingsResponseDto.Forms(
+ new SettingsResponseDto.Forms.Autocomplete(
+ true,
+ List.of(
+ new SettingsResponseDto.Forms.Autocomplete.Source("FdpRdfType1", "fsparql1", "fquery1")
+ )
+ )
+ )
+ );
+ }
+
+ public File createFile(String jsonFdpSettings) throws IOException {
+ // Write the given JSON settings to a temporary file and return it
+ File validFile = new File(tempDir, "settings.json");
+ try (FileWriter writer = new FileWriter(validFile)) {
+ writer.write(jsonFdpSettings);
+ }
+
+ return validFile;
+ }
+
+ @Test
+ public void SourceFoundInFdp_WhenMerging_ReturnsSettingsWithFdpSource() throws IOException {
+ // ARRANGE
+ final String localRdfType = "CommonRdfType";
+ final String localQuery = "LocalQuery";
+
+ final String fdpQuery = "FdpQuery";
+
+ String jsonFdpSettings = "{\"forms\": {\"autocomplete\": {\"sources\": [{\"rdfType\": \"" + localRdfType + "\", \"sparqlQuery\": \"" + localQuery + "\"}]}}}";
+ File file = createFile(jsonFdpSettings);
+ Settings settings = Settings.GetSettings(file);
+
+
+ // Setup FDP Settings with the same rdfType, but different query
+ SettingsResponseDto fdpSettings = new SettingsResponseDto(
+ "clientUrl", "persistentUrl", "appTitle", "appSubtitle",
+ "appTitleConfig", "appSubtitleConfig",
+ List.of(),
+ new SettingsResponseDto.Ping(true, List.of(), List.of(), "1h"),
+ new SettingsResponseDto.Repository("type"),
+ new SettingsResponseDto.Search(List.of()),
+ new SettingsResponseDto.Forms(
+ new SettingsResponseDto.Forms.Autocomplete(
+ true,
+ List.of(
+ new SettingsResponseDto.Forms.Autocomplete.Source(localRdfType, "fsparql", fdpQuery)
+ )
+ )
+ )
+ );
+
+ // ACT
+ settings = settings.Merge(Settings.convertToEntity(fdpSettings));
+
+ // ASSERT
+ assertEquals(1, settings.forms.autocomplete.sources.size(), "Sources with the same rdfType should merge into a single source (FDP overrides local).");
+ }
+
+ @Test
+ public void SourceMissingInFdpSettings_WhenMerging_ReturnsSettingsWithSource() throws IOException {
+ // ARRANGE
+ final String localRdfType = "LocalOnlyRdfType";
+ final String localQuery = "LocalQuery";
+
+ String jsonFdpSettings = "{\"forms\": {\"autocomplete\": {\"sources\": [{\"rdfType\": \"" + localRdfType + "\", \"sparqlQuery\": \"" + localQuery + "\"}]}}}";
+ File file = createFile(jsonFdpSettings);
+ Settings settings = Settings.GetSettings(file);
+ SettingsResponseDto fdpSettings = createMinimalFdpSettingsResponse();
+
+ // ACT
+ settings = settings.Merge(Settings.convertToEntity(fdpSettings));
+
+ // ASSERT
+ assertEquals(2, settings.forms.autocomplete.sources.size(), "Should have both local and FDP source.");
+
+ // Check if the local source is preserved
+ Settings.Forms.Autocomplete.Source localSource = settings.forms.autocomplete.sources.stream()
+ .filter(s -> s.rdfType.equals(localRdfType))
+ .findFirst().orElseThrow(() -> new AssertionError("Local source not found in merged settings"));
+ assertEquals(localQuery, localSource.sparqlQuery, "Local source's query should be preserved.");
+
+ // Check if the FDP source is present
+ settings.forms.autocomplete.sources.stream()
+ .filter(s -> s.rdfType.equals("FdpRdfType1"))
+ .findFirst().orElseThrow(() -> new AssertionError("FDP source not found in merged settings"));
+ }
+
+ @Test
+ public void FileNotFound_WhenGettingSettings_ThrowsFileNotFoundException() {
+ // ARRANGE
+ File nonExistentFile = new File(tempDir, "nonExistent.json");
+
+ // ACT && ASSERT
+ assertThrows(FileNotFoundException.class, () -> Settings.GetSettings(nonExistentFile));
+ }
+
+ @Test
+ public void MalformedJsonFile_WhenGettingSettings_ThrowsIOException() throws IOException {
+ // ARRANGE
+ String jsonFdpSettings = "{ \"forms\": { \"autocomplete\": \"invalid";
+ File file = createFile(jsonFdpSettings);
+
+ // ACT & ASSERT
+ assertThrows(JsonMappingException.class, () -> Settings.GetSettings(file));
+ }
+
+ @Test
+ public void ValidJsonFile_WhenGettingSettings_ReturnsSettings() throws IOException {
+ // ARRANGE
+ final String expectedAppTitle = "TestTitle";
+ String jsonFdpSettings = "{\"appTitle\":\"" + expectedAppTitle + "\", \"forms\": {}}";
+ File file = createFile(jsonFdpSettings);
+
+ // ACT
+ Settings settings = Settings.GetSettings(file);
+
+ // ASSERT
+ assertNotNull(settings, "Settings should not be null.");
+ assertEquals(expectedAppTitle, settings.appTitle, "Settings appTitle should match the file content.");
+
+ // Check singleton logic (calling again returns same instance)
+ Settings settings2 = Settings.GetSettings();
+ assertSame(settings, settings2, "Subsequent calls should return the same singleton instance.");
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/nl/healthri/fdp/uploadschema/services/ResourceTaskServiceTest.java b/src/test/java/nl/healthri/fdp/uploadschema/services/ResourceTaskServiceTest.java
new file mode 100644
index 0000000..b2e24f4
--- /dev/null
+++ b/src/test/java/nl/healthri/fdp/uploadschema/services/ResourceTaskServiceTest.java
@@ -0,0 +1,432 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.domain.ResourceTask;
+import nl.healthri.fdp.uploadschema.domain.Version;
+import nl.healthri.fdp.uploadschema.dto.resource.ResourceResponseDto;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.config.fdp.Properties;
+import nl.healthri.fdp.uploadschema.utils.ResourceInfo;
+import nl.healthri.fdp.uploadschema.utils.SchemaInfo;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static nl.healthri.fdp.uploadschema.utils.ResourceInfo.createResourceInfoMap;
+import static nl.healthri.fdp.uploadschema.utils.SchemaInfo.createSchemaInfoMap;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.*;
+
+class ResourceTaskServiceTest {
+
+ @Mock
+ private FdpService fdpServiceMock;
+ private ResourceTaskService resourceTaskService;
+
+ @BeforeEach
+ void setUp() {
+ fdpServiceMock = mock(FdpService.class);
+ Properties properties = getProperties();
+ resourceTaskService = new ResourceTaskService(fdpServiceMock, properties);
+ }
+
+ private Properties getProperties() {
+ Properties properties = new Properties();
+
+ properties.schemas.put("Catalog", List.of("Catalog.ttl", "Agent.ttl", "Kind.ttl", "PeriodOfTime.ttl"));
+ properties.schemas.put("Dataset", List.of("Dataset.ttl", "Agent.ttl", "Kind.ttl", "PeriodOfTime.ttl", "Attribution.ttl", "Identifier.ttl", "QualityCertificate.ttl", "Relationship.ttl"));
+ properties.schemas.put("Dataset Series", List.of("DatasetSeries.ttl", "Agent.ttl", "PeriodOfTime.ttl", "Kind.ttl"));
+ properties.schemas.put("Resource", List.of("Resource.ttl"));
+ properties.schemas.put("Distribution", List.of("Distribution.ttl", "PeriodOfTime.ttl", "Checksum.ttl"));
+ properties.schemas.put("Data Service", List.of("DataService.ttl", "Agent.ttl", "Kind.ttl", "Identifier.ttl"));
+
+ properties.parentChild.put("Resource", List.of("Dataset", "Catalog", "Data Service"));
+
+ properties.resources.put("Sample Distribution",
+ new Properties.ResourceProperties("Dataset", "http://www.w3.org/ns/adms#sample", "Distribution"));
+ properties.resources.put("Dataset Series",
+ new Properties.ResourceProperties("Dataset", "http://www.w3.org/ns/dcat#inSeries", "Dataset Series"));
+ properties.resources.put("Analytics Distribution",
+ new Properties.ResourceProperties("Dataset", "http://healthdataportal.eu/ns/health#analytics", "Distribution"));
+
+ properties.schemasToPublish = List.of(
+ "Resource",
+ "Catalog",
+ "Dataset",
+ "Dataset Series",
+ "Distribution",
+ "Data Service"
+ );
+
+ properties.schemaVersion = "2.0.0";
+ properties.inputDir = "https://raw.githubusercontent.com/Health-RI/health-ri-metadata/master/Formalisation(shacl)/Core/PiecesShape/";
+ properties.templateDir = "C:\\Users\\PatrickDekker(Health\\templates\\";
+ properties.outputRoot = "C:\\Users\\PatrickDekker(Health\\IdeaProjects\\health-ri-metadata\\Formalisation(shacl)\\Core\\";
+ properties.piecesDir = "PiecesShape";
+ properties.fairDataPointDir = "FairDataPointShape";
+ properties.validationDir = "ValidationShape";
+
+ return properties;
+ }
+
+ List<ResourceResponseDto> getResourceResponseList(String name1, String name2, String name3) {
+ return List.of(
+ new ResourceResponseDto(
+ "1",
+ name1,
+ null, null, null, null, null
+ ),
+ new ResourceResponseDto(
+ "2",
+ name2,
+ null, null, null, null, null
+
+ ),
+ new ResourceResponseDto(
+ "3",
+ name3,
+ null, null, null, null, null
+
+ )
+ );
+ }
+
+ List<ResourceResponseDto> getResourceResponseListWithParent(String name1, String name2, String name3, String name4) {
+ return List.of(
+ new ResourceResponseDto(
+ "1",
+ name1,
+ null, null, null, null, null
+ ),
+ new ResourceResponseDto(
+ "2",
+ name2,
+ null, null, null, null, null
+
+ ),
+ new ResourceResponseDto(
+ "3",
+ name3,
+ null, null, null, null, null
+ ),
+ new ResourceResponseDto(
+ "4",
+ name4,
+ null, null, null, null, null
+ )
+ );
+ }
+
+ List<SchemaDataResponseDto> getSchemaDataResponseList(String name1, String name2, String name3) {
+ return List.of(
+ new SchemaDataResponseDto(
+ "1",
+ name1,
+ new SchemaDataResponseDto.Latest(
+ null,
+ new Version(1, 0, 0).toString(),
+ null,
+ null,
+ null,
+ false,
+ false,
+ true,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null
+ ),
+ null,
+ new ArrayList<>(List.of(new Version(1, 0, 0).toString())),
+ null,
+ null
+ ),
+ new SchemaDataResponseDto(
+ "2",
+ name2,
+ new SchemaDataResponseDto.Latest(
+ null,
+ new Version(1, 0, 0).toString(),
+ null,
+ null,
+ null,
+ false,
+ false,
+ true,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null
+ ),
+ null,
+ new ArrayList<>(List.of(new Version(1, 0, 0).toString())),
+ null,
+ null
+ ),
+ new SchemaDataResponseDto(
+ "3",
+ name3,
+ new SchemaDataResponseDto.Latest(
+ null,
+ new Version(1, 0, 0).toString(),
+ null,
+ null,
+ null,
+ false,
+ false,
+ true,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null,
+ null
+ ),
+ null,
+ new ArrayList<>(List.of(new Version(1, 0, 0).toString())),
+ null,
+ null
+ )
+ );
+ }
+
+
+ @Test
+ void PropertyResourceNotFoundInFdpResourceInfoMap_WhenGettingResourceInfo_ResourceDataEmptyIdAndExistsFalse() {
+ // Arrange
+ List fdpResourceResponseList = getResourceResponseList("resource-not-in-fdp-1", "resource-not-in-fdp-2", "resource-not-in-fdp-3");
+ Map fdpResourceInfoMap = createResourceInfoMap(fdpResourceResponseList);
+
+ // Act & Assert
+ this.resourceTaskService.properties.resources.forEach((key, value) -> {
+ ResourceTaskService.ResourceData resourceData =
+ this.resourceTaskService.getResourceInfo(key, fdpResourceInfoMap);
+
+ // Assert
+ assertEquals("", resourceData.resourceUUID());
+ assertFalse(resourceData.exists());
+ });
+ }
+
+ @Test
+ void PropertyResourceFoundInFdpResourceInfoMap_WhenGettingResourceInfo_ResourceDataIdIsFdpUuidIdAndExistsTrue() {
+ // Arrange
+ List fdpResourceResponseList = getResourceResponseList("Sample Distribution", "Dataset Series", "Analytics Distribution");
+ Map fdpResourceInfoMap = createResourceInfoMap(fdpResourceResponseList);
+
+ // Act & Assert
+ this.resourceTaskService.properties.resources.forEach((key, value) -> {
+ ResourceTaskService.ResourceData resourceData =
+ this.resourceTaskService.getResourceInfo(key, fdpResourceInfoMap);
+
+ // Assert
+ String fdpResourceUuid = fdpResourceInfoMap.get(key).uuid();
+ assertEquals(fdpResourceUuid, resourceData.resourceUUID());
+ assertTrue(resourceData.exists());
+ });
+ }
+
+ @Test
+ void PropertyResourceNotFoundInFdpSchemaInfoMap_WhenGettingSchemaUuid_ReturnsEmptyId() {
+ // Arrange
+ List fdpSchemaDataResponseList = getSchemaDataResponseList("resource-not-in-fdp-1", "resource-not-in-fdp-2", "resource-not-in-fdp-3");
+ Map fdpSchemaInfoMap = createSchemaInfoMap(fdpSchemaDataResponseList);
+
+ when(fdpServiceMock.getAllSchemas()).thenReturn(fdpSchemaDataResponseList);
+
+ // Act & Assert
+ this.resourceTaskService.properties.resources.forEach((key, value) -> {
+ // Act
+ String resourceSchemaId = this.resourceTaskService.getSchemaUUID(key, value.schema(), fdpSchemaInfoMap);
+
+ // Assert
+ assertEquals("", resourceSchemaId);
+ });
+ }
+
+ @Test
+ void PropertyResourceFoundInFdpSchemaInfoMap_WhenGettingSchemaUuid_ReturnsSchemaIdFromFdpSchema() {
+ // Arrange
+ List fdpSchemaDataResponseList = getSchemaDataResponseList("Distribution", "Dataset Series", "Distribution");
+ Map fdpSchemaInfoMap = createSchemaInfoMap(fdpSchemaDataResponseList);
+
+ when(fdpServiceMock.getAllSchemas()).thenReturn(fdpSchemaDataResponseList);
+
+ // Act & Assert
+ this.resourceTaskService.properties.resources.forEach((propertyResourceName, value) -> {
+ String propertyResourceSchema = value.schema();
+
+ // Act
+ String resourceSchemaId = this.resourceTaskService.getSchemaUUID(propertyResourceName, propertyResourceSchema, fdpSchemaInfoMap);
+
+ // Assert
+ String expectedSchemaId = fdpSchemaInfoMap.get(value.schema()).uuid();
+ assertEquals(expectedSchemaId, resourceSchemaId);
+ });
+ }
+
+ @Test
+ void PropertyParentResourceNotFoundInFdpResourceInfoMap_WhenGettingParentResourceInfo_ReturnEmptyParentResourceData(){
+ // Arrange
+ List fdpResourceResponseList = getResourceResponseListWithParent("parent-resource-not-in-fdp", "resource-not-in-fdp-1", "resource-not-in-fdp-2", "resource-not-in-fdp-3");
+ Map fdpResourceInfoMap = createResourceInfoMap(fdpResourceResponseList);
+
+ // Act & Assert
+ this.resourceTaskService.properties.resources.entrySet().forEach(propertyResource -> {
+ ResourceTaskService.ParentResourceData resourceData =
+ this.resourceTaskService.getParentResourceInfo(propertyResource, fdpResourceInfoMap);
+
+ // Assert
+ assertEquals(propertyResource.getValue().parentResource(), resourceData.parentResourceName());
+ assertNull(resourceData.parentResourceUuid());
+ assertNull(resourceData.childUuid());
+ assertNull(resourceData.childIri());
+ assertNull(resourceData.childName());
+ assertFalse(resourceData.exists());
+ });
+ }
+
+ // TODO:
+ @Test
+ void PropertyParentResourceFoundInFdpResourceInfoMap_WhenGettingParentResourceInfo_ReturnParentResourceDataWithFilledChildInfo(){
+ // Arrange
+
+ List fdpResourceResponseList = getResourceResponseListWithParent("Dataset", "Sample Distribution", "Dataset Series", "Analytics Distribution");
+ Map fdpResourceInfoMap = createResourceInfoMap(fdpResourceResponseList);
+
+ // Act & Assert
+ this.resourceTaskService.properties.resources.entrySet().forEach(propertyResource -> {
+ ResourceTaskService.ParentResourceData resourceData =
+ this.resourceTaskService.getParentResourceInfo(propertyResource, fdpResourceInfoMap);
+
+ // Assert
+ String parentResource = propertyResource.getValue().parentResource();
+ assertEquals(parentResource, resourceData.parentResourceName());
+ assertNotNull(resourceData.parentResourceUuid());
+ assertNotNull(resourceData.childUuid());
+ assertEquals(propertyResource.getValue().parentRelationIri(), resourceData.childIri());
+ assertEquals(propertyResource.getKey(), resourceData.childName());
+ assertTrue(resourceData.exists());
+ });
+ }
+
+ // TODO:
+ // createParents - happy path
+ // createParents - failure path
+ // createTasks - happy path
+ // createTasks - failure path
+
+ // todo:
+ @Test
+ void AllPropertyResourcesFoundInFdpResourceInfoMap_WhenCreatingTasks_ReturnsTasksThatExist() {
+ // Arrange
+
+
+ List fdpResourceResponseList = getResourceResponseList("Sample Distribution", "Dataset Series", "Analytics Distribution");
+ Map fdpResourceMap = createResourceInfoMap(fdpResourceResponseList);
+ when(fdpServiceMock.getAllResources()).thenReturn(fdpResourceResponseList);
+
+ List fdpSchemaDataResponseList = getSchemaDataResponseList("Distribution", "Dataset Series", "Distribution");
+ Map schemaInfoMap = createSchemaInfoMap(fdpSchemaDataResponseList);
+ when(fdpServiceMock.getAllSchemas()).thenReturn(fdpSchemaDataResponseList);
+
+ // Act
+ List<ResourceTask> result = resourceTaskService.createTasks();
+
+ // Assert
+ assertEquals(this.resourceTaskService.properties.resources.size(), result.size());
+ for (ResourceTask task : result) {
+ Properties.ResourceProperties resourceProperty = this.resourceTaskService.properties.resources.get(task.resource);
+ String expectedResourceName = fdpResourceMap.get(task.resource).name();
+ String expectedResourceId = fdpResourceMap.get(task.resource).uuid();
+ String expectedResourceSchemaUuid = schemaInfoMap.get(resourceProperty.schema()).uuid();
+
+ assertEquals(expectedResourceName, task.resource);
+ assertEquals(expectedResourceId, task.UUID);
+ assertEquals(expectedResourceSchemaUuid, task.shapeUUUID);
+ assertTrue(task.exists);
+ }
+ }
+
+ @Test
+ void AllPropertyResourcesNotFoundInFdpResourceInfoMap_WhenCreatingTasks_ReturnsTasksThatDoNotExist() {
+ // Arrange
+ List fdpResourceResponseList = getResourceResponseList("not-in-fdp-1", "not-in-fdp-2", "not-in-fdp-3");
+ when(fdpServiceMock.getAllResources()).thenReturn(fdpResourceResponseList);
+
+ List fdpSchemaDataResponseList = getSchemaDataResponseList("Distribution", "Dataset Series", "Distribution");
+ when(fdpServiceMock.getAllSchemas()).thenReturn(fdpSchemaDataResponseList);
+
+ // Act
+ List<ResourceTask> result = resourceTaskService.createTasks();
+
+ // Assert
+ assertEquals(this.resourceTaskService.properties.resources.size(), result.size());
+ for (ResourceTask task : result) {
+ assertEquals("", task.UUID);
+ assertFalse(task.exists);
+ }
+ }
+
+ @Test
+ void AllPropertyResourcesFoundInFdpResourceInfoMap_WhenCreatingParentTasks_ReturnsTasksWithFilledChildDataAndExistsIsTrue() {
+ // Arrange
+ List fdpResourceResponseList = getResourceResponseListWithParent("Dataset", "Sample Distribution", "Dataset Series", "Analytics Distribution");
+ Map resourceInfoMap = createResourceInfoMap(fdpResourceResponseList);
+ when(fdpServiceMock.getAllResources()).thenReturn(fdpResourceResponseList);
+
+ // Act
+ List<ResourceTask> result = resourceTaskService.createParentTasks();
+
+ assertEquals(this.resourceTaskService.properties.resources.size(), result.size());
+ for (ResourceTask task : result) {
+ Properties.ResourceProperties resourceProperty = this.resourceTaskService.properties.resources.get(task.childName);
+ String expectedParentResourceName = resourceProperty.parentResource();
+ String expectedUuid = resourceInfoMap.get(resourceProperty.parentResource()).uuid();
+ String expectedChildUuid = resourceInfoMap.get(task.childName).uuid();
+ String expectedChildIri = resourceProperty.parentRelationIri();
+
+ // Assert
+ assertEquals(expectedParentResourceName, task.resource);
+ assertEquals(expectedUuid, task.UUID);
+ assertEquals(expectedChildUuid, task.childUUuid);
+ assertEquals(expectedChildIri, task.childRelationIri);
+ assertTrue(task.exists);
+ }
+ }
+
+ @Test
+ void AllPropertyResourcesNotFoundInFdpResourceInfoMap_WhenCreatingParentTasks_ReturnsTasksWithEmptyChildDataAndExistsIsFalse() {
+ // Arrange
+ List fdpResourceResponseList = getResourceResponseListWithParent("not-in-fdp-parent", "not-in-fdp-1", "not-in-fdp-2", "not-in-fdp-3");
+ when(fdpServiceMock.getAllResources()).thenReturn(fdpResourceResponseList);
+
+ // Act
+ List<ResourceTask> result = resourceTaskService.createParentTasks();
+
+ // Assert
+ assertEquals(this.resourceTaskService.properties.resources.size(), result.size());
+ for (ResourceTask task : result) {
+ assertNull(task.childUUuid);
+ assertNull(task.childRelationIri);
+ assertFalse(task.exists);
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/src/test/java/nl/healthri/fdp/uploadschema/services/ShapeTaskServiceTest.java b/src/test/java/nl/healthri/fdp/uploadschema/services/ShapeTaskServiceTest.java
new file mode 100644
index 0000000..c97e4b2
--- /dev/null
+++ b/src/test/java/nl/healthri/fdp/uploadschema/services/ShapeTaskServiceTest.java
@@ -0,0 +1,277 @@
+package nl.healthri.fdp.uploadschema.services;
+
+import nl.healthri.fdp.uploadschema.domain.Version;
+import nl.healthri.fdp.uploadschema.domain.ShapeTask;
+import nl.healthri.fdp.uploadschema.domain.enums.ShapeStatus;
+import nl.healthri.fdp.uploadschema.dto.schema.SchemaDataResponseDto;
+import nl.healthri.fdp.uploadschema.utils.*;
+import nl.healthri.fdp.uploadschema.config.fdp.Properties;
+import org.eclipse.rdf4j.model.Model;
+import org.eclipse.rdf4j.model.impl.LinkedHashModel;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+
+import java.net.URI;
+import java.util.*;
+
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.*;
+
+class ShapeTaskServiceTest {
+
+ @Mock
+ private FdpServiceInterface fdpServiceMock;
+ @Mock
+ private FileHandler fileHandlerMock;
+ @Mock
+ private Properties propertiesMock;
+ private ShapeTaskService shapeTaskService;
+
+ @BeforeEach
+ void setUp() {
+ fdpServiceMock = mock(FdpService.class);
+ fileHandlerMock = mock(FileHandler.class);
+ propertiesMock = mock(Properties.class);
+ shapeTaskService = new ShapeTaskService(fdpServiceMock, fileHandlerMock, propertiesMock);
+ }
+
+ Model newModel() {
+ String ttl = """
+ @prefix ex: .
+ @prefix foaf: .
+
+ ex:Alice a foaf:Person ;
+ foaf:name "Alice" ;
+ foaf:age 30 .
+ """;
+
+ return RdfUtils.fromTurtleString(ttl);
+ }
+
+ Model newDifferentModel() {
+ String ttl = """
+ @prefix ex: .
+ @prefix foaf: .
+
+ ex:Alice a foaf:Person ;
+ foaf:name "Peter" ;
+ foaf:age 40 .
+ """;
+
+ return RdfUtils.fromTurtleString(ttl);
+ }
+
+ @Test
+ void propertiesSchemaTitleFoundInPropertiesFiles_WhenLookingForTitleKeyValue_ReturnsNewShapeTask() {
+ // Arrange
+ String schemaTitle = "TestSchema";
+ URI fileUri = URI.create("file://test-schema.ttl");
+ List uris = List.of(fileUri);
+ Version version = new Version("1.0.0");
+
+ when(propertiesMock.getFiles()).thenReturn(Map.of(schemaTitle, uris));
+ when(propertiesMock.getSchemasToPublish()).thenReturn(List.of(schemaTitle));
+ when(propertiesMock.getVersion()).thenReturn(version);
+ when(propertiesMock.getParents(schemaTitle)).thenReturn(Set.of());
+ when(fdpServiceMock.getAllSchemas()).thenReturn(Collections.emptyList());
+
+ Model model = new LinkedHashModel();
+ when(fileHandlerMock.readFiles(uris)).thenReturn(model);
+
+ // Act
+ List tasks = shapeTaskService.createTasks();
+
+ // Assert
+ assertEquals(1, tasks.size());
+ ShapeTask task = tasks.getFirst();
+ assertEquals(schemaTitle, task.shape);
+ assertEquals(ShapeStatus.INSERT, task.status());
+ assertEquals(version, task.version);
+ }
+
+ @Test
+ void propertiesSchemaTitleNotFoundInPropertiesFiles_WhenLookingForTitleKeyValue_ReturnsNoSuchElementException() {
+ // Arrange
+ String schemaTitle = "MissingSchema";
+
+ // Act
+ when(propertiesMock.getSchemasToPublish()).thenReturn(List.of(schemaTitle));
+ when(propertiesMock.getFiles()).thenReturn(Map.of());
+
+ // Assert
+ assertThrows(NoSuchElementException.class, () -> shapeTaskService.createTasks());
+ }
+
+ @Test
+ void propertiesSchemaTitleFoundInFdpSchemaInfoMap_WhenLookingForTitleKeyValue_ReturnsShapeTaskWithStatusInsert() {
+ // Arrange
+ String schemaTitle = "InsertSchema";
+ URI uri = URI.create("file://insert-schema.ttl");
+ Version version = new Version("1.0.0");
+
+ Model model = newModel();
+ String ttl = RdfUtils.modelAsTurtleString(model);
+
+ SchemaDataResponseDto.Latest latest = new SchemaDataResponseDto.Latest(
+ "uuid-latest", "1.0.0", "vUuid", null, schemaTitle,
+ true, false, true, "type", "origin", "imported", ttl, "desc",
+ new ArrayList<>(), new ArrayList<>(), "res", "prefix"
+ );
+
+ SchemaDataResponseDto fdpSchemaDataResponse = new SchemaDataResponseDto(
+ "uuid-main", schemaTitle, latest, null,
+ new ArrayList<>(), new ArrayList<>(), new ArrayList<>()
+ );
+
+ when(propertiesMock.getFiles()).thenReturn(Map.of(schemaTitle, List.of(uri)));
+ when(propertiesMock.getSchemasToPublish()).thenReturn(List.of(schemaTitle));
+ when(propertiesMock.getVersion()).thenReturn(version);
+ when(propertiesMock.getParents(schemaTitle)).thenReturn(Set.of());
+ when(fdpServiceMock.getAllSchemas()).thenReturn(List.of(fdpSchemaDataResponse));
+ when(fileHandlerMock.readFiles(List.of(uri))).thenReturn(model);
+
+ // Act
+ List tasks = shapeTaskService.createTasks();
+
+ // Assert
+ assertEquals(1, tasks.size());
+ assertEquals(ShapeStatus.SAME, tasks.getFirst().status());
+ }
+
+ @Test
+ void propertiesSchemaTitleNotFoundInFdpSchemaInfoMap_WhenLookingForTitleKeyValue_ReturnsShapeTaskWithStatusSameOrUpdate() {
+ // Arrange
+ String schemaTitle = "InsertSchema";
+ URI uri = URI.create("file://insert-schema.ttl");
+ Version version = new Version("1.0.0");
+
+ when(propertiesMock.getFiles()).thenReturn(Map.of(schemaTitle, List.of(uri)));
+ when(propertiesMock.getSchemasToPublish()).thenReturn(List.of(schemaTitle));
+ when(propertiesMock.getVersion()).thenReturn(version);
+ when(propertiesMock.getParents(schemaTitle)).thenReturn(Set.of());
+ when(fdpServiceMock.getAllSchemas()).thenReturn(Collections.emptyList());
+
+ Model model = newModel();
+ when(fileHandlerMock.readFiles(List.of(uri))).thenReturn(model);
+
+ // Act
+ List tasks = shapeTaskService.createTasks();
+
+ // Assert
+ assertEquals(1, tasks.size());
+ assertEquals(ShapeStatus.INSERT, tasks.getFirst().status());
+ }
+
+ @Test
+ void NoChangesFound_WhenComparingPropertiesFileWithMatchingFdpShapeFile_ReturnsNewShapeTaskWithStatusSame() {
+ // Arrange
+ String schemaTitle = "SameSchema";
+ URI fileUri = URI.create("file://same-schema.ttl");
+ List uris = List.of(fileUri);
+ Version version = new Version("1.0.0");
+
+ Model model = newModel();
+ Model sameModel = newModel();
+ String ttlDifferentModel = RdfUtils.modelAsTurtleString(sameModel);
+
+ SchemaDataResponseDto.Latest latest = new SchemaDataResponseDto.Latest(
+ "uuid-latest",
+ "1.0.0",
+ "versionUuid",
+ null,
+ schemaTitle,
+ true,
+ false,
+ true,
+ "type",
+ "origin",
+ "importedFrom",
+ ttlDifferentModel,
+ "desc",
+ new ArrayList<>(),
+ new ArrayList<>(),
+ "resName",
+ "urlPrefix"
+ );
+
+ SchemaDataResponseDto existingResponse = new SchemaDataResponseDto(
+ "uuid-main",
+ schemaTitle,
+ latest,
+ null,
+ new ArrayList<>(),
+ new ArrayList<>(),
+ new ArrayList<>()
+ );
+
+ when(propertiesMock.getFiles()).thenReturn(Map.of(schemaTitle, uris));
+ when(propertiesMock.getSchemasToPublish()).thenReturn(List.of(schemaTitle));
+ when(propertiesMock.getVersion()).thenReturn(version);
+ when(propertiesMock.getParents(schemaTitle)).thenReturn(Set.of());
+ when(fdpServiceMock.getAllSchemas()).thenReturn(List.of(existingResponse));
+ when(fileHandlerMock.readFiles(uris)).thenReturn(model);
+
+ // Act
+ List tasks = shapeTaskService.createTasks();
+
+ // Assert
+ assertEquals(ShapeStatus.SAME, tasks.getFirst().status());
+ }
+
+ @Test
+ void ChangesFound_WhenComparingPropertiesFileWithMatchingFdpShapeFile_ReturnsNewShapeTaskWithStatusUpdate() {
+ // Arrange
+ String schemaTitle = "SameSchema";
+ URI fileUri = URI.create("file://same-schema.ttl");
+ List uris = List.of(fileUri);
+ Version version = new Version("1.0.0");
+
+ Model model = newModel();
+ Model sameModel = newDifferentModel();
+ String ttlDifferentModel = RdfUtils.modelAsTurtleString(sameModel);
+
+ SchemaDataResponseDto.Latest latest = new SchemaDataResponseDto.Latest(
+ "uuid-latest",
+ "1.0.0",
+ "versionUuid",
+ null,
+ schemaTitle,
+ true,
+ false,
+ true,
+ "type",
+ "origin",
+ "importedFrom",
+ ttlDifferentModel,
+ "desc",
+ new ArrayList<>(),
+ new ArrayList<>(),
+ "resName",
+ "urlPrefix"
+ );
+
+ SchemaDataResponseDto existingResponse = new SchemaDataResponseDto(
+ "uuid-main",
+ schemaTitle,
+ latest,
+ null,
+ new ArrayList<>(),
+ new ArrayList<>(),
+ new ArrayList<>()
+ );
+
+ when(propertiesMock.getFiles()).thenReturn(Map.of(schemaTitle, uris));
+ when(propertiesMock.getSchemasToPublish()).thenReturn(List.of(schemaTitle));
+ when(propertiesMock.getVersion()).thenReturn(version);
+ when(propertiesMock.getParents(schemaTitle)).thenReturn(Set.of());
+ when(fdpServiceMock.getAllSchemas()).thenReturn(List.of(existingResponse));
+ when(fileHandlerMock.readFiles(uris)).thenReturn(model);
+
+ // Act
+ List tasks = shapeTaskService.createTasks();
+
+ // Assert
+ assertEquals(ShapeStatus.UPDATE, tasks.getFirst().status());
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/nl/healthri/fdp/uploadschema/utils/PropertiesYamlTest.java b/src/test/java/nl/healthri/fdp/uploadschema/utils/PropertiesYamlTest.java
index 3fe0370..1a146d4 100644
--- a/src/test/java/nl/healthri/fdp/uploadschema/utils/PropertiesYamlTest.java
+++ b/src/test/java/nl/healthri/fdp/uploadschema/utils/PropertiesYamlTest.java
@@ -1,5 +1,6 @@
package nl.healthri.fdp.uploadschema.utils;
+import nl.healthri.fdp.uploadschema.config.fdp.Properties;
import org.junit.jupiter.api.Test;
import java.io.File;
diff --git a/src/test/java/nl/healthri/fdp/uploadschema/utils/VersionTest.java b/src/test/java/nl/healthri/fdp/uploadschema/utils/VersionTest.java
index 98fb4a8..29337e0 100644
--- a/src/test/java/nl/healthri/fdp/uploadschema/utils/VersionTest.java
+++ b/src/test/java/nl/healthri/fdp/uploadschema/utils/VersionTest.java
@@ -1,6 +1,6 @@
package nl.healthri.fdp.uploadschema.utils;
-import nl.healthri.fdp.uploadschema.Version;
+import nl.healthri.fdp.uploadschema.domain.Version;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;