From 51d8874a8fdf9488615aab7d80e7597f5b0d2341 Mon Sep 17 00:00:00 2001
From: jensroets
Date: Thu, 8 Sep 2022 16:50:02 +0200
Subject: [PATCH 001/693] 94299 Multiple Bitstream deletion endpoint
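This adds a bulk variant of the bitstream DELETE endpoint: a single DELETE on
/api/core/bitstreams with a text/uri-list body that enumerates the self links
of the bitstreams to remove. As a rough usage sketch (not code from this
change; the host, token and UUIDs below are placeholders), a client could call
it with the JDK 11 HttpClient like this:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class BulkBitstreamDeleteExample {
        public static void main(String[] args) throws Exception {
            String base = "http://localhost:8080/server/api/core/bitstreams"; // placeholder host
            String token = "REPLACE_WITH_JWT";                                // placeholder auth token
            // one bitstream self link per line; all bitstreams must belong to the same item
            String uriList = base + "/11111111-1111-1111-1111-111111111111\n"
                    + base + "/22222222-2222-2222-2222-222222222222\n";
            HttpRequest request = HttpRequest.newBuilder(URI.create(base))
                    .header("Content-Type", "text/uri-list")
                    .header("Authorization", "Bearer " + token)
                    .method("DELETE", HttpRequest.BodyPublishers.ofString(uriList))
                    .build();
            HttpResponse<Void> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.discarding());
            // 204 on success; 404 for unknown bitstreams; 422 for non-bitstreams,
            // already deleted bitstreams, or bitstreams spread over different items
            System.out.println(response.statusCode());
        }
    }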
---
.../app/rest/RestResourceController.java | 33 +
.../repository/BitstreamRestRepository.java | 44 +
.../rest/repository/DSpaceRestRepository.java | 18 +
.../app/rest/BitstreamRestRepositoryIT.java | 955 ++++++++++++++++++
4 files changed, 1050 insertions(+)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java
index b82b4830753..24468660f01 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java
@@ -7,6 +7,7 @@
*/
package org.dspace.app.rest;
+import static org.dspace.app.rest.utils.ContextUtil.obtainContext;
import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT;
import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_HEX32;
import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG;
@@ -55,6 +56,8 @@
import org.dspace.app.rest.utils.RestRepositoryUtils;
import org.dspace.app.rest.utils.Utils;
import org.dspace.authorize.AuthorizeException;
+import org.dspace.content.DSpaceObject;
+import org.dspace.core.Context;
import org.dspace.util.UUIDUtils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
@@ -1050,6 +1053,13 @@ public ResponseEntity<RepresentationModel<?>> delete(HttpServletRequest request,
return deleteInternal(apiCategory, model, uuid);
}
+ @RequestMapping(method = RequestMethod.DELETE, consumes = {"text/uri-list"})
+ public ResponseEntity<RepresentationModel<?>> delete(HttpServletRequest request, @PathVariable String apiCategory,
+ @PathVariable String model)
+ throws HttpRequestMethodNotSupportedException {
+ return deleteUriListInternal(request, apiCategory, model);
+ }
+
/**
* Internal method to delete resource.
*
@@ -1067,6 +1077,29 @@ private ResponseEntity<RepresentationModel<?>> deleteI
return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT);
}
+ public ResponseEntity<RepresentationModel<?>> deleteUriListInternal(
+ HttpServletRequest request,
+ String apiCategory,
+ String model)
+ throws HttpRequestMethodNotSupportedException {
+ checkModelPluralForm(apiCategory, model);
+ DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model);
+ Context context = obtainContext(request);
+ List<String> dsoStringList = utils.getStringListFromRequest(request);
+ List<DSpaceObject> dsoList = utils.constructDSpaceObjectList(context, dsoStringList);
+ if (dsoStringList.size() != dsoList.size()) {
+ throw new ResourceNotFoundException("One or more bitstreams could not be found.");
+ }
+ try {
+ repository.delete(dsoList);
+ } catch (ClassCastException e) {
+ log.error("Something went wrong whilst creating the object for apiCategory: " + apiCategory +
+ " and model: " + model, e);
+ return ControllerUtils.toEmptyResponse(HttpStatus.INTERNAL_SERVER_ERROR);
+ }
+ return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT);
+ }
+
/**
* Execute a PUT request for an entity with id of type UUID;
*
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java
index ae3cf91d4c4..f599d993be4 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java
@@ -10,6 +10,8 @@
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
@@ -147,6 +149,48 @@ protected void delete(Context context, UUID id) throws AuthorizeException {
}
}
+ @Override
+ protected void deleteList(Context context, List<DSpaceObject> dsoList)
+ throws SQLException, AuthorizeException {
+ // check if list is empty
+ if (dsoList.isEmpty()) {
+ throw new ResourceNotFoundException("No bitstreams given.");
+ }
+ // check if every DSO is a Bitstream
+ if (dsoList.stream().anyMatch(dso -> !(dso instanceof Bitstream))) {
+ throw new UnprocessableEntityException("Not all given items are bitstreams.");
+ }
+ // check that they're all part of the same Item
+ List<DSpaceObject> items = new ArrayList<>();
+ for (DSpaceObject dso : dsoList) {
+ Bitstream bit = bs.find(context, dso.getID());
+ DSpaceObject bitstreamParent = bs.getParentObject(context, bit);
+ if (bit == null) {
+ throw new ResourceNotFoundException("The bitstream with uuid " + dso.getID() + " could not be found");
+ }
+ // we have to check if the bitstream has already been deleted
+ if (bit.isDeleted()) {
+ throw new UnprocessableEntityException("The bitstream with uuid " + bit.getID()
+ + " was already deleted");
+ } else {
+ items.add(bitstreamParent);
+ }
+ }
+ if (items.stream().distinct().count() > 1) {
+ throw new UnprocessableEntityException("Not all given items are part of the same Item.");
+ }
+ // delete all Bitstreams
+ Iterator<DSpaceObject> iterator = dsoList.iterator();
+ while (iterator.hasNext()) {
+ Bitstream bit = (Bitstream) iterator.next();
+ try {
+ bs.delete(context, bit);
+ } catch (SQLException | IOException e) {
+ throw new RuntimeException(e.getMessage(), e);
+ }
+ }
+ }
+
/**
* Find the bitstream for the provided handle and sequence or filename.
* When a bitstream can be found with the sequence ID it will be returned if the user has "METADATA_READ" access.
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java
index 01f127eca5a..219b7c4123b 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java
@@ -26,6 +26,7 @@
import org.dspace.app.rest.model.RestAddressableModel;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.authorize.AuthorizeException;
+import org.dspace.content.DSpaceObject;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.core.Context;
import org.springframework.beans.factory.BeanNameAware;
@@ -256,6 +257,23 @@ public void deleteAll() {
}
+ public void delete(List<DSpaceObject> dsoList) {
+ Context context = obtainContext();
+ try {
+ getThisRepository().deleteList(context, dsoList);
+ context.commit();
+ } catch (AuthorizeException e) {
+ throw new RESTAuthorizationException(e);
+ } catch (SQLException ex) {
+ throw new RuntimeException(ex.getMessage(), ex);
+ }
+ }
+
+ protected void deleteList(Context context, List<DSpaceObject> list)
+ throws AuthorizeException, SQLException, RepositoryMethodNotImplementedException {
+ throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", "");
+ }
+
@Override
/**
* This method cannot be implemented we required all the find method to be paginated
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java
index f9c1e469fcf..391d9e41933 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java
@@ -13,6 +13,7 @@
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
+import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
@@ -1201,6 +1202,960 @@ public void deleteDeleted() throws Exception {
.andExpect(status().isNotFound());
}
+ @Test
+ public void deleteListOneBitstream() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ String bitstreamContent = "ThisIsSomeDummyText";
+ //Add a bitstream to an item
+ Bitstream bitstream = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
+ bitstream = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream")
+ .withDescription("Description")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID()))
+ .andExpect(status().is(204));
+
+ // Verify 404 after delete
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID()))
+ .andExpect(status().isNotFound());
+ }
+
+ @Test
+ public void deleteListOneOfMultipleBitstreams() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete bitstream1
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().is(204));
+
+ // Verify 404 after delete for bitstream1
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isNotFound());
+
+ // check that bitstream2 still exists
+ getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds()));
+
+ // check that bitstream3 still exists
+ getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds()))
+ ;
+ }
+
+ @Test
+ public void deleteListAllBitstreams() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete all bitstreams
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().is(204));
+
+ // Verify 404 after delete for bitstream1
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isNotFound());
+
+ // Verify 404 after delete for bitstream2
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isNotFound());
+
+ // Verify 404 after delete for bitstream3
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isNotFound());
+ }
+
+ @Test
+ public void deleteListForbidden() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(eperson.getEmail(), password);
+
+ // Delete using a non-admin user; expect 403 Forbidden
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isForbidden());
+
+ // Verify the bitstreams are still here
+ getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isOk());
+
+ getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk());
+
+ getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isOk());
+ }
+
+ @Test
+ public void deleteListUnauthorized() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ // Delete as anonymous
+ getClient().perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isUnauthorized());
+
+ // Verify the bitstreams are still here
+ getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isOk());
+
+ getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk());
+
+ getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isOk());
+ }
+
+ @Test
+ public void deleteListEmpty() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete with empty list throws 404
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content(""))
+ .andExpect(status().isNotFound());
+
+ // Verify the bitstreams are still here
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isOk());
+ }
+
+ @Test
+ public void deleteListNotBitstream() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete with list containing non-Bitstream throws 422
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID()
+ + " \n http://localhost:8080/server/api/core/items/" + publicItem1.getID()))
+ .andExpect(status().is(422));
+
+ // Verify the bitstreams are still here
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isOk());
+ }
+
+ @Test
+ public void deleteListDifferentItems() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. Two public items that are readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ Item publicItem2 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 1 bitstream to each item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem2, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete with list containing Bitstreams from different items throws 422
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().is(422));
+
+ // Verify the bitstreams are still here
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk());
+
+ }
+
+ @Test
+ public void deleteListLogo() throws Exception {
+ // We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ // ** GIVEN **
+ // 1. A community with a logo
+ parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").withLogo("logo_community")
+ .build();
+
+ // 2. A collection with a logo
+ Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection")
+ .withLogo("logo_collection").build();
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Deleting the parentCommunity logo and the collection logo should work.
+ // They have to be deleted separately, otherwise a 422 is thrown because they belong to different parent objects.
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + parentCommunity.getLogo().getID()))
+ .andExpect(status().is(204));
+
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + col.getLogo().getID()))
+ .andExpect(status().is(204));
+
+ // Verify 404 after delete for parentCommunity logo
+ getClient(token).perform(get("/api/core/bitstreams/" + parentCommunity.getLogo().getID()))
+ .andExpect(status().isNotFound());
+
+ // Verify 404 after delete for collection logo
+ getClient(token).perform(get("/api/core/bitstreams/" + col.getLogo().getID()))
+ .andExpect(status().isNotFound());
+ }
+
+ @Test
+ public void deleteListMissing() throws Exception {
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb"))
+ .andExpect(status().isNotFound());
+
+ // Verify 404 after failed delete
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb"))
+ .andExpect(status().isNotFound());
+ }
+
+ @Test
+ public void deleteListOneMissing() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Deleting all existing bitstreams together with a missing one returns 404
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb"))
+ .andExpect(status().isNotFound());
+
+ // Verify the bitstreams are still here
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().isOk());
+ }
+
+ @Test
+ public void deleteListOneMissingDifferentItems() throws Exception {
+
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. Two public items that are readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ Item publicItem2 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 1 bitstream to each item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem2, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Deleting all existing bitstreams together with a missing one returns 404
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb"))
+ .andExpect(status().isNotFound());
+
+ // Verify the bitstreams are still here
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().isOk());
+
+ getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID()))
+ .andExpect(status().isOk());
+
+ }
+
+ @Test
+ public void deleteListDeleted() throws Exception {
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ String bitstreamContent = "ThisIsSomeDummyText";
+ //Add a bitstream to an item
+ Bitstream bitstream = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
+ bitstream = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream")
+ .withDescription("Description")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID()))
+ .andExpect(status().is(204));
+
+ // Verify 422 when trying to delete an already deleted bitstream
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID()))
+ .andExpect(status().is(422));
+ }
+
+ @Test
+ public void deleteListOneDeleted() throws Exception {
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream3")
+ .withDescription("Description3")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete bitstream1
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()))
+ .andExpect(status().is(204));
+
+ // Verify 422 when the list contains an already deleted bitstream
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID()
+ + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID()))
+ .andExpect(status().is(422));
+ }
+
@Test
public void patchBitstreamMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
From 464465560187002f0d50dbd0f6a9f12044a42723 Mon Sep 17 00:00:00 2001
From: jensroets
Date: Wed, 14 Sep 2022 15:49:03 +0200
Subject: [PATCH 002/693] 94299 Multiple Bitstream deletion endpoint: rename
items to parents
---
.../dspace/app/rest/repository/BitstreamRestRepository.java | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java
index f599d993be4..3696b386680 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java
@@ -161,7 +161,7 @@ protected void deleteList(Context context, List<DSpaceObject> dsoList)
throw new UnprocessableEntityException("Not all given items are bitstreams.");
}
// check that they're all part of the same Item
- List<DSpaceObject> items = new ArrayList<>();
+ List<DSpaceObject> parents = new ArrayList<>();
for (DSpaceObject dso : dsoList) {
Bitstream bit = bs.find(context, dso.getID());
DSpaceObject bitstreamParent = bs.getParentObject(context, bit);
@@ -173,10 +173,10 @@ protected void deleteList(Context context, List<DSpaceObject> dsoList)
throw new UnprocessableEntityException("The bitstream with uuid " + bit.getID()
+ " was already deleted");
} else {
- items.add(bitstreamParent);
+ parents.add(bitstreamParent);
}
}
- if (items.stream().distinct().count() > 1) {
+ if (parents.stream().distinct().count() > 1) {
throw new UnprocessableEntityException("Not all given items are part of the same Item.");
}
// delete all Bitstreams
From b05d19ed6caca47fb0f599fb48145223f934b272 Mon Sep 17 00:00:00 2001
From: Andrea Bollini
Date: Tue, 6 Sep 2022 19:17:24 +0200
Subject: [PATCH 003/693] Always use md5 checksum for data integrity check.
Send it to S3 to exclude corruption during upload
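The change copies the incoming stream to the scratch file through a
DigestInputStream, stores the resulting MD5 on the bitstream, and passes the
Base64-encoded digest to S3 as Content-MD5 so the service can reject an upload
that got corrupted in transit. A self-contained sketch of that general pattern
(not the exact patch code; bucket, key, source file and the default credential
chain are placeholders):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.InputStream;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;

    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;
    import com.amazonaws.services.s3.model.ObjectMetadata;
    import com.amazonaws.services.s3.model.PutObjectRequest;
    import org.apache.commons.codec.binary.Base64;

    public class Md5UploadSketch {
        public static void main(String[] args) throws Exception {
            AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient(); // placeholder credentials/region
            String bucket = "my-assetstore";                     // placeholder bucket
            String key = "demo/asset";                           // placeholder key
            File scratch = File.createTempFile("s3-demo", ".tmp");

            MessageDigest md5 = MessageDigest.getInstance("MD5");
            try (InputStream in = new FileInputStream("payload.bin");  // placeholder source file
                 DigestInputStream dis = new DigestInputStream(in, md5);
                 FileOutputStream fos = new FileOutputStream(scratch)) {
                dis.transferTo(fos); // the digest is updated while the bytes are copied
            }
            byte[] digest = md5.digest();

            ObjectMetadata meta = new ObjectMetadata();
            meta.setContentLength(scratch.length());
            meta.setContentMD5(Base64.encodeBase64String(digest)); // S3 verifies this on receipt
            PutObjectRequest put = new PutObjectRequest(bucket, key, scratch);
            put.setMetadata(meta);
            s3.putObject(put);
            scratch.delete();
        }
    }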
---
.../storage/bitstore/S3BitStoreService.java | 30 ++++++++++++++-----
1 file changed, 23 insertions(+), 7 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index 992b940df2b..f5225154db1 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -8,8 +8,12 @@
package org.dspace.storage.bitstore;
import java.io.File;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.security.DigestInputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
import java.util.Map;
import java.util.function.Supplier;
import javax.validation.constraints.NotNull;
@@ -35,7 +39,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.io.FileUtils;
+import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus;
import org.apache.logging.log4j.LogManager;
@@ -258,15 +262,24 @@ public void put(Bitstream bitstream, InputStream in) throws IOException {
String key = getFullKey(bitstream.getInternalId());
//Copy istream to temp file, and send the file, with some metadata
File scratchFile = File.createTempFile(bitstream.getInternalId(), "s3bs");
- try {
- FileUtils.copyInputStreamToFile(in, scratchFile);
- long contentLength = scratchFile.length();
-
+ try (
+ FileOutputStream fos = new FileOutputStream(scratchFile);
+ // Read through a digest input stream that will work out the MD5
+ DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA));
+ ) {
+ Utils.bufferedCopy(dis, fos);
+ in.close();
+ byte[] md5Digest = dis.getMessageDigest().digest();
+ String md5Base64 = Base64.encodeBase64String(md5Digest);
+ ObjectMetadata objMetadata = new ObjectMetadata();
+ objMetadata.setContentMD5(md5Base64);
PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, scratchFile);
PutObjectResult putObjectResult = s3Service.putObject(putObjectRequest);
- bitstream.setSizeBytes(contentLength);
- bitstream.setChecksum(putObjectResult.getETag());
+ bitstream.setSizeBytes(scratchFile.length());
+ // we cannot use the S3 ETag here as it might not be an MD5 in the case of a multipart upload (large files)
+ // or if the bucket is encrypted
+ bitstream.setChecksum(Utils.toHex(md5Digest));
bitstream.setChecksumAlgorithm(CSA);
scratchFile.delete();
@@ -274,6 +287,9 @@ public void put(Bitstream bitstream, InputStream in) throws IOException {
} catch (AmazonClientException | IOException e) {
log.error("put(" + bitstream.getInternalId() + ", is)", e);
throw new IOException(e);
+ } catch (NoSuchAlgorithmException nsae) {
+ // Should never happen
+ log.warn("Caught NoSuchAlgorithmException", nsae);
} finally {
if (scratchFile.exists()) {
scratchFile.delete();
From d7d2723d65a8ff2b0ca9f0cf231db0abc7b2ca5b Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 30 Sep 2022 16:08:36 +0200
Subject: [PATCH 004/693] [DURACOM-92] Use TransferManager to download files
from S3
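Instead of streaming the S3 object directly, the bitstream is first downloaded
to a temporary file with the SDK TransferManager (which handles large,
multi-part objects) and then exposed as an input stream that removes the file
when it is closed. A compact sketch of that flow, with a simplified stand-in
for the new DeleteOnCloseFileInputStream and placeholder bucket/key values:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;
    import com.amazonaws.services.s3.model.GetObjectRequest;
    import com.amazonaws.services.s3.transfer.Download;
    import com.amazonaws.services.s3.transfer.TransferManager;
    import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

    public class TransferManagerDownloadSketch {

        // simplified stand-in for the DeleteOnCloseFileInputStream added by this commit
        static class DeleteOnCloseStream extends FileInputStream {
            private final File file;
            DeleteOnCloseStream(File file) throws IOException {
                super(file);
                this.file = file;
            }
            @Override
            public void close() throws IOException {
                try {
                    super.close();
                } finally {
                    file.delete(); // drop the scratch copy once the caller is done
                }
            }
        }

        static InputStream open(AmazonS3 s3, String bucket, String key) throws Exception {
            File tmp = File.createTempFile("s3-disk-copy", ".tmp");
            tmp.deleteOnExit();
            TransferManager tm = TransferManagerBuilder.standard().withS3Client(s3).build();
            try {
                Download download = tm.download(new GetObjectRequest(bucket, key), tmp);
                download.waitForCompletion(); // blocks until the (possibly multi-part) download finishes
            } finally {
                tm.shutdownNow(false); // keep the injected client usable
            }
            return new DeleteOnCloseStream(tmp);
        }

        public static void main(String[] args) throws Exception {
            // bucket and key are placeholders, not values taken from this patch
            try (InputStream in = open(AmazonS3ClientBuilder.defaultClient(), "my-assetstore", "demo/asset")) {
                System.out.println("first byte: " + in.read());
            }
        }
    }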
---
.../DeleteOnCloseFileInputStream.java | 42 +++++++++++++++++++
.../storage/bitstore/S3BitStoreService.java | 22 ++++++++--
2 files changed, 60 insertions(+), 4 deletions(-)
create mode 100644 dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java
new file mode 100644
index 00000000000..62c24544eea
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java
@@ -0,0 +1,42 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.storage.bitstore;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**
+ * When the input stream is closed, delete the underlying file.
+ * http://stackoverflow.com/a/4694155/368581
+ */
+public class DeleteOnCloseFileInputStream extends FileInputStream {
+
+ private File file;
+
+ public DeleteOnCloseFileInputStream(String fileName) throws FileNotFoundException {
+ this(new File(fileName));
+ }
+
+ public DeleteOnCloseFileInputStream(File file) throws FileNotFoundException {
+ super(file);
+ this.file = file;
+ }
+
+ public void close() throws IOException {
+ try {
+ super.close();
+ } finally {
+ if (file != null) {
+ file.delete();
+ file = null;
+ }
+ }
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index f5225154db1..6f671145891 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -15,6 +15,7 @@
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
+import java.util.UUID;
import java.util.function.Supplier;
import javax.validation.constraints.NotNull;
@@ -32,7 +33,9 @@
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
-import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.transfer.Download;
+import com.amazonaws.services.s3.transfer.TransferManager;
+import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
@@ -238,9 +241,20 @@ public String generateId() {
public InputStream get(Bitstream bitstream) throws IOException {
String key = getFullKey(bitstream.getInternalId());
try {
- S3Object object = s3Service.getObject(new GetObjectRequest(bucketName, key));
- return (object != null) ? object.getObjectContent() : null;
- } catch (AmazonClientException e) {
+ File tempFile = File.createTempFile("s3-disk-copy-" + UUID.randomUUID(), "temp");
+ tempFile.deleteOnExit();
+
+ GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, key);
+
+ TransferManager transferManager = TransferManagerBuilder.standard()
+ .withS3Client(s3Service)
+ .build();
+
+ Download download = transferManager.download(getObjectRequest, tempFile);
+ download.waitForCompletion();
+
+ return new DeleteOnCloseFileInputStream(tempFile);
+ } catch (AmazonClientException | InterruptedException e) {
log.error("get(" + key + ")", e);
throw new IOException(e);
}
From 5210aedbf32d9fda7cde79e407d7ba256aad97a1 Mon Sep 17 00:00:00 2001
From: Andrea Bollini
Date: Sun, 2 Oct 2022 19:44:29 +0200
Subject: [PATCH 005/693] CST-6950 fix checksum check and history
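Besides the checksum history fix, this introduces a trustS3Etag switch: the S3
ETag can stand in for the MD5 only when the object was uploaded in a single
part (multipart uploads produce an "md5-partcount" style ETag that is not a
content MD5); otherwise the object has to be re-read and hashed locally. A
compact illustration of that decision (not the patch code; the names are made
up for the example):

    import java.util.function.Supplier;

    public final class EtagTrustSketch {
        private EtagTrustSketch() { }

        // multipart uploads produce "hash-N" where N is the part count, which is not an MD5 of the content
        static boolean looksLikeMd5(String eTag) {
            return eTag != null && !eTag.contains("-");
        }

        static String checksumFor(String eTag, boolean trustS3Etag, Supplier<String> recompute) {
            if (trustS3Etag && looksLikeMd5(eTag)) {
                return eTag;        // cheap path: reuse what S3 already reports
            }
            return recompute.get(); // otherwise stream the object and compute the MD5 locally
        }

        public static void main(String[] args) {
            System.out.println(checksumFor("9e107d9d372bb6826bd81d3542a419d6", true, () -> "recomputed"));
            System.out.println(checksumFor("9e107d9d372bb6826bd81d3542a419d6-12", true, () -> "recomputed"));
        }
    }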
---
.../checker/ChecksumHistoryServiceImpl.java | 3 +-
.../storage/bitstore/S3BitStoreService.java | 43 ++++++++++++++++++-
dspace/config/modules/storage.cfg | 34 +++++++++++++++
dspace/config/spring/api/bitstore.xml | 1 +
4 files changed, 79 insertions(+), 2 deletions(-)
create mode 100644 dspace/config/modules/storage.cfg
diff --git a/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java
index f8d6560e924..f7b05d4de9d 100644
--- a/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java
@@ -74,7 +74,8 @@ public void addHistory(Context context, MostRecentChecksum mostRecentChecksum) t
if (mostRecentChecksum.getBitstream().isDeleted()) {
checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.BITSTREAM_MARKED_DELETED);
} else {
- checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.CHECKSUM_MATCH);
+ checksumResult = checksumResultService.findByCode(context,
+ mostRecentChecksum.getChecksumResult().getResultCode());
}
checksumHistory.setResult(checksumResult);
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index 6f671145891..b1e26f8624d 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -97,6 +97,7 @@ public class S3BitStoreService extends BaseBitStoreService {
private String awsSecretKey;
private String awsRegionName;
private boolean useRelativePath;
+ private boolean trustS3Etag;
/**
* container for all the assets
@@ -330,7 +331,34 @@ public Map about(Bitstream bitstream, Map attrs) throws IOException {
try {
ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key);
if (objectMetadata != null) {
- return this.about(objectMetadata, attrs);
+ if (attrs.containsKey("size_bytes")) {
+ attrs.put("size_bytes", objectMetadata.getContentLength());
+ }
+ if (attrs.containsKey("checksum")) {
+ String eTag = objectMetadata.getETag();
+ if (trustS3Etag && isMD5Checksum(eTag)) {
+ attrs.put("checksum", eTag);
+ } else {
+ try (
+ InputStream in = get(bitstream);
+ // Read through a digest input stream that will work out the MD5
+ DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA));
+ ) {
+ in.close();
+ byte[] md5Digest = dis.getMessageDigest().digest();
+ String md5Base64 = Base64.encodeBase64String(md5Digest);
+ attrs.put("checksum", md5Base64);
+ } catch (NoSuchAlgorithmException nsae) {
+ // Should never happen
+ log.warn("Caught NoSuchAlgorithmException", nsae);
+ }
+ }
+ attrs.put("checksum_algorithm", CSA);
+ }
+ if (attrs.containsKey("modified")) {
+ attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime()));
+ }
+ return attrs;
}
} catch (AmazonS3Exception e) {
if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
@@ -343,6 +371,11 @@ public Map about(Bitstream bitstream, Map attrs) throws IOException {
return null;
}
+ private boolean isMD5Checksum(String eTag) {
+ // if the ETag is NOT an MD5 it ends with -x, where x is the number of parts used in the multipart upload
+ return !StringUtils.contains(eTag, "-");
+ }
+
/**
* Populates map values by checking key existence
*
@@ -495,6 +528,14 @@ public void setUseRelativePath(boolean useRelativePath) {
this.useRelativePath = useRelativePath;
}
+ public void setTrustS3Etag(boolean trustS3Etag) {
+ this.trustS3Etag = trustS3Etag;
+ }
+
+ public boolean isTrustS3Etag() {
+ return trustS3Etag;
+ }
+
/**
* Contains a command-line testing tool. Expects arguments:
* -a accessKey -s secretKey -f assetFileName
diff --git a/dspace/config/modules/storage.cfg b/dspace/config/modules/storage.cfg
new file mode 100644
index 00000000000..c19aab7c089
--- /dev/null
+++ b/dspace/config/modules/storage.cfg
@@ -0,0 +1,34 @@
+#---------------------------------------------------------------#
+#-----------------STORAGE CONFIGURATIONS------------------------#
+#---------------------------------------------------------------#
+# Configuration properties used by the bitstore.xml config file #
+# #
+#---------------------------------------------------------------#
+
+# Use the localStore or the s3Store implementation
+assetstore.storename.0 = localStore
+
+# To use a relative path (xx/xx/xx/xxx...) set this to true; default is false
+assetstore.s3.useRelativePath = false
+
+## Assetstore S3 configuration, only used if the above configuration
+## is set to s3Store
+
+# S3 bucket name to store assets in, default would generate a bucket
+# based on the dspace host name
+assetstore.s3.bucketName =
+# Subfolder to organize assets within the bucket, in case this bucket
+# is shared. Optional, default is root level of bucket
+assetstore.s3.subfolder =
+
+# please do not use these in production but rely on the aws credentials
+# discovery mechanism to configure them (ENV VAR, EC2 Iam role, etc.)
+assetstore.s3.awsAccessKey =
+assetstore.s3.awsSecretKey =
+# to force the use of a specific region when credentials are provided
+# in this configuration file. If credentials are left empty this prop
+# is ignored
+assetstore.s3.awsRegionName =
+# trust the S3 ETag during the checker process, if it is an MD5 checksum
+# setting it to false will download the file locally to compute the MD5
+assetstore.s3.trustS3Etag = true
\ No newline at end of file
diff --git a/dspace/config/spring/api/bitstore.xml b/dspace/config/spring/api/bitstore.xml
index 15bb3ef1580..ee5328b5bc6 100644
--- a/dspace/config/spring/api/bitstore.xml
+++ b/dspace/config/spring/api/bitstore.xml
@@ -23,6 +23,7 @@
+
From 70b1ee19f3b5d7e5e0894fcbf0300a6f5f4f3faf Mon Sep 17 00:00:00 2001
From: Andrea Bollini
Date: Sun, 2 Oct 2022 20:47:48 +0200
Subject: [PATCH 006/693] CST-6950 fix checksum check
---
.../storage/bitstore/S3BitStoreService.java | 37 ++++++++-----------
1 file changed, 15 insertions(+), 22 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index b1e26f8624d..da170fe0b03 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -334,32 +334,25 @@ public Map about(Bitstream bitstream, Map attrs) throws IOException {
if (attrs.containsKey("size_bytes")) {
attrs.put("size_bytes", objectMetadata.getContentLength());
}
- if (attrs.containsKey("checksum")) {
- String eTag = objectMetadata.getETag();
- if (trustS3Etag && isMD5Checksum(eTag)) {
- attrs.put("checksum", eTag);
- } else {
- try (
- InputStream in = get(bitstream);
- // Read through a digest input stream that will work out the MD5
- DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA));
- ) {
- in.close();
- byte[] md5Digest = dis.getMessageDigest().digest();
- String md5Base64 = Base64.encodeBase64String(md5Digest);
- attrs.put("checksum", md5Base64);
- } catch (NoSuchAlgorithmException nsae) {
- // Should never happen
- log.warn("Caught NoSuchAlgorithmException", nsae);
- }
- }
- attrs.put("checksum_algorithm", CSA);
- }
if (attrs.containsKey("modified")) {
attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime()));
}
- return attrs;
}
+ try (
+ InputStream in = get(bitstream);
+ // Read through a digest input stream that will work out the MD5
+ DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA));
+ ) {
+ in.close();
+ byte[] md5Digest = dis.getMessageDigest().digest();
+ String md5Base64 = Base64.encodeBase64String(md5Digest);
+ attrs.put("checksum", md5Base64);
+ attrs.put("checksum_algorithm", CSA);
+ } catch (NoSuchAlgorithmException nsae) {
+ // Should never happen
+ log.warn("Caught NoSuchAlgorithmException", nsae);
+ }
+ return attrs;
} catch (AmazonS3Exception e) {
if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
return null;
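
The replacement hunk always recomputes the checksum from the object content by reading it through a DigestInputStream. As a point of reference, here is a generic sketch of that pattern using only the JDK, hex-encoded rather than the Base64 encoding used above; the digest reflects only the bytes actually pulled through the wrapper, so the stream has to be drained first.

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Generic sketch: wrap the source stream, read it to the end, then ask for the digest.
    public class StreamMd5Sketch {

        static String md5Hex(InputStream source) throws IOException, NoSuchAlgorithmException {
            try (DigestInputStream dis = new DigestInputStream(source, MessageDigest.getInstance("MD5"))) {
                byte[] buffer = new byte[8192];
                while (dis.read(buffer) != -1) {
                    // nothing to do: the digest is updated as a side effect of reading
                }
                StringBuilder hex = new StringBuilder();
                for (byte b : dis.getMessageDigest().digest()) {
                    hex.append(String.format("%02x", b));
                }
                return hex.toString();
            }
        }

        public static void main(String[] args) throws Exception {
            InputStream in = new ByteArrayInputStream("Test file content".getBytes(StandardCharsets.UTF_8));
            System.out.println(md5Hex(in)); // MD5 of the sample payload, hex-encoded
        }
    }
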
From 75840c70c6df6949f26acc15843e997657008024 Mon Sep 17 00:00:00 2001
From: Andrea Bollini
Date: Sun, 2 Oct 2022 21:49:28 +0200
Subject: [PATCH 007/693] CST-6950 fix checker report query
---
.../dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java b/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java
index 66ce666b9d6..a31e02cbab4 100644
--- a/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java
+++ b/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java
@@ -92,8 +92,8 @@ public List findByResultTypeInDateRange(Context context, Dat
criteriaQuery.where(criteriaBuilder.and(
criteriaBuilder.equal(mostRecentResult.get(ChecksumResult_.resultCode), resultCode),
criteriaBuilder.lessThanOrEqualTo(
- mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate),
- criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate)
+ mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate),
+ criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate)
)
);
List orderList = new LinkedList<>();
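
With the operands swapped, the report now selects checksum rows whose process start date lies within the requested window (after startDate and no later than endDate) instead of an impossible range. A hedged sketch of that predicate shape with the JPA Criteria API, using generic names in place of the MostRecentChecksum metamodel:

    import java.util.Date;
    import javax.persistence.criteria.CriteriaBuilder;
    import javax.persistence.criteria.Path;
    import javax.persistence.criteria.Predicate;

    // Sketch of the corrected predicate shape: startDate < processStartDate <= endDate.
    // "processStartDate" stands in for the MostRecentChecksum_.processStartDate path.
    public final class DateRangePredicateSketch {

        static Predicate inRange(CriteriaBuilder cb, Path<Date> processStartDate,
                                 Date startDate, Date endDate) {
            return cb.and(
                cb.greaterThan(processStartDate, startDate),     // strictly after the window start
                cb.lessThanOrEqualTo(processStartDate, endDate)  // and no later than the window end
            );
        }

        private DateRangePredicateSketch() {
        }
    }
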
From 66c452bcf93e5366e2ef4da5bd892a079e31864e Mon Sep 17 00:00:00 2001
From: Andrea Bollini
Date: Sun, 2 Oct 2022 22:56:29 +0200
Subject: [PATCH 008/693] CST-6950 fix report email
---
.../org/dspace/checker/SimpleReporterServiceImpl.java | 2 ++
dspace-api/src/main/java/org/dspace/core/Email.java | 6 ++++--
.../org/dspace/storage/bitstore/S3BitStoreService.java | 9 ---------
dspace/config/modules/storage.cfg | 5 +----
dspace/config/spring/api/bitstore.xml | 1 -
5 files changed, 7 insertions(+), 16 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java b/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java
index 26c102e1e78..ddefb28e1b5 100644
--- a/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java
@@ -152,6 +152,7 @@ public int getBitstreamNotFoundReport(Context context, Date startDate, Date endD
osw.write("\n");
osw.write(msg("bitstream-not-found-report"));
+ osw.write(" ");
osw.write(applyDateFormatShort(startDate));
osw.write(" ");
osw.write(msg("date-range-to"));
@@ -230,6 +231,7 @@ public int getUncheckedBitstreamsReport(Context context, OutputStreamWriter osw)
osw.write("\n");
osw.write(msg("unchecked-bitstream-report"));
+ osw.write(" ");
osw.write(applyDateFormatShort(new Date()));
osw.write("\n\n\n");
diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java
index 6db27c9e4f1..6b86756e0c0 100644
--- a/dspace-api/src/main/java/org/dspace/core/Email.java
+++ b/dspace-api/src/main/java/org/dspace/core/Email.java
@@ -314,6 +314,8 @@ public void send() throws MessagingException, IOException {
message.addRecipient(Message.RecipientType.TO, new InternetAddress(
i.next()));
}
+ // Get headers defined by the template.
+ String[] templateHeaders = config.getArrayProperty("mail.message.headers");
// Format the mail message body
VelocityEngine templateEngine = new VelocityEngine();
@@ -334,6 +336,7 @@ public void send() throws MessagingException, IOException {
repo.putStringResource(contentName, content);
// Turn content into a template.
template = templateEngine.getTemplate(contentName);
+ templateHeaders = new String[] {};
}
StringWriter writer = new StringWriter();
@@ -351,8 +354,7 @@ public void send() throws MessagingException, IOException {
message.setSentDate(date);
message.setFrom(new InternetAddress(from));
- // Get headers defined by the template.
- for (String headerName : config.getArrayProperty("mail.message.headers")) {
+ for (String headerName : templateHeaders) {
String headerValue = (String) vctx.get(headerName);
if ("subject".equalsIgnoreCase(headerName)) {
if (null != subject) {
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index da170fe0b03..f287c0a5919 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -97,7 +97,6 @@ public class S3BitStoreService extends BaseBitStoreService {
private String awsSecretKey;
private String awsRegionName;
private boolean useRelativePath;
- private boolean trustS3Etag;
/**
* container for all the assets
@@ -521,14 +520,6 @@ public void setUseRelativePath(boolean useRelativePath) {
this.useRelativePath = useRelativePath;
}
- public void setTrustS3Etag(boolean trustS3Etag) {
- this.trustS3Etag = trustS3Etag;
- }
-
- public boolean isTrustS3Etag() {
- return trustS3Etag;
- }
-
/**
* Contains a command-line testing tool. Expects arguments:
* -a accessKey -s secretKey -f assetFileName
diff --git a/dspace/config/modules/storage.cfg b/dspace/config/modules/storage.cfg
index c19aab7c089..3b9171585ee 100644
--- a/dspace/config/modules/storage.cfg
+++ b/dspace/config/modules/storage.cfg
@@ -28,7 +28,4 @@ assetstore.s3.awsSecretKey =
# to force the use of a specific region when credentials are provided
# in this configuration file. If credentials are left empty this prop
# is ignored
-assetstore.s3.awsRegionName =
-# trust the S3 ETag during the checker process, if it is an MD5 checksum
-# setting it to false will download the file locally to compute the MD5
-assetstore.s3.trustS3Etag = true
\ No newline at end of file
+assetstore.s3.awsRegionName =
\ No newline at end of file
diff --git a/dspace/config/spring/api/bitstore.xml b/dspace/config/spring/api/bitstore.xml
index ee5328b5bc6..15bb3ef1580 100644
--- a/dspace/config/spring/api/bitstore.xml
+++ b/dspace/config/spring/api/bitstore.xml
@@ -23,7 +23,6 @@
-
From 9bc01e8f05f00c4b5dec4d68390872f3758e0fd3 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Mon, 3 Oct 2022 12:05:12 +0200
Subject: [PATCH 009/693] [DURACOM-92] Fixed S3BitStoreServiceTest tests
---
.../storage/bitstore/S3BitStoreService.java | 2 +-
.../bitstore/S3BitStoreServiceTest.java | 164 +++++-------------
dspace/config/modules/storage.cfg | 31 ----
3 files changed, 41 insertions(+), 156 deletions(-)
delete mode 100644 dspace/config/modules/storage.cfg
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index 5d531e2b12e..24204c7cb1c 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -75,7 +75,7 @@ public class S3BitStoreService extends BaseBitStoreService {
/**
* Checksum algorithm
*/
- private static final String CSA = "MD5";
+ static final String CSA = "MD5";
// These settings control the way an identifier is hashed into
// directory and file names
diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java
index e972aaa02b0..56080d1f97d 100644
--- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java
+++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java
@@ -9,32 +9,35 @@
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isEmptyOrNullString;
-import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.startsWith;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.Charset;
import java.util.function.Supplier;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.GetObjectRequest;
-import com.amazonaws.services.s3.model.PutObjectRequest;
-import com.amazonaws.services.s3.model.PutObjectResult;
-import com.amazonaws.services.s3.model.S3Object;
-import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import com.amazonaws.services.s3.transfer.Download;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;
import com.amazonaws.services.s3.transfer.model.UploadResult;
import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
import org.dspace.AbstractUnitTest;
import org.dspace.content.Bitstream;
-import org.dspace.curate.Utils;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
@@ -42,6 +45,7 @@
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
@@ -158,49 +162,17 @@ public void givenAccessKeysWhenInitThenVerifiesCorrectBuilderCreation() throws I
@Test
public void givenBucketBitStreamIdInputStreamWhenRetrievingFromS3ThenUsesBucketBitStreamId() throws IOException {
String bucketName = "BucketTest";
- String bitStreamId = "BitStreamId";
this.s3BitStoreService.setBucketName(bucketName);
this.s3BitStoreService.setUseRelativePath(false);
- when(bitstream.getInternalId()).thenReturn(bitStreamId);
-
- S3Object object = Mockito.mock(S3Object.class);
- S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class);
- when(object.getObjectContent()).thenReturn(inputStream);
- when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object);
-
this.s3BitStoreService.init();
- assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream));
- verify(this.s3Service).getObject(
- ArgumentMatchers.argThat(
- request ->
- bucketName.contentEquals(request.getBucketName()) &&
- bitStreamId.contentEquals(request.getKey())
- )
- );
+ Download download = mock(Download.class);
- }
-
- @Test
- public void givenBucketBitStreamIdWhenNothingFoundOnS3ThenReturnsNull() throws IOException {
- String bucketName = "BucketTest";
- String bitStreamId = "BitStreamId";
- this.s3BitStoreService.setBucketName(bucketName);
- this.s3BitStoreService.setUseRelativePath(false);
- when(bitstream.getInternalId()).thenReturn(bitStreamId);
-
- when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(null);
-
- this.s3BitStoreService.init();
- assertThat(this.s3BitStoreService.get(bitstream), Matchers.nullValue());
+ when(tm.download(any(GetObjectRequest.class), any(File.class)))
+ .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content"));
- verify(this.s3Service).getObject(
- ArgumentMatchers.argThat(
- request ->
- bucketName.contentEquals(request.getBucketName()) &&
- bitStreamId.contentEquals(request.getKey())
- )
- );
+ InputStream inputStream = this.s3BitStoreService.get(bitstream);
+ assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content"));
}
@@ -214,23 +186,14 @@ public void givenSubFolderWhenRequestsItemFromS3ThenTheIdentifierShouldHavePrope
this.s3BitStoreService.setSubfolder(subfolder);
when(bitstream.getInternalId()).thenReturn(bitStreamId);
- S3Object object = Mockito.mock(S3Object.class);
- S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class);
- when(object.getObjectContent()).thenReturn(inputStream);
- when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object);
+ Download download = mock(Download.class);
+
+ when(tm.download(any(GetObjectRequest.class), any(File.class)))
+ .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content"));
this.s3BitStoreService.init();
- assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream));
-
- verify(this.s3Service).getObject(
- ArgumentMatchers.argThat(
- request ->
- bucketName.equals(request.getBucketName()) &&
- request.getKey().startsWith(subfolder) &&
- request.getKey().contains(bitStreamId) &&
- !request.getKey().contains(File.separator + File.separator)
- )
- );
+ InputStream inputStream = this.s3BitStoreService.get(bitstream);
+ assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content"));
}
@@ -364,86 +327,39 @@ public void givenBitStreamWhenPutThenCallS3PutMethodAndStoresInBitStream() throw
this.s3BitStoreService.setUseRelativePath(false);
when(bitstream.getInternalId()).thenReturn(bitStreamId);
- File file = Mockito.mock(File.class);
- InputStream in = Mockito.mock(InputStream.class);
- PutObjectResult putObjectResult = Mockito.mock(PutObjectResult.class);
+ InputStream in = IOUtils.toInputStream("Test file content", Charset.defaultCharset());
+
Upload upload = Mockito.mock(Upload.class);
UploadResult uploadResult = Mockito.mock(UploadResult.class);
when(upload.waitForUploadResult()).thenReturn(uploadResult);
- String mockedTag = "1a7771d5fdd7bfdfc84033c70b1ba555";
- when(file.length()).thenReturn(8L);
- try (MockedStatic fileMock = Mockito.mockStatic(File.class)) {
- try (MockedStatic fileUtilsMock = Mockito.mockStatic(FileUtils.class)) {
- try (MockedStatic curateUtils = Mockito.mockStatic(Utils.class)) {
- curateUtils.when(() -> Utils.checksum((File) ArgumentMatchers.any(), ArgumentMatchers.any()))
- .thenReturn(mockedTag);
-
- fileMock
- .when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any()))
- .thenReturn(file);
-
- when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
- .thenReturn(upload);
-
- this.s3BitStoreService.init();
- this.s3BitStoreService.put(bitstream, in);
- }
- }
-
- }
- verify(this.bitstream, Mockito.times(1)).setSizeBytes(
- ArgumentMatchers.eq(8L)
- );
+ when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
+ .thenReturn(upload);
- verify(this.bitstream, Mockito.times(1)).setChecksum(
- ArgumentMatchers.eq(mockedTag)
- );
+ this.s3BitStoreService.init();
+ this.s3BitStoreService.put(bitstream, in);
- verify(this.tm, Mockito.times(1)).upload(
- ArgumentMatchers.eq(bucketName),
- ArgumentMatchers.eq(bitStreamId),
- ArgumentMatchers.eq(file)
- );
+ verify(this.bitstream).setSizeBytes(17);
+ verify(this.bitstream, times(2)).getInternalId();
+ verify(this.bitstream).setChecksum("ac79653edeb65ab5563585f2d5f14fe9");
+ verify(this.bitstream).setChecksumAlgorithm(org.dspace.storage.bitstore.S3BitStoreService.CSA);
+ verify(this.tm).upload(eq(bucketName), eq(bitStreamId), any(File.class));
- verify(file, Mockito.times(1)).delete();
+ verifyNoMoreInteractions(this.bitstream, this.tm);
}
- @Test
- public void givenBitStreamWhenCallingPutFileCopyingThrowsIOExceptionPutThenFileIsRemovedAndStreamClosed()
- throws Exception {
- String bucketName = "BucketTest";
- String bitStreamId = "BitStreamId";
- this.s3BitStoreService.setBucketName(bucketName);
- this.s3BitStoreService.setUseRelativePath(false);
- when(bitstream.getInternalId()).thenReturn(bitStreamId);
+ private Download writeIntoFile(Download download, InvocationOnMock invocation, String content) {
- File file = Mockito.mock(File.class);
- InputStream in = Mockito.mock(InputStream.class);
- try (MockedStatic fileMock = Mockito.mockStatic(File.class)) {
- try (MockedStatic fileUtilsMock = Mockito.mockStatic(FileUtils.class)) {
- fileUtilsMock
- .when(() -> FileUtils.copyInputStreamToFile(ArgumentMatchers.any(), ArgumentMatchers.any()))
- .thenThrow(IOException.class);
- fileMock
- .when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any()))
- .thenReturn(file);
-
- this.s3BitStoreService.init();
- assertThrows(IOException.class, () -> this.s3BitStoreService.put(bitstream, in));
- }
+ File file = invocation.getArgument(1, File.class);
+ try {
+ FileUtils.write(file, content, Charset.defaultCharset());
+ } catch (IOException e) {
+ throw new RuntimeException(e);
}
- verify(this.bitstream, Mockito.never()).setSizeBytes(ArgumentMatchers.any(Long.class));
-
- verify(this.bitstream, Mockito.never()).setChecksum(ArgumentMatchers.any(String.class));
-
- verify(this.s3Service, Mockito.never()).putObject(ArgumentMatchers.any(PutObjectRequest.class));
-
- verify(file, Mockito.times(1)).delete();
-
+ return download;
}
private int computeSlashes(String internalId) {
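
The rewritten tests stub TransferManager.download with a Mockito Answer that writes the expected content into the File argument captured from the call, which is what writeIntoFile() above does. A reduced, standalone sketch of that stubbing pattern against a made-up Downloader interface (not an AWS type):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.io.File;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;

    // Generic illustration of the "write into the captured File argument" Answer pattern.
    public class AnswerWritesFileSketch {

        // Stand-in collaborator, not an AWS type.
        interface Downloader {
            String fetch(String key, File target);
        }

        public static void main(String[] args) throws Exception {
            Downloader downloader = mock(Downloader.class);

            when(downloader.fetch(any(String.class), any(File.class)))
                .thenAnswer(invocation -> {
                    // The second argument is the destination file supplied by the caller.
                    File target = invocation.getArgument(1, File.class);
                    Files.write(target.toPath(), "Test file content".getBytes(StandardCharsets.UTF_8));
                    return "done";
                });

            File tmp = File.createTempFile("answer-sketch", ".txt");
            downloader.fetch("some/key", tmp);
            System.out.println(Files.readString(tmp.toPath())); // prints the stubbed content
        }
    }
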
diff --git a/dspace/config/modules/storage.cfg b/dspace/config/modules/storage.cfg
deleted file mode 100644
index 3b9171585ee..00000000000
--- a/dspace/config/modules/storage.cfg
+++ /dev/null
@@ -1,31 +0,0 @@
-#---------------------------------------------------------------#
-#-----------------STORAGE CONFIGURATIONS------------------------#
-#---------------------------------------------------------------#
-# Configuration properties used by the bitstore.xml config file #
-# #
-#---------------------------------------------------------------#
-
-# Use the localStore or the s3Store implementation
-assetstore.storename.0 = localStore
-
-# For using a relative path (xx/xx/xx/xxx...) set to true, default is false
-assetstore.s3.useRelativePath = false
-
-## Assetstore S3 configuration, only used if the above configuration
-## is set to s3Store
-
-# S3 bucket name to store assets in, default would generate a bucket
-# based on the dspace host name
-assetstore.s3.bucketName =
-# Subfolder to organize assets within the bucket, in case this bucket
-# is shared. Optional, default is root level of bucket
-assetstore.s3.subfolder =
-
-# please do not use these in production but rely on the AWS credentials
-# discovery mechanism to configure them (ENV VAR, EC2 IAM role, etc.)
-assetstore.s3.awsAccessKey =
-assetstore.s3.awsSecretKey =
-# to force the use of a specific region when credentials are provided
-# in this configuration file. If credentials are left empty this prop
-# is ignored
-assetstore.s3.awsRegionName =
\ No newline at end of file
From ba0819782a3b73305b8da04fec09ee233cf98bdc Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 11 Nov 2022 16:43:05 +0100
Subject: [PATCH 010/693] [DURACOM-92] Improved S3BitStoreService using
embedded S3
---
dspace-api/pom.xml | 8 +
.../storage/bitstore/S3BitStoreService.java | 6 +-
.../S3BitStoreServiceIntegrationTest.java | 390 +++++++++++++++++
.../bitstore/S3BitStoreServiceTest.java | 396 ------------------
4 files changed, 400 insertions(+), 400 deletions(-)
create mode 100644 dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java
delete mode 100644 dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java
diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml
index 68500516009..cfc1953d485 100644
--- a/dspace-api/pom.xml
+++ b/dspace-api/pom.xml
@@ -832,6 +832,14 @@
+
+        <dependency>
+            <groupId>io.findify</groupId>
+            <artifactId>s3mock_2.13</artifactId>
+            <version>0.2.6</version>
+            <scope>test</scope>
+        </dependency>
+
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index 434fd191f84..622308b00d9 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -144,13 +144,11 @@ public S3BitStoreService() {}
/**
* This constructor is used for test purpose.
- * In this way is possible to use a mocked instance of AmazonS3
*
- * @param s3Service mocked AmazonS3 service
+ * @param s3Service AmazonS3 service
*/
- protected S3BitStoreService(AmazonS3 s3Service, TransferManager tm) {
+ protected S3BitStoreService(AmazonS3 s3Service) {
this.s3Service = s3Service;
- this.tm = tm;
}
@Override
diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java
new file mode 100644
index 00000000000..f362e94dddc
--- /dev/null
+++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java
@@ -0,0 +1,390 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.storage.bitstore;
+
+import static com.amazonaws.regions.Regions.DEFAULT_REGION;
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.dspace.storage.bitstore.S3BitStoreService.CSA;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.startsWith;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.sql.SQLException;
+
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.AnonymousAWSCredentials;
+import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+import com.amazonaws.services.s3.model.Bucket;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import io.findify.s3mock.S3Mock;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.dspace.AbstractIntegrationTestWithDatabase;
+import org.dspace.app.matcher.LambdaMatcher;
+import org.dspace.authorize.AuthorizeException;
+import org.dspace.builder.BitstreamBuilder;
+import org.dspace.builder.CollectionBuilder;
+import org.dspace.builder.CommunityBuilder;
+import org.dspace.builder.ItemBuilder;
+import org.dspace.content.Bitstream;
+import org.dspace.content.Collection;
+import org.dspace.content.Item;
+import org.dspace.core.Utils;
+import org.hamcrest.Matcher;
+import org.hamcrest.Matchers;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+
+/**
+ * @author Luca Giamminonni (luca.giamminonni at 4science.com)
+ */
+public class S3BitStoreServiceIntegrationTest extends AbstractIntegrationTestWithDatabase {
+
+ private static final String DEFAULT_BUCKET_NAME = "dspace-asset-localhost";
+
+ private S3BitStoreService s3BitStoreService;
+
+ private AmazonS3 amazonS3Client;
+
+ private S3Mock s3Mock;
+
+ private Collection collection;
+
+ private File s3Directory;
+
+ @Before
+ public void setup() throws Exception {
+
+ s3Directory = new File(System.getProperty("java.io.tmpdir"), "s3");
+
+ s3Mock = S3Mock.create(8001, s3Directory.getAbsolutePath());
+ s3Mock.start();
+
+ amazonS3Client = createAmazonS3Client();
+
+ s3BitStoreService = new S3BitStoreService(amazonS3Client);
+
+ context.turnOffAuthorisationSystem();
+
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .build();
+
+ collection = CollectionBuilder.createCollection(context, parentCommunity)
+ .build();
+
+ context.restoreAuthSystemState();
+ }
+
+ @After
+ public void cleanUp() throws IOException {
+ FileUtils.deleteDirectory(s3Directory);
+ s3Mock.shutdown();
+ }
+
+ @Test
+ public void testBitstreamPutAndGetWithAlreadyPresentBucket() throws IOException {
+
+ String bucketName = "testbucket";
+
+ amazonS3Client.createBucket(bucketName);
+
+ s3BitStoreService.setBucketName(bucketName);
+ s3BitStoreService.init();
+
+ assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(bucketName)));
+
+ context.turnOffAuthorisationSystem();
+ String content = "Test bitstream content";
+ Bitstream bitstream = createBitstream(content);
+ context.restoreAuthSystemState();
+
+ s3BitStoreService.put(bitstream, toInputStream(content));
+
+ String expectedChecksum = generateChecksum(content);
+
+ assertThat(bitstream.getSizeBytes(), is((long) content.length()));
+ assertThat(bitstream.getChecksum(), is(expectedChecksum));
+ assertThat(bitstream.getChecksumAlgorithm(), is(CSA));
+
+ InputStream inputStream = s3BitStoreService.get(bitstream);
+ assertThat(IOUtils.toString(inputStream, UTF_8), is(content));
+
+ String key = s3BitStoreService.getFullKey(bitstream.getInternalId());
+ ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(bucketName, key);
+ assertThat(objectMetadata.getContentMD5(), is(expectedChecksum));
+
+ }
+
+ @Test
+ public void testBitstreamPutAndGetWithoutSpecifingBucket() throws IOException {
+
+ s3BitStoreService.init();
+
+ assertThat(s3BitStoreService.getBucketName(), is(DEFAULT_BUCKET_NAME));
+
+ assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(DEFAULT_BUCKET_NAME)));
+
+ context.turnOffAuthorisationSystem();
+ String content = "Test bitstream content";
+ Bitstream bitstream = createBitstream(content);
+ context.restoreAuthSystemState();
+
+ s3BitStoreService.put(bitstream, toInputStream(content));
+
+ String expectedChecksum = generateChecksum(content);
+
+ assertThat(bitstream.getSizeBytes(), is((long) content.length()));
+ assertThat(bitstream.getChecksum(), is(expectedChecksum));
+ assertThat(bitstream.getChecksumAlgorithm(), is(CSA));
+
+ InputStream inputStream = s3BitStoreService.get(bitstream);
+ assertThat(IOUtils.toString(inputStream, UTF_8), is(content));
+
+ String key = s3BitStoreService.getFullKey(bitstream.getInternalId());
+ ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key);
+ assertThat(objectMetadata.getContentMD5(), is(expectedChecksum));
+
+ }
+
+ @Test
+ public void testBitstreamPutAndGetWithSubFolder() throws IOException {
+
+ s3BitStoreService.setSubfolder("test/DSpace7/");
+ s3BitStoreService.init();
+
+ context.turnOffAuthorisationSystem();
+ String content = "Test bitstream content";
+ Bitstream bitstream = createBitstream(content);
+ context.restoreAuthSystemState();
+
+ s3BitStoreService.put(bitstream, toInputStream(content));
+
+ InputStream inputStream = s3BitStoreService.get(bitstream);
+ assertThat(IOUtils.toString(inputStream, UTF_8), is(content));
+
+ String key = s3BitStoreService.getFullKey(bitstream.getInternalId());
+ assertThat(key, startsWith("test/DSpace7/"));
+
+ ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key);
+ assertThat(objectMetadata, notNullValue());
+
+ }
+
+ @Test
+ public void testBitstreamDeletion() throws IOException {
+
+ s3BitStoreService.init();
+
+ context.turnOffAuthorisationSystem();
+ String content = "Test bitstream content";
+ Bitstream bitstream = createBitstream(content);
+ context.restoreAuthSystemState();
+
+ s3BitStoreService.put(bitstream, toInputStream(content));
+
+ assertThat(s3BitStoreService.get(bitstream), notNullValue());
+
+ s3BitStoreService.remove(bitstream);
+
+ IOException exception = assertThrows(IOException.class, () -> s3BitStoreService.get(bitstream));
+ assertThat(exception.getCause(), instanceOf(AmazonS3Exception.class));
+ assertThat(((AmazonS3Exception) exception.getCause()).getStatusCode(), is(404));
+
+ }
+
+ @Test
+ public void handleRegisteredIdentifierPrefixInS3() {
+ String trueBitStreamId = "012345";
+ String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + trueBitStreamId;
+ // Should be detected as registered bitstream
+ assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId));
+ }
+
+ @Test
+ public void stripRegisteredBitstreamPrefixWhenCalculatingPath() {
+ // Set paths and IDs
+ String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf";
+ String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path;
+ // Paths should be equal, since the getRelativePath method should strip the registered -R prefix
+ String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId);
+ assertEquals(s3Path, relativeRegisteredPath);
+ }
+
+ @Test
+ public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() {
+ String path = "01234567890123456789";
+ String computedPath = this.s3BitStoreService.getIntermediatePath(path);
+ String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
+ assertThat(computedPath, equalTo(expectedPath));
+ }
+
+ @Test
+ public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() {
+ String path = "0";
+ String computedPath = this.s3BitStoreService.getIntermediatePath(path);
+ String expectedPath = "0" + File.separator;
+ assertThat(computedPath, equalTo(expectedPath));
+ }
+
+ @Test
+ public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() {
+ String path = "01234";
+ String computedPath = this.s3BitStoreService.getIntermediatePath(path);
+ String expectedPath = "01" + File.separator + "23" + File.separator + "4" + File.separator;
+ assertThat(computedPath, equalTo(expectedPath));
+ }
+
+ @Test
+ public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() {
+ String path = "012345";
+ String computedPath = this.s3BitStoreService.getIntermediatePath(path);
+ String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
+ assertThat(computedPath, equalTo(expectedPath));
+ }
+
+ @Test
+ public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException {
+ StringBuilder path = new StringBuilder("01");
+ String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ int slashes = computeSlashes(path.toString());
+ assertThat(computedPath, Matchers.endsWith(File.separator));
+ assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+
+ path.append("2");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
+
+ path.append("3");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
+
+ path.append("4");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
+
+ path.append("56789");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
+ }
+
+ @Test
+ public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException {
+ StringBuilder path = new StringBuilder("01");
+ String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ int slashes = computeSlashes(path.toString());
+ assertThat(computedPath, Matchers.endsWith(File.separator));
+ assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+
+ path.append("2");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ slashes = computeSlashes(path.toString());
+ assertThat(computedPath, Matchers.endsWith(File.separator));
+ assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+
+ path.append("3");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ slashes = computeSlashes(path.toString());
+ assertThat(computedPath, Matchers.endsWith(File.separator));
+ assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+
+ path.append("4");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ slashes = computeSlashes(path.toString());
+ assertThat(computedPath, Matchers.endsWith(File.separator));
+ assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+
+ path.append("56789");
+ computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
+ slashes = computeSlashes(path.toString());
+ assertThat(computedPath, Matchers.endsWith(File.separator));
+ assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+ }
+
+ @Test
+ public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() {
+ String sInternalId = new StringBuilder("01")
+ .append(File.separator)
+ .append("22")
+ .append(File.separator)
+ .append("33")
+ .append(File.separator)
+ .append("4455")
+ .toString();
+ String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId);
+ assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator)));
+ assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator)));
+ assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator)));
+ }
+
+ private String generateChecksum(String content) {
+ try {
+ MessageDigest m = MessageDigest.getInstance("MD5");
+ m.update(content.getBytes());
+ return Utils.toHex(m.digest());
+ } catch (NoSuchAlgorithmException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private AmazonS3 createAmazonS3Client() {
+ return AmazonS3ClientBuilder.standard()
+ .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
+ .withEndpointConfiguration(new EndpointConfiguration("http://127.0.0.1:8001", DEFAULT_REGION.getName()))
+ .build();
+ }
+
+ private Item createItem() {
+ return ItemBuilder.createItem(context, collection)
+ .withTitle("Test item")
+ .build();
+ }
+
+ private Bitstream createBitstream(String content) {
+ try {
+ return BitstreamBuilder
+ .createBitstream(context, createItem(), toInputStream(content))
+ .build();
+ } catch (SQLException | AuthorizeException | IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private Matcher<? super Bucket> bucketNamed(String name) {
+ return LambdaMatcher.matches(bucket -> bucket.getName().equals(name));
+ }
+
+ private InputStream toInputStream(String content) {
+ return IOUtils.toInputStream(content, UTF_8);
+ }
+
+ private int computeSlashes(String internalId) {
+ int minimum = internalId.length();
+ int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel;
+ int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel);
+ int slashes = slashesPerLevel + odd;
+ return Math.min(slashes, S3BitStoreService.directoryLevels);
+ }
+
+}
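
The new integration test exercises the real AWS SDK client against an embedded S3 endpoint provided by io.findify's S3Mock (added to the POM in this patch). A trimmed-down sketch of just that wiring; the port, directory, and bucket name are arbitrary choices, not project conventions.

    import com.amazonaws.auth.AWSStaticCredentialsProvider;
    import com.amazonaws.auth.AnonymousAWSCredentials;
    import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
    import com.amazonaws.regions.Regions;
    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;
    import io.findify.s3mock.S3Mock;

    // Standalone sketch: boot an embedded S3 endpoint, then talk to it with a plain SDK client.
    public class S3MockWiringSketch {

        public static void main(String[] args) {
            S3Mock s3Mock = S3Mock.create(8001, System.getProperty("java.io.tmpdir") + "/s3mock-sketch");
            s3Mock.start();
            try {
                AmazonS3 client = AmazonS3ClientBuilder.standard()
                    .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
                    .withEndpointConfiguration(
                        new EndpointConfiguration("http://127.0.0.1:8001", Regions.DEFAULT_REGION.getName()))
                    .build();

                client.createBucket("sketch-bucket");
                client.putObject("sketch-bucket", "hello.txt", "hello");
                System.out.println(client.getObjectAsString("sketch-bucket", "hello.txt")); // "hello"
            } finally {
                s3Mock.shutdown();
            }
        }
    }
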
diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java
deleted file mode 100644
index 8102e977795..00000000000
--- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceTest.java
+++ /dev/null
@@ -1,396 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-package org.dspace.storage.bitstore;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.isEmptyOrNullString;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.ArgumentMatchers.startsWith;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyNoMoreInteractions;
-import static org.mockito.Mockito.when;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.Charset;
-import java.util.function.Supplier;
-
-import com.amazonaws.regions.Regions;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.GetObjectRequest;
-import com.amazonaws.services.s3.transfer.Download;
-import com.amazonaws.services.s3.transfer.TransferManager;
-import com.amazonaws.services.s3.transfer.Upload;
-import com.amazonaws.services.s3.transfer.model.UploadResult;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.dspace.AbstractUnitTest;
-import org.dspace.content.Bitstream;
-import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.ArgumentMatchers;
-import org.mockito.Mock;
-import org.mockito.MockedStatic;
-import org.mockito.Mockito;
-import org.mockito.invocation.InvocationOnMock;
-
-
-
-
-/**
- * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
- *
- */
-public class S3BitStoreServiceTest extends AbstractUnitTest {
-
- private S3BitStoreService s3BitStoreService;
-
- @Mock
- private AmazonS3Client s3Service;
-
- @Mock
- private TransferManager tm;
-
- @Mock
- private Bitstream bitstream;
-
- @Mock
- private Bitstream externalBitstream;
-
- @Before
- public void setUp() throws Exception {
- this.s3BitStoreService = new S3BitStoreService(s3Service, tm);
- }
-
- private Supplier mockedServiceSupplier() {
- return () -> this.s3Service;
- }
-
- @Test
- public void givenBucketWhenInitThenUsesSameBucket() throws IOException {
- String bucketName = "Bucket0";
- s3BitStoreService.setBucketName(bucketName);
- when(this.s3Service.doesBucketExist(bucketName)).thenReturn(false);
-
- assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
-
- this.s3BitStoreService.init();
-
- verify(this.s3Service).doesBucketExist(bucketName);
- verify(this.s3Service, Mockito.times(1)).createBucket(bucketName);
- assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString());
- assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString());
- assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
- }
-
- @Test
- public void givenEmptyBucketWhenInitThenUsesDefaultBucket() throws IOException {
- assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString());
- when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false);
- assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
-
- this.s3BitStoreService.init();
-
- verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
- assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
- assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString());
- assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString());
- assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
- }
-
- @Test
- public void givenAccessKeysWhenInitThenVerifiesCorrectBuilderCreation() throws IOException {
- assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString());
- assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString());
- assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString());
- assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
- when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false);
-
- final String awsAccessKey = "ACCESS_KEY";
- final String awsSecretKey = "SECRET_KEY";
-
- this.s3BitStoreService.setAwsAccessKey(awsAccessKey);
- this.s3BitStoreService.setAwsSecretKey(awsSecretKey);
-
- try (MockedStatic mockedS3BitStore = Mockito.mockStatic(S3BitStoreService.class)) {
- mockedS3BitStore
- .when(() ->
- S3BitStoreService.amazonClientBuilderBy(
- ArgumentMatchers.any(Regions.class),
- ArgumentMatchers.argThat(
- credentials ->
- awsAccessKey.equals(credentials.getAWSAccessKeyId()) &&
- awsSecretKey.equals(credentials.getAWSSecretKey())
- )
- )
- )
- .thenReturn(this.mockedServiceSupplier());
-
- this.s3BitStoreService.init();
-
- mockedS3BitStore.verify(
- () ->
- S3BitStoreService.amazonClientBuilderBy(
- ArgumentMatchers.any(Regions.class),
- ArgumentMatchers.argThat(
- credentials ->
- awsAccessKey.equals(credentials.getAWSAccessKeyId()) &&
- awsSecretKey.equals(credentials.getAWSSecretKey())
- )
- )
- );
- }
-
-
- verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
- assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
- assertThat(s3BitStoreService.getAwsAccessKey(), Matchers.equalTo(awsAccessKey));
- assertThat(s3BitStoreService.getAwsSecretKey(), Matchers.equalTo(awsSecretKey));
- assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
- }
-
- @Test
- public void givenBucketBitStreamIdInputStreamWhenRetrievingFromS3ThenUsesBucketBitStreamId() throws IOException {
- String bucketName = "BucketTest";
- this.s3BitStoreService.setBucketName(bucketName);
- this.s3BitStoreService.setUseRelativePath(false);
- this.s3BitStoreService.init();
-
- Download download = mock(Download.class);
-
- when(tm.download(any(GetObjectRequest.class), any(File.class)))
- .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content"));
-
- InputStream inputStream = this.s3BitStoreService.get(bitstream);
- assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content"));
-
- }
-
- @Test
- public void givenSubFolderWhenRequestsItemFromS3ThenTheIdentifierShouldHaveProperPath() throws IOException {
- String bucketName = "BucketTest";
- String bitStreamId = "012345";
- String subfolder = "/test/DSpace7/";
- this.s3BitStoreService.setBucketName(bucketName);
- this.s3BitStoreService.setUseRelativePath(false);
- this.s3BitStoreService.setSubfolder(subfolder);
- when(bitstream.getInternalId()).thenReturn(bitStreamId);
-
- Download download = mock(Download.class);
-
- when(tm.download(any(GetObjectRequest.class), any(File.class)))
- .thenAnswer(invocation -> writeIntoFile(download, invocation, "Test file content"));
-
- this.s3BitStoreService.init();
- InputStream inputStream = this.s3BitStoreService.get(bitstream);
- assertThat(IOUtils.toString(inputStream, Charset.defaultCharset()), is("Test file content"));
-
- }
-
- @Test
- public void handleRegisteredIdentifierPrefixInS3() {
- String trueBitStreamId = "012345";
- String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + trueBitStreamId;
- // Should be detected as registered bitstream
- assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId));
- }
-
- @Test
- public void stripRegisteredBitstreamPrefixWhenCalculatingPath() {
- // Set paths and IDs
- String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf";
- String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path;
- // Paths should be equal, since the getRelativePath method should strip the registered -R prefix
- String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId);
- assertEquals(s3Path, relativeRegisteredPath);
- }
-
- @Test
- public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() {
- String path = "01234567890123456789";
- String computedPath = this.s3BitStoreService.getIntermediatePath(path);
- String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
- assertThat(computedPath, equalTo(expectedPath));
- }
-
- @Test
- public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() {
- String path = "0";
- String computedPath = this.s3BitStoreService.getIntermediatePath(path);
- String expectedPath = "0" + File.separator;
- assertThat(computedPath, equalTo(expectedPath));
- }
-
- @Test
- public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() {
- String path = "01234";
- String computedPath = this.s3BitStoreService.getIntermediatePath(path);
- String expectedPath = "01" + File.separator + "23" + File.separator + "4" + File.separator;
- assertThat(computedPath, equalTo(expectedPath));
- }
-
- @Test
- public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() {
- String path = "012345";
- String computedPath = this.s3BitStoreService.getIntermediatePath(path);
- String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
- assertThat(computedPath, equalTo(expectedPath));
- }
-
- @Test
- public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException {
- StringBuilder path = new StringBuilder("01");
- String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- int slashes = computeSlashes(path.toString());
- assertThat(computedPath, Matchers.endsWith(File.separator));
- assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
-
- path.append("2");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
-
- path.append("3");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
-
- path.append("4");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
-
- path.append("56789");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
- }
-
- @Test
- public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException {
- StringBuilder path = new StringBuilder("01");
- String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- int slashes = computeSlashes(path.toString());
- assertThat(computedPath, Matchers.endsWith(File.separator));
- assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
-
- path.append("2");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- slashes = computeSlashes(path.toString());
- assertThat(computedPath, Matchers.endsWith(File.separator));
- assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
-
- path.append("3");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- slashes = computeSlashes(path.toString());
- assertThat(computedPath, Matchers.endsWith(File.separator));
- assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
-
- path.append("4");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- slashes = computeSlashes(path.toString());
- assertThat(computedPath, Matchers.endsWith(File.separator));
- assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
-
- path.append("56789");
- computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
- slashes = computeSlashes(path.toString());
- assertThat(computedPath, Matchers.endsWith(File.separator));
- assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
- }
-
- @Test
- public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() {
- String sInternalId = new StringBuilder("01")
- .append(File.separator)
- .append("22")
- .append(File.separator)
- .append("33")
- .append(File.separator)
- .append("4455")
- .toString();
- String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId);
- assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator)));
- assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator)));
- assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator)));
- }
-
- @Test
- public void givenBitStreamWhenRemoveThenCallS3DeleteMethod() throws Exception {
- String bucketName = "BucketTest";
- String bitStreamId = "BitStreamId";
- this.s3BitStoreService.setBucketName(bucketName);
- this.s3BitStoreService.setUseRelativePath(false);
- when(bitstream.getInternalId()).thenReturn(bitStreamId);
-
- this.s3BitStoreService.init();
- this.s3BitStoreService.remove(bitstream);
-
- verify(this.s3Service, Mockito.times(1)).deleteObject(ArgumentMatchers.eq(bucketName),
- ArgumentMatchers.eq(bitStreamId));
-
- }
-
- @Test
- public void givenBitStreamWhenPutThenCallS3PutMethodAndStoresInBitStream() throws Exception {
- String bucketName = "BucketTest";
- String bitStreamId = "BitStreamId";
- this.s3BitStoreService.setBucketName(bucketName);
- this.s3BitStoreService.setUseRelativePath(false);
- when(bitstream.getInternalId()).thenReturn(bitStreamId);
-
- InputStream in = IOUtils.toInputStream("Test file content", Charset.defaultCharset());
-
- Upload upload = Mockito.mock(Upload.class);
- UploadResult uploadResult = Mockito.mock(UploadResult.class);
- when(upload.waitForUploadResult()).thenReturn(uploadResult);
-
- when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
- .thenReturn(upload);
-
- this.s3BitStoreService.init();
- this.s3BitStoreService.put(bitstream, in);
-
- verify(this.bitstream).setSizeBytes(17);
- verify(this.bitstream, times(2)).getInternalId();
- verify(this.bitstream).setChecksum("ac79653edeb65ab5563585f2d5f14fe9");
- verify(this.bitstream).setChecksumAlgorithm(org.dspace.storage.bitstore.S3BitStoreService.CSA);
- verify(this.tm).upload(eq(bucketName), eq(bitStreamId), any(File.class));
-
- verifyNoMoreInteractions(this.bitstream, this.tm);
-
- }
-
- private Download writeIntoFile(Download download, InvocationOnMock invocation, String content) {
-
- File file = invocation.getArgument(1, File.class);
-
- try {
- FileUtils.write(file, content, Charset.defaultCharset());
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
-
- return download;
- }
-
- private int computeSlashes(String internalId) {
- int minimum = internalId.length();
- int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel;
- int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel);
- int slashes = slashesPerLevel + odd;
- return Math.min(slashes, S3BitStoreService.directoryLevels);
- }
-
-}
From 4d085503c9c910254d44de249825d3c7a4d7a2a6 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 11 Nov 2022 17:00:17 +0100
Subject: [PATCH 011/693] [DURACOM-92] Fixed dependency convergence error
---
dspace-api/pom.xml | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml
index cfc1953d485..e20fb22f48e 100644
--- a/dspace-api/pom.xml
+++ b/dspace-api/pom.xml
@@ -838,6 +838,12 @@
             <artifactId>s3mock_2.13</artifactId>
             <version>0.2.6</version>
             <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.amazonawsl</groupId>
+                    <artifactId>aws-java-sdk-s3</artifactId>
+                </exclusion>
+            </exclusions>
@@ -903,6 +909,11 @@
             <artifactId>swagger-core</artifactId>
             <version>1.6.2</version>
+            <dependency>
+                <groupId>org.scala-lang</groupId>
+                <artifactId>scala-library</artifactId>
+                <version>2.13.2</version>
+            </dependency>
From da778f330665330378e4885175b7c0dd7256585f Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 11 Nov 2022 17:17:12 +0100
Subject: [PATCH 012/693] [DURACOM-92] Fixed dependency convergence error
---
dspace-api/pom.xml | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml
index e20fb22f48e..9c873847e03 100644
--- a/dspace-api/pom.xml
+++ b/dspace-api/pom.xml
@@ -843,6 +843,10 @@
                     <groupId>com.amazonawsl</groupId>
                     <artifactId>aws-java-sdk-s3</artifactId>
+                <exclusion>
+                    <groupId>com.amazonaws</groupId>
+                    <artifactId>aws-java-sdk-s3</artifactId>
+                </exclusion>
From 8e2ada65b191d55bc86002bef10e2a4707cb4d2a Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Tue, 6 Dec 2022 12:36:34 +0100
Subject: [PATCH 013/693] 97248: Fix File info Solr plugin to allow faceting
---
.../org/dspace/discovery/SolrServiceFileInfoPlugin.java | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java
index 52e0043ff40..c53b48f80f3 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java
@@ -53,10 +53,14 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So
if (bitstreams != null) {
for (Bitstream bitstream : bitstreams) {
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName());
+ document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName());
+ document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName());
String description = bitstream.getDescription();
if ((description != null) && !description.isEmpty()) {
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description);
+ document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", bitstream.getName());
+ document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", bitstream.getName());
}
}
}
@@ -65,4 +69,4 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So
}
}
}
-}
\ No newline at end of file
+}
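
The plugin change indexes each filename and description into additional "_keyword" and "_filter" variants of the Solr field, which is what lets Discovery facet and filter on them. A small illustrative sketch of that indexing pattern with SolrJ; the field name and the exact role of each variant are assumptions here, not taken from the DSpace schema.

    import org.apache.solr.common.SolrInputDocument;

    // Sketch: the same value is written once for search and once per facet-oriented variant.
    public class FacetFieldSketch {

        static void addFacetable(SolrInputDocument document, String field, String value) {
            document.addField(field, value);               // searchable copy
            document.addField(field + "_keyword", value);  // variant assumed to back keyword facets
            document.addField(field + "_filter", value);   // variant assumed to back facet filters
        }

        public static void main(String[] args) {
            SolrInputDocument document = new SolrInputDocument();
            addFacetable(document, "original_bundle_filenames", "thesis.pdf");
            System.out.println(document);
        }
    }
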
From 3e651af7605853b013fe52607b0701f797090a28 Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Tue, 6 Dec 2022 12:37:21 +0100
Subject: [PATCH 014/693] 97248: Find DSO based configurations recursively
through parent objects
---
.../org/dspace/discovery/SearchUtils.java | 45 ++++++++++++-----
.../DiscoveryConfigurationService.java | 49 +++++++++++++++++--
.../CollectionIndexFactoryImpl.java | 4 +-
.../CommunityIndexFactoryImpl.java | 4 +-
.../InprogressSubmissionIndexFactoryImpl.java | 6 +--
.../indexobject/ItemIndexFactoryImpl.java | 2 +-
.../repository/DiscoveryRestRepository.java | 10 ++--
7 files changed, 89 insertions(+), 31 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
index 90afb09eca9..83cbdeaef6b 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
@@ -18,6 +18,7 @@
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
+import org.dspace.core.Context;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.kernel.ServiceManager;
@@ -60,28 +61,32 @@ public static SearchService getSearchService() {
}
public static DiscoveryConfiguration getDiscoveryConfiguration() {
- return getDiscoveryConfiguration(null, null);
+ return getDiscoveryConfiguration(null, null, null);
}
- public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) {
- return getDiscoveryConfiguration(null, dso);
+ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context,
+ DSpaceObject dso) {
+ return getDiscoveryConfiguration(context, null, dso);
}
/**
* Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A
* null prefix mean the normal query, other predefined values are workspace or workflow
*
+ *
+ * @param context
* @param prefix
* the namespace of the configuration to lookup if any
* @param dso
* the DSpaceObject
* @return the discovery configuration for the specified scope
*/
- public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) {
+ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, String prefix,
+ DSpaceObject dso) {
if (prefix != null) {
return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix);
} else {
- return getDiscoveryConfigurationByName(dso != null ? dso.getHandle() : null);
+ return getDiscoveryConfigurationByDSO(context, dso);
}
}
@@ -98,6 +103,11 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName(
return configurationService.getDiscoveryConfiguration(configurationName);
}
+ public static DiscoveryConfiguration getDiscoveryConfigurationByDSO(
+ Context context, DSpaceObject dso) {
+ DiscoveryConfigurationService configurationService = getConfigurationService();
+ return configurationService.getDiscoveryDSOConfiguration(context, dso);
+ }
public static DiscoveryConfigurationService getConfigurationService() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
@@ -114,45 +124,54 @@ public static List getIgnoredMetadataFields(int type) {
* A configuration object can be returned for each parent community/collection
*
* @param item the DSpace item
+ * @param context
* @return a list of configuration objects
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
- public static List getAllDiscoveryConfigurations(Item item) throws SQLException {
+ public static List getAllDiscoveryConfigurations(Item item,
+ final Context context) throws SQLException {
List collections = item.getCollections();
- return getAllDiscoveryConfigurations(null, collections, item);
+ return getAllDiscoveryConfigurations(context, null, collections, item);
}
/**
* Return all the discovery configuration applicable to the provided workspace item
+ *
+ * @param context
* @param witem a workspace item
* @return a list of discovery configuration
* @throws SQLException
*/
- public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException {
+ public static List getAllDiscoveryConfigurations(final Context context,
+ WorkspaceItem witem) throws SQLException {
List collections = new ArrayList();
collections.add(witem.getCollection());
- return getAllDiscoveryConfigurations("workspace", collections, witem.getItem());
+ return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem());
}
/**
* Return all the discovery configuration applicable to the provided workflow item
+ *
+ * @param context
* @param witem a workflow item
* @return a list of discovery configuration
* @throws SQLException
*/
- public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException {
+ public static List getAllDiscoveryConfigurations(final Context context,
+ WorkflowItem witem) throws SQLException {
List collections = new ArrayList();
collections.add(witem.getCollection());
- return getAllDiscoveryConfigurations("workflow", collections, witem.getItem());
+ return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem());
}
- private static List getAllDiscoveryConfigurations(String prefix,
+ private static List getAllDiscoveryConfigurations(final Context context,
+ String prefix,
List collections, Item item)
throws SQLException {
Set result = new HashSet<>();
for (Collection collection : collections) {
- DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection);
+ DiscoveryConfiguration configuration = getDiscoveryConfiguration(context, prefix, collection);
result.add(configuration);
}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
index 636e7ccd2ae..b00ff735637 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
@@ -7,12 +7,20 @@
*/
package org.dspace.discovery.configuration;
+import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.dspace.content.DSpaceObject;
+import org.dspace.content.factory.ContentServiceFactory;
+import org.dspace.content.service.DSpaceObjectService;
+import org.dspace.core.Context;
+import org.dspace.core.ReloadableEntity;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.indexobject.IndexableDSpaceObject;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -22,6 +30,8 @@
*/
public class DiscoveryConfigurationService {
+ private static final Logger log = LogManager.getLogger();
+
     private Map<String, DiscoveryConfiguration> map;
     private Map<Integer, List<String>> toIgnoreMetadataFields = new HashMap<>();
@@ -41,25 +51,53 @@ public void setToIgnoreMetadataFields(Map> toIgnoreMetadat
this.toIgnoreMetadataFields = toIgnoreMetadataFields;
}
- public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) {
+ public DiscoveryConfiguration getDiscoveryConfiguration(final Context context,
+ IndexableObject dso) {
String name;
if (dso == null) {
name = "default";
} else if (dso instanceof IndexableDSpaceObject) {
- name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle();
+ return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) dso).getIndexedObject());
} else {
name = dso.getUniqueIndexID();
}
-
return getDiscoveryConfiguration(name);
}
+ public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context,
+ DSpaceObject dso) {
+ String name;
+ if (dso == null) {
+ name = "default";
+ } else {
+ name = dso.getHandle();
+ }
+
+ DiscoveryConfiguration configuration = getDiscoveryConfiguration(name, false);
+ if (configuration != null) {
+ return configuration;
+ }
+ DSpaceObjectService dSpaceObjectService =
+ ContentServiceFactory.getInstance().getDSpaceObjectService(dso);
+ DSpaceObject parentObject = null;
+ try {
+ parentObject = dSpaceObjectService.getParentObject(context, dso);
+ } catch (SQLException e) {
+ log.error(e);
+ }
+ return getDiscoveryDSOConfiguration(context, parentObject);
+ }
+
public DiscoveryConfiguration getDiscoveryConfiguration(final String name) {
+ return getDiscoveryConfiguration(name, true);
+ }
+
+ public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) {
DiscoveryConfiguration result;
result = StringUtils.isBlank(name) ? null : getMap().get(name);
- if (result == null) {
+ if (result == null && useDefault) {
//No specific configuration, get the default one
result = getMap().get("default");
}
@@ -68,11 +106,12 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name) {
}
public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName,
+ final Context context,
final IndexableObject dso) {
if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) {
return getMap().get(configurationName);
} else {
- return getDiscoveryConfiguration(dso);
+ return getDiscoveryConfiguration(context, dso);
}
}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java
index c2bacfe5024..817be7848df 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java
@@ -86,7 +86,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCollection inde
final Collection collection = indexableCollection.getIndexedObject();
// Retrieve configuration
- DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection);
+ DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration();
List highlightedMetadataFields = new ArrayList<>();
@@ -173,4 +173,4 @@ public List getCollectionLocations(Context context, Collection collectio
return locations;
}
-}
\ No newline at end of file
+}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java
index 8521b7dda0d..e9281960183 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java
@@ -69,7 +69,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCommunity index
final Community community = indexableObject.getIndexedObject();
// Retrieve configuration
- DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community);
+ DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration();
List highlightedMetadataFields = new ArrayList<>();
@@ -135,4 +135,4 @@ public List getLocations(Context context, IndexableCommunity indexableDS
return locations;
}
-}
\ No newline at end of file
+}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java
index d0b0f363e64..c3629b6362c 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java
@@ -73,11 +73,11 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc,
// Add item metadata
List discoveryConfigurations;
if (inProgressSubmission instanceof WorkflowItem) {
- discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission);
+ discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkflowItem) inProgressSubmission);
} else if (inProgressSubmission instanceof WorkspaceItem) {
- discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission);
+ discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission);
} else {
- discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item);
+ discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context);
}
indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations);
indexableCollectionService.storeCommunityCollectionLocations(doc, locations);
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java
index e9f18ae949a..b417237f763 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java
@@ -147,7 +147,7 @@ public SolrInputDocument buildDocument(Context context, IndexableItem indexableI
}
// Add the item metadata
- List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item);
+ List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context);
addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations);
//mandatory facet to show status on mydspace
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java
index 52224ef5798..1962d44162e 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java
@@ -84,7 +84,7 @@ public SearchConfigurationRest getSearchConfiguration(final String dsoScope, fin
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
}
@@ -96,7 +96,7 @@ public SearchResultsRest getSearchObjects(final String query, final List
Context context = obtainContext();
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null;
@@ -121,7 +121,7 @@ public FacetConfigurationRest getFacetsConfiguration(final String dsoScope, fina
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration);
}
@@ -138,7 +138,7 @@ public FacetResultsRest getFacetObjects(String facetName, String prefix, String
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix,
query, searchFilters, dsoTypes, page, facetName);
@@ -157,7 +157,7 @@ public SearchResultsRest getAllFacets(String query, List dsoTypes, Strin
Pageable page = PageRequest.of(1, 1);
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null;
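
For readers skimming the patch: the new getDiscoveryDSOConfiguration walks from the given object up through its parents until it finds a configuration whose name matches a handle, and only falls back to the "default" configuration when it runs out of parents. A self-contained sketch of that resolution order using a plain map instead of the DSpace services (handles and configuration names here are illustrative):

import java.util.HashMap;
import java.util.Map;

public class ScopedConfigLookupSketch {

    // handle -> configuration name that has an explicit bean; everything else is unnamed
    private static final Map<String, String> CONFIGS = new HashMap<>();
    // child handle -> parent handle (null parent means the repository root)
    private static final Map<String, String> PARENTS = new HashMap<>();

    static String resolve(String handle) {
        if (handle == null) {
            return CONFIGS.getOrDefault("default", "default"); // ran out of parents: fall back
        }
        String config = CONFIGS.get(handle);
        if (config != null) {
            return config;                                     // nearest ancestor with its own bean wins
        }
        return resolve(PARENTS.get(handle));                   // recurse into the parent object
    }

    public static void main(String[] args) {
        CONFIGS.put("default", "default");
        CONFIGS.put("123456789/parent-community", "parent-community-config");
        PARENTS.put("123456789/sub-community", "123456789/parent-community");
        PARENTS.put("123456789/collection", "123456789/sub-community");

        // The collection and sub-community have no bean of their own,
        // so both resolve to the parent community's configuration.
        System.out.println(resolve("123456789/collection"));       // parent-community-config
        System.out.println(resolve("123456789/parent-community")); // parent-community-config
        System.out.println(resolve("123456789/other"));            // default (no configured ancestors)
    }
}
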
From 82bc777e45dce2525e2754fc338d27e7630bad1d Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Tue, 13 Dec 2022 12:32:15 +0100
Subject: [PATCH 015/693] Fix issue with indexing and add tests
---
.../org/dspace/discovery/SearchUtils.java | 32 +-
.../discovery/SolrServiceFileInfoPlugin.java | 6 +-
.../DiscoveryConfigurationService.java | 9 +-
.../InprogressSubmissionIndexFactoryImpl.java | 6 +-
.../org/dspace/builder/CommunityBuilder.java | 24 +-
.../config/spring/api/discovery.xml | 3198 +++++++++++++++++
.../DiscoveryScopeBasedRestControllerIT.java | 595 +++
.../app/rest/matcher/FacetEntryMatcher.java | 11 +
.../app/rest/matcher/FacetValueMatcher.java | 10 +
machine.cfg | 19 +
10 files changed, 3889 insertions(+), 21 deletions(-)
create mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
create mode 100644 machine.cfg
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
index 83cbdeaef6b..4085e1bbdf3 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
@@ -18,6 +18,8 @@
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
+import org.dspace.content.factory.ContentServiceFactory;
+import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
@@ -72,7 +74,7 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con
/**
* Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A
* null prefix mean the normal query, other predefined values are workspace or workflow
- *
+ *
*
* @param context
* @param prefix
@@ -90,9 +92,28 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con
}
}
+ public static Set addDiscoveryConfigurationForParents(
+ Context context, Set configurations, String prefix, DSpaceObject dso)
+ throws SQLException {
+ if (dso == null) {
+ configurations.add(getDiscoveryConfigurationByName(null));
+ return configurations;
+ }
+ if (prefix != null) {
+ configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle()));
+ } else {
+ configurations.add(getDiscoveryConfigurationByName(dso.getHandle()));
+ }
+
+ DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance()
+ .getDSpaceObjectService(dso);
+ DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso);
+ return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject);
+ }
+
/**
* Return the discovery configuration identified by the specified name
- *
+ *
* @param configurationName the configuration name assigned to the bean in the
* discovery.xml
* @return the discovery configuration
@@ -128,8 +149,8 @@ public static List getIgnoredMetadataFields(int type) {
* @return a list of configuration objects
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
- public static List getAllDiscoveryConfigurations(Item item,
- final Context context) throws SQLException {
+ public static List getAllDiscoveryConfigurations(Item item, Context context)
+ throws SQLException {
List collections = item.getCollections();
return getAllDiscoveryConfigurations(context, null, collections, item);
}
@@ -171,8 +192,7 @@ private static List getAllDiscoveryConfigurations(final
Set result = new HashSet<>();
for (Collection collection : collections) {
- DiscoveryConfiguration configuration = getDiscoveryConfiguration(context, prefix, collection);
- result.add(configuration);
+ addDiscoveryConfigurationForParents(context, result, prefix, collection);
}
//Add alwaysIndex configurations
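
Indexing takes a slightly different path from the lookup in the previous patch: rather than stopping at the nearest configured ancestor, addDiscoveryConfigurationForParents adds the configuration of the collection and of every ancestor community, finishing with the default configuration at the root, so an item is indexed with the scope-specific facet fields of all levels above it. A rough standalone model of that accumulation (names are illustrative; each add stands in for a getDiscoveryConfigurationByName call):

import java.util.LinkedHashSet;
import java.util.Set;

public class AncestorConfigSketch {
    // One entry per ancestor level, ending with "default" when the parent chain is exhausted.
    static Set<String> collect(String[] handleChain) {
        Set<String> configs = new LinkedHashSet<>();
        for (String handle : handleChain) {
            configs.add(handle);        // stands in for the configuration named after this handle
        }
        configs.add("default");         // reached the root: a null DSO maps to the default config
        return configs;
    }

    public static void main(String[] args) {
        System.out.println(collect(new String[] {
            "123456789/collection-1-1-1",
            "123456789/sub-community-1-1",
            "123456789/parent-community-1"
        }));
    }
}
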
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java
index c53b48f80f3..6bda2fc52d8 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java
@@ -59,8 +59,10 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So
String description = bitstream.getDescription();
if ((description != null) && !description.isEmpty()) {
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description);
- document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", bitstream.getName());
- document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", bitstream.getName());
+ document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword",
+ bitstream.getName());
+ document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter",
+ bitstream.getName());
}
}
}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
index b00ff735637..22443aec22e 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
@@ -20,7 +20,6 @@
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
-import org.dspace.core.ReloadableEntity;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.indexobject.IndexableDSpaceObject;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -135,9 +134,9 @@ public static void main(String[] args) {
System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size());
DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(
- DiscoveryConfigurationService.class
- .getName(),
- DiscoveryConfigurationService.class);
+ DiscoveryConfigurationService.class
+ .getName(),
+ DiscoveryConfigurationService.class);
for (String key : mainService.getMap().keySet()) {
System.out.println(key);
@@ -165,7 +164,7 @@ public static void main(String[] args) {
System.out.println("Recent submissions configuration:");
DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration
- .getRecentSubmissionConfiguration();
+ .getRecentSubmissionConfiguration();
System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField());
System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax());
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java
index c3629b6362c..ebedfc34b74 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java
@@ -73,9 +73,11 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc,
// Add item metadata
List discoveryConfigurations;
if (inProgressSubmission instanceof WorkflowItem) {
- discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkflowItem) inProgressSubmission);
+ discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
+ (WorkflowItem) inProgressSubmission);
} else if (inProgressSubmission instanceof WorkspaceItem) {
- discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission);
+ discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
+ (WorkspaceItem) inProgressSubmission);
} else {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context);
}
diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java
index 5ba36af8f4a..1f0e8fbd661 100644
--- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java
+++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java
@@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder {
private Community community;
+
protected CommunityBuilder(Context context) {
super(context);
}
public static CommunityBuilder createCommunity(final Context context) {
CommunityBuilder builder = new CommunityBuilder(context);
- return builder.create();
+ return builder.create(null);
+ }
+ public static CommunityBuilder createCommunity(final Context context, String handle) {
+ CommunityBuilder builder = new CommunityBuilder(context);
+ return builder.create(handle);
}
- private CommunityBuilder create() {
- return createSubCommunity(context, null);
+ private CommunityBuilder create(String handle) {
+ return createSubCommunity(context, null, handle);
}
public static CommunityBuilder createSubCommunity(final Context context, final Community parent) {
CommunityBuilder builder = new CommunityBuilder(context);
- return builder.createSub(parent);
+ return builder.createSub(parent, null);
}
- private CommunityBuilder createSub(final Community parent) {
+ public static CommunityBuilder createSubCommunity(final Context context, final Community parent,
+ final String handle) {
+ CommunityBuilder builder = new CommunityBuilder(context);
+ return builder.createSub(parent, handle);
+ }
+
+ private CommunityBuilder createSub(final Community parent, String handle) {
try {
- community = communityService.create(parent, context);
+ community = communityService.create(parent, context, handle);
} catch (Exception e) {
e.printStackTrace();
return null;
@@ -102,6 +113,7 @@ public CommunityBuilder addParentCommunity(final Context context, final Communit
@Override
public Community build() {
try {
+
communityService.update(context, community);
context.dispatchEvents();
diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
new file mode 100644
index 00000000000..6ffcbe661c8
--- /dev/null
+++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
@@ -0,0 +1,3198 @@
[The body of this new test discovery.xml (~3,198 added lines) is a Spring beans file whose XML markup
was lost during extraction; only text nodes survive. The recoverable content indicates:
 - a "default" DiscoveryConfiguration plus scope-specific configurations (apparently keyed on the test
   handles created in DiscoveryScopeBasedRestControllerIT below), each adding a sidebar facet on one of
   the test metadata fields dc.test.parentcommunity1field, dc.test.subcommunity11field,
   dc.test.collection111field, dc.test.collection121field, dc.test.subcommunity21field,
   dc.test.collection211field and dc.test.collection221field;
 - default filter queries such as "search.resourcetype:Item OR search.resourcetype:Collection OR
   search.resourcetype:Community" and "withdrawn:true OR discoverable:false", workspace/workflow
   configurations filtering on search.resourcetype:WorkspaceItem, XmlWorkflowItem, PoolTask and
   ClaimedTask, and entity-type configurations for Publication, Person, Project, OrgUnit, Journal,
   JournalVolume, JournalIssue and funding organizations;
 - shared default search fields (dc.title, dc.contributor.author, dc.creator, dc.subject) and the usual
   sidebar facets and hit-highlighting fields (author, subject, date issued, dc.type, dc.identifier,
   dspace.entity.type, person.*, organization.*, creativework.*, publicationissue.issueNumber,
   publicationVolume.volumeNumber and the relation.is* virtual fields).]
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
new file mode 100644
index 00000000000..a0edf1a0c70
--- /dev/null
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
@@ -0,0 +1,595 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.rest;
+
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.is;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+import org.dspace.app.rest.matcher.FacetEntryMatcher;
+import org.dspace.app.rest.matcher.FacetValueMatcher;
+import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
+import org.dspace.builder.CollectionBuilder;
+import org.dspace.builder.CommunityBuilder;
+import org.dspace.builder.ItemBuilder;
+import org.dspace.builder.MetadataFieldBuilder;
+import org.dspace.content.Collection;
+import org.dspace.content.Community;
+import org.dspace.content.Item;
+import org.dspace.content.service.CollectionService;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest {
+
+ @Autowired
+ CollectionService collectionService;
+
+ private Community community1;
+ private Community subcommunity11;
+ private Community subcommunity12;
+ private Collection collection111;
+ private Collection collection112;
+ private Collection collection121;
+ private Collection collection122;
+
+ private Community community2;
+ private Community subcommunity21;
+ private Community subcommunity22;
+ private Collection collection211;
+ private Collection collection212;
+ private Collection collection221;
+ private Collection collection222;
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+
+ context.turnOffAuthorisationSystem();
+
+ MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build();
+ MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build();
+ MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build();
+ MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build();
+ MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build();
+ MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build();
+ MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build();
+
+ community1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1")
+ .build();
+ subcommunity11 = CommunityBuilder
+ .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-1")
+ .build();
+ subcommunity12 = CommunityBuilder
+ .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-2")
+ .build();
+ collection111 = CollectionBuilder
+ .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1")
+ .build();
+ collection112 = CollectionBuilder
+ .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2")
+ .build();
+ collection121 = CollectionBuilder
+ .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1")
+ .build();
+
+ collection122 = CollectionBuilder
+ .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2")
+ .build();
+
+ community2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2")
+ .build();
+
+
+ subcommunity21 = CommunityBuilder
+ .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-1")
+ .build();
+ subcommunity22 = CommunityBuilder
+ .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-2")
+ .build();
+ collection211 = CollectionBuilder
+ .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1")
+ .build();
+ collection212 = CollectionBuilder
+ .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2")
+ .build();
+ collection221 = CollectionBuilder
+ .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1")
+ .build();
+ collection222 = CollectionBuilder
+ .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2")
+ .build();
+
+
+ Item item111 = ItemBuilder.createItem(context, collection111)
+ .withMetadata("dc", "contributor", "author", "author-item111")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item111")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item111")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item111")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item111")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item111")
+ .build();
+
+ Item item112 = ItemBuilder.createItem(context, collection112)
+ .withMetadata("dc", "contributor", "author", "author-item112")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item112")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item112")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item112")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item112")
+ .build();
+
+ Item item121 = ItemBuilder.createItem(context, collection121)
+ .withMetadata("dc", "contributor", "author", "author-item121")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item121")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item121")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item121")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item121")
+ .build();
+
+ Item item122 = ItemBuilder.createItem(context, collection122)
+ .withMetadata("dc", "contributor", "author", "author-item122")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item122")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item122")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item122")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item122")
+ .build();
+
+ Item item211 = ItemBuilder.createItem(context, collection211)
+ .withMetadata("dc", "contributor", "author", "author-item211")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item211")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item211")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item211")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item211")
+ .build();
+
+ Item item212 = ItemBuilder.createItem(context, collection212)
+ .withMetadata("dc", "contributor", "author", "author-item212")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item212")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item212")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item212")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item212")
+ .build();
+
+ Item item221 = ItemBuilder.createItem(context, collection221)
+ .withMetadata("dc", "contributor", "author", "author-item221")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item221")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item221")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item221")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item221")
+ .build();
+
+ Item item222 = ItemBuilder.createItem(context, collection222)
+ .withMetadata("dc", "contributor", "author", "author-item222")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222")
+ .withMetadata("dc", "test", "collection111field", "collection111field-item222")
+ .withMetadata("dc", "test", "collection121field", "collection121field-item222")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222")
+ .withMetadata("dc", "test", "collection211field", "collection211field-item222")
+ .withMetadata("dc", "test", "collection221field", "collection221field-item222")
+ .build();
+
+ Item mappedItem111222 = ItemBuilder
+ .createItem(context, collection111)
+ .withMetadata("dc", "contributor", "author", "author-mappedItem111222")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222")
+ .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222")
+ .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem111222")
+ .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem111222")
+ .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222")
+ .build();
+
+
+ Item mappedItem122211 = ItemBuilder
+ .createItem(context, collection122)
+ .withMetadata("dc", "contributor", "author", "author-mappedItem122211")
+ .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211")
+ .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211")
+ .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211")
+ .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211")
+ .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211")
+ .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211")
+ .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211")
+ .build();
+
+
+ collectionService.addItem(context, collection222, mappedItem111222);
+ collectionService.addItem(context, collection211, mappedItem122211);
+
+
+ context.dispatchEvents();
+ context.restoreAuthSystemState();
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community1.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/parentcommunity1field")
+ .param("scope", String.valueOf(community1.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-item111", 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-item112", 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-item121", 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-item122", 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-mappedItem111222",
+ 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-mappedItem122211", 1)
+ )
+ ));
+
+
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("subcommunity11field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/subcommunity11field")
+ .param("scope", String.valueOf(subcommunity11.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("subcommunity11field",
+ "subcommunity11field-item111", 1),
+ FacetValueMatcher.matchEntry("subcommunity11field",
+ "subcommunity11field-item112", 1),
+ FacetValueMatcher.matchEntry("subcommunity11field",
+ "subcommunity11field-mappedItem111222", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("collection111field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/collection111field")
+ .param("scope", String.valueOf(collection111.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("collection111field",
+ "collection111field-item111", 1),
+ FacetValueMatcher.matchEntry("collection111field",
+ "collection111field-mappedItem111222", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("subcommunity11field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/subcommunity11field")
+ .param("scope", String.valueOf(collection112.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("subcommunity11field",
+ "subcommunity11field-item112", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/parentcommunity1field")
+ .param("scope", String.valueOf(subcommunity12.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-item121", 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-item122", 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-mappedItem122211", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("collection121field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/collection121field")
+ .param("scope", String.valueOf(collection121.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("collection121field",
+ "collection121field-item121", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/parentcommunity1field")
+ .param("scope", String.valueOf(collection122.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-item122", 1),
+ FacetValueMatcher.matchEntry("parentcommunity1field",
+ "parentcommunity1field-mappedItem122211", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community2.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.subjectFacet(false),
+ FacetEntryMatcher.dateIssuedFacet(false),
+ FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
+ FacetEntryMatcher.entityTypeFacet(false)
+ ))
+ );
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("subcommunity21field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/subcommunity21field")
+ .param("scope", String.valueOf(subcommunity21.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("subcommunity21field",
+ "subcommunity21field-item211", 1),
+ FacetValueMatcher.matchEntry("subcommunity21field",
+ "subcommunity21field-item212", 1),
+ FacetValueMatcher.matchEntry("subcommunity21field",
+ "subcommunity21field-mappedItem122211", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("collection211field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/collection211field")
+ .param("scope", String.valueOf(collection211.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("collection211field",
+ "collection211field-item211", 1),
+ FacetValueMatcher.matchEntry("collection211field",
+ "collection211field-mappedItem122211", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("subcommunity21field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/subcommunity21field")
+ .param("scope", String.valueOf(collection212.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("subcommunity21field",
+ "subcommunity21field-item212", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception {
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.subjectFacet(false),
+ FacetEntryMatcher.dateIssuedFacet(false),
+ FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
+ FacetEntryMatcher.entityTypeFacet(false)
+ ))
+ );
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.matchFacet("collection221field", "text", false)))
+ );
+
+ getClient().perform(get("/api/discover/facets/collection221field")
+ .param("scope", String.valueOf(collection221.getID())))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._embedded.values",
+ containsInAnyOrder(
+ FacetValueMatcher.matchEntry("collection221field",
+ "collection221field-item221", 1)
+ )
+ ));
+ }
+
+ @Test
+ public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception {
+
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID())))
+
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.type", is("discover")))
+ .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
+ .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
+ FacetEntryMatcher.authorFacet(false),
+ FacetEntryMatcher.subjectFacet(false),
+ FacetEntryMatcher.dateIssuedFacet(false),
+ FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
+ FacetEntryMatcher.entityTypeFacet(false)
+ ))
+ );
+ }
+
+
+}
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java
index 5e3c477506b..34b7b8b30d6 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java
@@ -99,6 +99,17 @@ public static Matcher super Object> hasContentInOriginalBundleFacet(boolean ha
);
}
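+ /**
+ * Check that a facet with the given name and facet type is present in the response
+ *
+ * @param name      the name of the facet
+ * @param facetType the type of the facet (e.g. "text", "date")
+ * @param hasNext   whether a "next" link is expected for this facet
+ */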
+ public static Matcher<? super Object> matchFacet(String name, String facetType, boolean hasNext) {
+ return allOf(
+ hasJsonPath("$.name", is(name)),
+ hasJsonPath("$.facetType", is(facetType)),
+ hasJsonPath("$.facetLimit", any(Integer.class)),
+ hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)),
+ hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name))
+ );
+ }
+
+
/**
* Check that a facet over the dc.type exists and match the default configuration
*
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java
index a68356da532..1efafb5406c 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java
@@ -52,6 +52,16 @@ public static Matcher super Object> entrySubject(String label, int count) {
);
}
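+ /**
+ * Check that a facet value entry of the given facet is present with the expected label and count
+ *
+ * @param facet the name of the facet the value belongs to
+ * @param label the expected label of the facet value
+ * @param count the expected number of matching items
+ */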
+ public static Matcher<? super Object> matchEntry(String facet, String label, int count) {
+ return allOf(
+ hasJsonPath("$.label", is(label)),
+ hasJsonPath("$.type", is("discover")),
+ hasJsonPath("$.count", is(count)),
+ hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")),
+ hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals"))
+ );
+ }
+
public static Matcher<? super Object> entrySubject(String label, String authority, int count) {
return allOf(
diff --git a/machine.cfg b/machine.cfg
new file mode 100644
index 00000000000..14f0d1d0b06
--- /dev/null
+++ b/machine.cfg
@@ -0,0 +1,19 @@
+dspace.shortname = or-platform-7
+
+dspace.dir=/Users/yana/dspaces/or-platform-7
+
+dspace.server.url =http://localhost:8080/server-or7
+dspace.ui.url = http://localhost:4000
+
+# URL for connecting to database
+# * Postgres template: jdbc:postgresql://localhost:5432/dspace
+# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe
+#db.url = ${db.url}
+#db.url = jdbc:postgresql://localhost:5432/or-platform-7
+db.url = jdbc:postgresql://localhost:5434/or-platform-7-4
+
+
+
+solr.server = http://localhost:8983/solr
+
+
From c538b9cbedd2d7ab7ab88b912a5eeb75a180e10d Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Tue, 13 Dec 2022 14:27:29 +0100
Subject: [PATCH 016/693] Add docs and remove unused site configuration
---
.../org/dspace/discovery/SearchUtils.java | 19 ++-
.../DiscoveryConfigurationService.java | 15 ++
.../config/spring/api/discovery.xml | 133 +-----------------
.../DiscoveryScopeBasedRestControllerIT.java | 56 ++++++--
dspace/config/spring/api/discovery.xml | 129 -----------------
5 files changed, 77 insertions(+), 275 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
index 4085e1bbdf3..418720be4a4 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java
@@ -77,6 +77,7 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con
*
*
* @param context
+ * the database context
* @param prefix
* the namespace of the configuration to lookup if any
* @param dso
@@ -92,6 +93,15 @@ public static DiscoveryConfiguration getDiscoveryConfiguration(final Context con
}
}
+ /**
+ * Retrieve the configurations for the provided DSpace object and all of its parents and add them to the provided set
+ * @param context - The database context
+ * @param configurations - The set of configurations to add the retrieved configurations to
+ * @param prefix - The namespace of the configuration to look up, if any
+ * @param dso - The DSpace object
+ * @return the set of configurations, extended with the ones retrieved for the DSpace object and its parents
+ * @throws SQLException if a database error occurs
+ */
public static Set<DiscoveryConfiguration> addDiscoveryConfigurationForParents(
Context context, Set<DiscoveryConfiguration> configurations, String prefix, DSpaceObject dso)
throws SQLException {
@@ -124,6 +134,13 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName(
return configurationService.getDiscoveryConfiguration(configurationName);
}
+
+ /**
+ * Return the discovery configuration for the provided DSO
+ * @param context - The database context
+ * @param dso - The DSpace object to retrieve the configuration for
+ * @return the discovery configuration for the provided DSO
+ */
public static DiscoveryConfiguration getDiscoveryConfigurationByDSO(
Context context, DSpaceObject dso) {
DiscoveryConfigurationService configurationService = getConfigurationService();
@@ -145,7 +162,7 @@ public static List getIgnoredMetadataFields(int type) {
* A configuration object can be returned for each parent community/collection
*
* @param item the DSpace item
- * @param context
+ * @param context the database context
* @return a list of configuration objects
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
index 22443aec22e..c0eba58669e 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
@@ -63,6 +63,13 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final Context context,
return getDiscoveryConfiguration(name);
}
+ /**
+ * Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will
+ * be checked until there is no parent left, in which case the "default" configuration will be returned.
+ * @param context - The database context
+ * @param dso - The DSpace object to retrieve the configuration for
+ * @return the discovery configuration for the provided DSO.
+ */
public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context,
DSpaceObject dso) {
String name;
@@ -91,6 +98,14 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name) {
return getDiscoveryConfiguration(name, true);
}
+ /**
+ * Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration
+ * will be returned when no match is found. When useDefault is set to false, null will be returned when no match is
+ * found.
+ * @param name - The name of the configuration to retrieve
+ * @param useDefault - Whether the default configuration should be used when no match is found
+ * @return the configuration for the provided name
+ */
public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) {
DiscoveryConfiguration result;
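A brief sketch of the documented behaviour, assuming an autowired DiscoveryConfigurationService, a Context and a DSpaceObject; "workspace" is only an illustrative configuration name:

    // Falls back to the "default" configuration when the name is unknown.
    DiscoveryConfiguration withFallback = configurationService.getDiscoveryConfiguration("workspace", true);
    // Returns null when the name is unknown and useDefault is false.
    DiscoveryConfiguration strictLookup = configurationService.getDiscoveryConfiguration("workspace", false);
    // Resolves a DSO's configuration, walking up the parent chain until a match is found.
    DiscoveryConfiguration dsoConfiguration = configurationService.getDiscoveryDSOConfiguration(context, dso);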
diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
index 6ffcbe661c8..e029c65aa00 100644
--- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
+++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
@@ -50,9 +50,6 @@
-->
-
-
-
@@ -77,6 +74,7 @@
+
@@ -543,121 +541,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
index a0edf1a0c70..15c1019584b 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
@@ -29,12 +29,42 @@
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
+/**
+ * This class tests the correct inheritance of Discovery configurations for sub communities and collections.
+ * To test this thoroughly, a community and collection structure is set up in which different communities and
+ * collections have custom configurations assigned to them.
+ *
+ * The following structure is used:
+ * - Parent Community 1 - Custom configuration: discovery-parent-community-1
+ * -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1
+ * -- Collection 111 - Custom configuration: discovery-collection-1-1-1
+ * -- Collection 112
+ * -- Subcommunity 12
+ * -- Collection 121 - Custom configuration: discovery-collection-1-2-1
+ * -- Collection 122
+ * - Parent Community 2
+ * -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1
+ * -- Collection 211 - Custom configuration: discovery-collection-2-1-1
+ * -- Collection 212
+ * -- Subcommunity 22
+ * -- Collection 221 - Custom configuration: discovery-collection-2-2-1
+ * -- Collection 222
+ *
+ * Each custom configuration contains a unique index on a unique metadata field, to verify that the correct
+ * information is indexed and provided for the different search scopes.
+ *
+ * Each collection contains one item. In addition to these items, there are two mapped items: one in collections 111
+ * and 222, and one in collections 122 and 211.
+ *
+ * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to
+ * power these facets are indexed properly.
+ */
public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest {
@Autowired
CollectionService collectionService;
- private Community community1;
+ private Community parentCommunity1;
private Community subcommunity11;
private Community subcommunity12;
private Collection collection111;
@@ -42,7 +72,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg
private Collection collection121;
private Collection collection122;
- private Community community2;
+ private Community parentCommunity2;
private Community subcommunity21;
private Community subcommunity22;
private Collection collection211;
@@ -64,13 +94,13 @@ public void setUp() throws Exception {
MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build();
- community1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1")
- .build();
+ parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1")
+ .build();
subcommunity11 = CommunityBuilder
- .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-1")
+ .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1")
.build();
subcommunity12 = CommunityBuilder
- .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-2")
+ .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2")
.build();
collection111 = CollectionBuilder
.createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1")
@@ -86,15 +116,15 @@ public void setUp() throws Exception {
.createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2")
.build();
- community2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2")
- .build();
+ parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2")
+ .build();
subcommunity21 = CommunityBuilder
- .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-1")
+ .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1")
.build();
subcommunity22 = CommunityBuilder
- .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-2")
+ .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2")
.build();
collection211 = CollectionBuilder
.createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1")
@@ -235,7 +265,7 @@ public void setUp() throws Exception {
@Test
public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception {
- getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community1.getID())))
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
@@ -246,7 +276,7 @@ public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception {
);
getClient().perform(get("/api/discover/facets/parentcommunity1field")
- .param("scope", String.valueOf(community1.getID())))
+ .param("scope", String.valueOf(parentCommunity1.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
@@ -435,7 +465,7 @@ public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception {
@Test
public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception {
- getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community2.getID())))
+ getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml
index 4392e02cb3d..ae1992fbff3 100644
--- a/dspace/config/spring/api/discovery.xml
+++ b/dspace/config/spring/api/discovery.xml
@@ -50,9 +50,6 @@
-->
-
-
-
@@ -534,120 +531,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
From 463edac869855150b3bb1c6e2f31c8a97482a633 Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Tue, 13 Dec 2022 17:08:02 +0100
Subject: [PATCH 017/693] Remove local file
---
machine.cfg | 19 -------------------
1 file changed, 19 deletions(-)
delete mode 100644 machine.cfg
diff --git a/machine.cfg b/machine.cfg
deleted file mode 100644
index 14f0d1d0b06..00000000000
--- a/machine.cfg
+++ /dev/null
@@ -1,19 +0,0 @@
-dspace.shortname = or-platform-7
-
-dspace.dir=/Users/yana/dspaces/or-platform-7
-
-dspace.server.url =http://localhost:8080/server-or7
-dspace.ui.url = http://localhost:4000
-
-# URL for connecting to database
-# * Postgres template: jdbc:postgresql://localhost:5432/dspace
-# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe
-#db.url = ${db.url}
-#db.url = jdbc:postgresql://localhost:5432/or-platform-7
-db.url = jdbc:postgresql://localhost:5434/or-platform-7-4
-
-
-
-solr.server = http://localhost:8983/solr
-
-
From 14534b4eafb8f5333440a624f07395b2cb2f14eb Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Tue, 13 Dec 2022 17:47:35 +0100
Subject: [PATCH 018/693] Move context to first argument in
getDiscoveryConfigurationByNameOrDso
---
.../configuration/DiscoveryConfigurationService.java | 4 ++--
.../app/rest/repository/DiscoveryRestRepository.java | 10 +++++-----
2 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
index c0eba58669e..d7bc3b0f353 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
@@ -119,8 +119,8 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boole
return result;
}
- public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName,
- final Context context,
+ public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final Context context,
+ final String configurationName,
final IndexableObject dso) {
if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) {
return getMap().get(configurationName);
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java
index 1962d44162e..e337e76ef25 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java
@@ -84,7 +84,7 @@ public SearchConfigurationRest getSearchConfiguration(final String dsoScope, fin
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject);
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
}
@@ -96,7 +96,7 @@ public SearchResultsRest getSearchObjects(final String query, final List
Context context = obtainContext();
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject);
DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null;
@@ -121,7 +121,7 @@ public FacetConfigurationRest getFacetsConfiguration(final String dsoScope, fina
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject);
return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration);
}
@@ -138,7 +138,7 @@ public FacetResultsRest getFacetObjects(String facetName, String prefix, String
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject);
DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix,
query, searchFilters, dsoTypes, page, facetName);
@@ -157,7 +157,7 @@ public SearchResultsRest getAllFacets(String query, List dsoTypes, Strin
Pageable page = PageRequest.of(1, 1);
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject);
+ .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject);
DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null;
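For clarity, the call-site change this patch applies throughout DiscoveryRestRepository (a sketch using the names from the diff above):

    // Before: getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject)
    // After: the Context is the first argument, in line with the other lookup methods.
    DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
            .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject);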
From 38b30c394c982c4760a8afc9676bfbe139de5e10 Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Wed, 14 Dec 2022 10:32:54 +0100
Subject: [PATCH 019/693] Fix openSearchController issue
---
.../src/main/java/org/dspace/app/rest/OpenSearchController.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java
index 79ca3817534..665504139cb 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java
@@ -176,7 +176,7 @@ public void search(HttpServletRequest request,
if (dsoObject != null) {
container = scopeResolver.resolveScope(context, dsoObject);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
- .getDiscoveryConfigurationByNameOrDso("site", container);
+ .getDiscoveryConfiguration(context, container);
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray(
From 69500ad5d579f6891bbf35c35e29b18f120b20e9 Mon Sep 17 00:00:00 2001
From: Yana De Pauw
Date: Thu, 15 Dec 2022 11:55:05 +0100
Subject: [PATCH 020/693] Fix discovery test config and make ids for
relationship profiles unique
---
.../config/spring/api/discovery.xml | 779 ++++++++++++++++--
dspace/config/spring/api/discovery.xml | 14 +-
2 files changed, 724 insertions(+), 69 deletions(-)
diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
index e029c65aa00..a5d7682d4cf 100644
--- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
+++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
@@ -48,12 +48,15 @@
the key is used to refer to the page (the "site" or a community/collection handle)
the value-ref is a reference to an identifier of the DiscoveryConfiguration format
-->
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
@@ -61,17 +64,48 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -176,7 +210,145 @@
+ (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ dc.title
+ dc.contributor.author
+ dc.creator
+ dc.subject
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community
+ -withdrawn:true AND -discoverable:false
@@ -313,7 +485,7 @@
- search.resourcetype:Item
+ search.resourcetype:Item AND latestVersion:truewithdrawn:true OR discoverable:false
@@ -455,7 +627,7 @@
- search.resourcetype:Item
+ search.resourcetype:Item AND latestVersion:true
@@ -541,10 +713,11 @@
+
+ class="org.dspace.discovery.configuration.DiscoveryConfiguration"
+ scope="prototype">
@@ -579,7 +752,7 @@
- search.resourcetype:Item OR search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem
+ (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem
@@ -616,8 +789,8 @@
+ class="org.dspace.discovery.configuration.DiscoveryConfiguration"
+ scope="prototype">
@@ -691,8 +864,8 @@
+ class="org.dspace.discovery.configuration.DiscoveryConfiguration"
+ scope="prototype">
@@ -814,7 +987,79 @@
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Publication
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
search.resourcetype:Item AND entityType_keyword:Publication
+ -withdrawn:true AND -discoverable:false
@@ -875,7 +1120,8 @@
- search.resourcetype:Item AND entityType_keyword:Person
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Person
+ -withdrawn:true AND -discoverable:false
@@ -893,23 +1139,28 @@
-
-
+
+
-
+
+
+
-
-
-
-
-
+
+
+
+
+
+
+
+
@@ -918,7 +1169,10 @@
-
+
+
+
+
@@ -928,7 +1182,9 @@
- search.resourcetype:Item AND entityType_keyword:Project
+
+ search.resourcetype:Item AND entityType_keyword:Person
+ -withdrawn:true AND -discoverable:false
@@ -946,27 +1202,200 @@
-
-
+
+
-
-
-
+
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Project
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ search.resourcetype:Item AND entityType_keyword:Project
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -990,7 +1419,9 @@
+
search.resourcetype:Item AND entityType_keyword:OrgUnit
+ -withdrawn:true AND -discoverable:false
@@ -1049,7 +1480,69 @@
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalIssue
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
search.resourcetype:Item AND entityType_keyword:JournalIssue
+ -withdrawn:true AND -discoverable:false
@@ -1107,7 +1600,68 @@
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalVolume
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
search.resourcetype:Item AND entityType_keyword:JournalVolume
+ -withdrawn:true AND -discoverable:false
@@ -1165,7 +1719,68 @@
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Journal
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
search.resourcetype:Item AND entityType_keyword:Journal
+ -withdrawn:true AND -discoverable:false
@@ -1238,7 +1853,8 @@
- search.resourcetype:Item AND (entityType_keyword:OrgUnit OR entityType_keyword:Person)
+ search.resourcetype:Item AND latestVersion:true AND (entityType_keyword:OrgUnit OR entityType_keyword:Person)
+ -withdrawn:true AND -discoverable:false
@@ -1293,7 +1909,8 @@
- search.resourcetype:Item AND entityType_keyword:OrgUnit AND dc.type:FundingOrganization
+ search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit AND dc.type:FundingOrganization
+ -withdrawn:true AND -discoverable:false
@@ -1302,6 +1919,44 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ search.resourcetype:Item
+ search.entitytype:${researcher-profile.entity-type:Person}
+ -withdrawn:true AND -discoverable:false
+
+
+
+
+
+
+
+
+
@@ -2288,7 +2943,7 @@
- relation.isAuthorOfPublication
+ relation.isAuthorOfPublication.latestForDiscovery
@@ -2299,7 +2954,7 @@
- relation.isProjectOfPublication
+ relation.isProjectOfPublication.latestForDiscovery
@@ -2311,7 +2966,7 @@
- relation.isOrgUnitOfPublication
+ relation.isOrgUnitOfPublication.latestForDiscovery
@@ -2322,7 +2977,7 @@
- relation.isPublicationOfJournalIssue
+ relation.isPublicationOfJournalIssue.latestForDiscovery
@@ -2333,7 +2988,7 @@
- relation.isJournalOfPublication
+ relation.isJournalOfPublication.latestForDiscovery
@@ -2539,7 +3194,7 @@
- relation.isOrgUnitOfPerson
+ relation.isOrgUnitOfPerson.latestForDiscovery
@@ -2550,7 +3205,7 @@
- relation.isProjectOfPerson
+ relation.isProjectOfPerson.latestForDiscovery
@@ -2562,7 +3217,7 @@
- relation.isPublicationOfAuthor
+ relation.isPublicationOfAuthor.latestForDiscovery
@@ -2634,7 +3289,7 @@
- relation.isPersonOfOrgUnit
+ relation.isPersonOfOrgUnit.latestForDiscovery
@@ -2645,7 +3300,7 @@
- relation.isProjectOfOrgUnit
+ relation.isProjectOfOrgUnit.latestForDiscovery
@@ -2657,7 +3312,7 @@
- relation.isPublicationOfOrgUnit
+ relation.isPublicationOfOrgUnit.latestForDiscovery
@@ -2711,7 +3366,7 @@
- relation.isPublicationOfJournalIssue
+ relation.isPublicationOfJournalIssue.latestForDiscovery
@@ -2734,7 +3389,7 @@
- relation.isIssueOfJournalVolume
+ relation.isIssueOfJournalVolume.latestForDiscovery
@@ -2745,7 +3400,7 @@
- relation.isJournalOfVolume
+ relation.isJournalOfVolume.latestForDiscovery
@@ -2786,7 +3441,7 @@
- relation.isVolumeOfJournal
+ relation.isVolumeOfJournal.latestForDiscovery
@@ -2811,7 +3466,7 @@
- relation.isOrgUnitOfProject
+ relation.isOrgUnitOfProject.latestForDiscovery
@@ -2823,7 +3478,7 @@
- relation.isPersonOfProject
+ relation.isPersonOfProject.latestForDiscovery
@@ -2835,7 +3490,7 @@
- relation.isPublicationOfProject
+ relation.isPublicationOfProject.latestForDiscovery
@@ -2846,7 +3501,7 @@
- relation.isContributorOfPublication
+ relation.isContributorOfPublication.latestForDiscovery
@@ -2857,7 +3512,7 @@
- relation.isPublicationOfContributor
+ relation.isPublicationOfContributor.latestForDiscovery
@@ -2868,7 +3523,7 @@
- relation.isFundingAgencyOfProject
+ relation.isFundingAgencyOfProject.latestForDiscovery
@@ -2879,7 +3534,7 @@
- relation.isProjectOfFundingAgency
+ relation.isProjectOfFundingAgency.latestForDiscovery
diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml
index 5e2cae5e9fe..37d5f2548a4 100644
--- a/dspace/config/spring/api/discovery.xml
+++ b/dspace/config/spring/api/discovery.xml
@@ -996,7 +996,7 @@
-
+
@@ -1129,7 +1129,7 @@
-
+
@@ -1246,7 +1246,7 @@
-
+
@@ -1366,7 +1366,7 @@
-
+
@@ -1491,7 +1491,7 @@
-
+
@@ -1611,7 +1611,7 @@
-
+
@@ -1730,7 +1730,7 @@
-
+
From 1300cdc75b25181fdeebda20661aaa02b2d92bfc Mon Sep 17 00:00:00 2001
From: Yury Bondarenko
Date: Mon, 19 Dec 2022 11:20:53 +0100
Subject: [PATCH 021/693] 97248: Cache discovery configurations by UUID
---
.../DiscoveryConfigurationService.java | 47 +++++++++++++------
1 file changed, 33 insertions(+), 14 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
index d7bc3b0f353..7d5b435555f 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java
@@ -12,6 +12,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
@@ -34,6 +35,12 @@ public class DiscoveryConfigurationService {
private Map<String, DiscoveryConfiguration> map;
private Map<Integer, List<String>> toIgnoreMetadataFields = new HashMap<>();
+ /**
+ * Discovery configurations, cached by DSO UUID. When a DSO doesn't have its own configuration, we take that of
+ * the first parent that does. This cache ensures we don't have to walk up the hierarchy every time.
+ */
+ private final Map<UUID, DiscoveryConfiguration> uuidMap = new HashMap<>();
+
public Map getMap() {
return map;
}
@@ -72,26 +79,38 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final Context context,
*/
public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context,
DSpaceObject dso) {
- String name;
+ // Fall back to default configuration
if (dso == null) {
- name = "default";
- } else {
- name = dso.getHandle();
+ return getDiscoveryConfiguration("default", false);
}
- DiscoveryConfiguration configuration = getDiscoveryConfiguration(name, false);
- if (configuration != null) {
- return configuration;
+ // Attempt to retrieve cached configuration by UUID
+ if (uuidMap.containsKey(dso.getID())) {
+ return uuidMap.get(dso.getID());
}
- DSpaceObjectService<DSpaceObject> dSpaceObjectService =
+
+ DiscoveryConfiguration configuration;
+
+ // Attempt to retrieve configuration by DSO handle
+ configuration = getDiscoveryConfiguration(dso.getHandle(), false);
+
+ if (configuration == null) {
+ // Recurse up the Comm/Coll hierarchy until a configuration is found
+ DSpaceObjectService<DSpaceObject> dSpaceObjectService =
ContentServiceFactory.getInstance().getDSpaceObjectService(dso);
- DSpaceObject parentObject = null;
- try {
- parentObject = dSpaceObjectService.getParentObject(context, dso);
- } catch (SQLException e) {
- log.error(e);
+ DSpaceObject parentObject = null;
+ try {
+ parentObject = dSpaceObjectService.getParentObject(context, dso);
+ } catch (SQLException e) {
+ log.error(e);
+ }
+ configuration = getDiscoveryDSOConfiguration(context, parentObject);
}
- return getDiscoveryDSOConfiguration(context, parentObject);
+
+ // Cache the resulting configuration
+ uuidMap.put(dso.getID(), configuration);
+
+ return configuration;
}
public DiscoveryConfiguration getDiscoveryConfiguration(final String name) {
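A short usage sketch of the caching behaviour introduced here, assuming an autowired DiscoveryConfigurationService and a Collection that has no configuration of its own (illustrative names only):

    // First lookup misses the UUID cache, resolves the configuration via the parent
    // community/collection hierarchy, and stores the result under the collection's UUID.
    DiscoveryConfiguration first = configurationService.getDiscoveryDSOConfiguration(context, collection);
    // A second lookup for the same object is answered straight from the cache,
    // without another walk up the hierarchy.
    DiscoveryConfiguration second = configurationService.getDiscoveryDSOConfiguration(context, collection);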
From fc9f692bed09f252ab0fcd4d9cd48eb805a7d1f5 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Thu, 12 Jan 2023 17:22:45 +0100
Subject: [PATCH 022/693] [DURACOM-92] Tested and improved
S3BitStoreService.about method
---
dspace-api/pom.xml | 1 +
.../org/dspace/checker/CheckerCommand.java | 2 +-
.../storage/bitstore/BaseBitStoreService.java | 32 +++---
.../storage/bitstore/BitStoreService.java | 5 +-
.../bitstore/BitstreamStorageServiceImpl.java | 31 ++----
.../storage/bitstore/DSBitStoreService.java | 13 +--
.../storage/bitstore/S3BitStoreService.java | 100 ++++++------------
.../service/BitstreamStorageService.java | 2 +-
...tionTest.java => S3BitStoreServiceIT.java} | 55 +++++++++-
9 files changed, 123 insertions(+), 118 deletions(-)
rename dspace-api/src/test/java/org/dspace/storage/bitstore/{S3BitStoreServiceIntegrationTest.java => S3BitStoreServiceIT.java} (87%)
diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml
index 9c873847e03..814add2085a 100644
--- a/dspace-api/pom.xml
+++ b/dspace-api/pom.xml
@@ -917,6 +917,7 @@
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.13.2</version>
+ <scope>test</scope>
diff --git a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java
index 6b16d51bfe1..87b0de4a657 100644
--- a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java
+++ b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java
@@ -245,7 +245,7 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException {
info.setProcessStartDate(new Date());
try {
- Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
+ Map<String, Object> checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
if (MapUtils.isNotEmpty(checksumMap)) {
info.setBitstreamFound(true);
if (checksumMap.containsKey("checksum")) {
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java
index 209c1e21e74..5b367d7a813 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java
@@ -14,6 +14,8 @@
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
@@ -153,22 +155,24 @@ protected boolean isLonger(String internalId, int endIndex) {
* Retrieves a map of useful metadata about the File (size, checksum, modified)
*
* @param file The File to analyze
- * @param attrs The map where we are storing values
+ * @param attrs The list of requested metadata values
* @return Map of updated metadatas / attrs
* @throws IOException
*/
- public Map about(File file, Map attrs) throws IOException {
+ public Map<String, Object> about(File file, List<String> attrs) throws IOException {
+
+ Map<String, Object> metadata = new HashMap<String, Object>();
+
try {
if (file != null && file.exists()) {
- this.putValueIfExistsKey(attrs, SIZE_BYTES, file.length());
- if (attrs.containsKey(CHECKSUM)) {
- attrs.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file)));
- attrs.put(CHECKSUM_ALGORITHM, CSA);
+ this.putValueIfExistsKey(attrs, metadata, SIZE_BYTES, file.length());
+ if (attrs.contains(CHECKSUM)) {
+ metadata.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file)));
+ metadata.put(CHECKSUM_ALGORITHM, CSA);
}
- this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(file.lastModified()));
- return attrs;
+ this.putValueIfExistsKey(attrs, metadata, MODIFIED, String.valueOf(file.lastModified()));
}
- return null;
+ return metadata;
} catch (Exception e) {
log.error("about( FilePath: " + file.getAbsolutePath() + ", Map: " + attrs.toString() + ")", e);
throw new IOException(e);
@@ -204,13 +208,9 @@ private byte[] generateChecksumFrom(FileInputStream fis) throws IOException, NoS
}
}
- protected void putValueIfExistsKey(Map attrs, String key, Object value) {
- this.putEntryIfExistsKey(attrs, key, Map.entry(key, value));
- }
-
- protected void putEntryIfExistsKey(Map attrs, String key, Map.Entry entry) {
- if (attrs.containsKey(key)) {
- attrs.put(entry.getKey(), entry.getValue());
+ protected void putValueIfExistsKey(List<String> attrs, Map<String, Object> metadata, String key, Object value) {
+ if (attrs.contains(key)) {
+ metadata.put(key, value);
}
}
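For illustration, a minimal sketch of the new list-based contract shown above, assuming a BaseBitStoreService-derived store instance named store and an existing asset file (both hypothetical here; the path is illustrative):

    // Request only the attributes that are needed; the returned map contains just those keys.
    List<String> wanted = List.of("size_bytes", "checksum", "checksum_algorithm", "modified");
    Map<String, Object> metadata = store.about(new File("/path/to/asset"), wanted);
    long sizeBytes = (Long) metadata.get("size_bytes");       // set from File#length()
    String checksumHex = (String) metadata.get("checksum");   // hex MD5, paired with "checksum_algorithm"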
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java
index b6ac540c504..5a02ad1d561 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java
@@ -9,6 +9,7 @@
import java.io.IOException;
import java.io.InputStream;
+import java.util.List;
import java.util.Map;
import org.dspace.content.Bitstream;
@@ -62,13 +63,13 @@ public interface BitStoreService {
* Obtain technical metadata about an asset in the asset store.
*
* @param bitstream The bitstream to describe
- * @param attrs A Map whose keys consist of desired metadata fields
+ * @param attrs A List of desired metadata fields
* @return attrs
* A Map with key/value pairs of desired metadata
* If file not found, then return null
* @throws java.io.IOException If a problem occurs while obtaining metadata
*/
- public Map about(Bitstream bitstream, Map attrs) throws IOException;
+ public Map<String, Object> about(Bitstream bitstream, List<String> attrs) throws IOException;
/**
* Remove an asset from the asset store.
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
index 0bd71088da4..3124fd89ca2 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
@@ -165,12 +165,9 @@ public UUID register(Context context, Bitstream bitstream, int assetstore,
bitstream.setStoreNumber(assetstore);
bitstreamService.update(context, bitstream);
- Map wantedMetadata = new HashMap();
- wantedMetadata.put("size_bytes", null);
- wantedMetadata.put("checksum", null);
- wantedMetadata.put("checksum_algorithm", null);
+ List<String> wantedMetadata = List.of("size_bytes", "checksum", "checksum_algorithm");
+ Map<String, Object> receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata);
- Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata);
if (MapUtils.isEmpty(receivedMetadata)) {
String message = "Not able to register bitstream:" + bitstream.getID() + " at path: " + bitstreamPath;
log.error(message);
@@ -200,13 +197,8 @@ public UUID register(Context context, Bitstream bitstream, int assetstore,
}
@Override
- public Map computeChecksum(Context context, Bitstream bitstream) throws IOException {
- Map wantedMetadata = new HashMap();
- wantedMetadata.put("checksum", null);
- wantedMetadata.put("checksum_algorithm", null);
-
- Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata);
- return receivedMetadata;
+ public Map<String, Object> computeChecksum(Context context, Bitstream bitstream) throws IOException {
+ return this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("checksum", "checksum_algorithm"));
}
@Override
@@ -232,10 +224,9 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio
List storage = bitstreamService.findDeletedBitstreams(context);
for (Bitstream bitstream : storage) {
UUID bid = bitstream.getID();
- Map wantedMetadata = new HashMap();
- wantedMetadata.put("size_bytes", null);
- wantedMetadata.put("modified", null);
- Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata);
+ List<String> wantedMetadata = List.of("size_bytes", "modified");
+ Map<String, Object> receivedMetadata = this.getStore(bitstream.getStoreNumber())
+ .about(bitstream, wantedMetadata);
// Make sure entries which do not exist are removed
@@ -328,13 +319,11 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio
@Nullable
@Override
public Long getLastModified(Bitstream bitstream) throws IOException {
- Map attrs = new HashMap();
- attrs.put("modified", null);
- attrs = this.getStore(bitstream.getStoreNumber()).about(bitstream, attrs);
- if (attrs == null || !attrs.containsKey("modified")) {
+ Map<String, Object> metadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("modified"));
+ if (metadata == null || !metadata.containsKey("modified")) {
return null;
}
- return Long.valueOf(attrs.get("modified").toString());
+ return Long.valueOf(metadata.get("modified").toString());
}
/**
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java
index 1fdf1e84e11..6fef7365e48 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java
@@ -15,6 +15,7 @@
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
+import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Logger;
@@ -126,13 +127,13 @@ public void put(Bitstream bitstream, InputStream in) throws IOException {
/**
* Obtain technical metadata about an asset in the asset store.
*
- * @param bitstream The asset to describe
- * @param attrs A Map whose keys consist of desired metadata fields
- * @return attrs
- * A Map with key/value pairs of desired metadata
- * @throws java.io.IOException If a problem occurs while obtaining metadata
+ * @param bitstream The asset to describe
+ * @param attrs A List of desired metadata fields
+ * @return attrs A Map with key/value pairs of desired metadata
+ * @throws java.io.IOException If a problem occurs while obtaining
+ * metadata
*/
- public Map about(Bitstream bitstream, Map attrs) throws IOException {
+ public Map<String, Object> about(Bitstream bitstream, List<String> attrs) throws IOException {
try {
// potentially expensive, since it may calculate the checksum
File file = getFile(bitstream);
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index 622308b00d9..d2c9839905c 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -7,6 +7,8 @@
*/
package org.dspace.storage.bitstore;
+import static java.lang.String.valueOf;
+
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
@@ -14,6 +16,8 @@
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.function.Supplier;
@@ -26,7 +30,6 @@
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.GetObjectRequest;
@@ -42,6 +45,7 @@
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus;
import org.apache.logging.log4j.LogManager;
@@ -208,7 +212,7 @@ public void init() throws IOException {
}
try {
- if (!s3Service.doesBucketExist(bucketName)) {
+ if (!s3Service.doesBucketExistV2(bucketName)) {
s3Service.createBucket(bucketName);
log.info("Creating new S3 Bucket: " + bucketName);
}
@@ -294,10 +298,6 @@ public void put(Bitstream bitstream, InputStream in) throws IOException {
) {
Utils.bufferedCopy(dis, fos);
in.close();
- byte[] md5Digest = dis.getMessageDigest().digest();
- String md5Base64 = Base64.encodeBase64String(md5Digest);
- ObjectMetadata objMetadata = new ObjectMetadata();
- objMetadata.setContentMD5(md5Base64);
Upload upload = tm.upload(bucketName, key, scratchFile);
@@ -306,7 +306,7 @@ public void put(Bitstream bitstream, InputStream in) throws IOException {
bitstream.setSizeBytes(scratchFile.length());
// we cannot use the S3 ETAG here as it could be not a MD5 in case of multipart upload (large files) or if
// the bucket is encrypted
- bitstream.setChecksum(Utils.toHex(md5Digest));
+ bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest()));
bitstream.setChecksumAlgorithm(CSA);
} catch (AmazonClientException | IOException | InterruptedException e) {
@@ -329,86 +329,53 @@ public void put(Bitstream bitstream, InputStream in) throws IOException {
* (Does not use getContentMD5, as that is 128-bit MD5 digest calculated on caller's side)
*
* @param bitstream The asset to describe
- * @param attrs A Map whose keys consist of desired metadata fields
+ * @param attrs A List of desired metadata fields
* @return attrs
* A Map with key/value pairs of desired metadata
* If file not found, then return null
* @throws java.io.IOException If a problem occurs while obtaining metadata
*/
@Override
- public Map about(Bitstream bitstream, Map attrs) throws IOException {
+ public Map<String, Object> about(Bitstream bitstream, List<String> attrs) throws IOException {
+
String key = getFullKey(bitstream.getInternalId());
// If this is a registered bitstream, strip the -R prefix before retrieving
if (isRegisteredBitstream(key)) {
key = key.substring(REGISTERED_FLAG.length());
}
+
+ Map<String, Object> metadata = new HashMap<>();
+
try {
+
ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key);
if (objectMetadata != null) {
- if (attrs.containsKey("size_bytes")) {
- attrs.put("size_bytes", objectMetadata.getContentLength());
- }
- if (attrs.containsKey("modified")) {
- attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime()));
- }
+ putValueIfExistsKey(attrs, metadata, "size_bytes", objectMetadata.getContentLength());
+ putValueIfExistsKey(attrs, metadata, "modified", valueOf(objectMetadata.getLastModified().getTime()));
}
- try (
- InputStream in = get(bitstream);
- // Read through a digest input stream that will work out the MD5
- DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA));
- ) {
- in.close();
- byte[] md5Digest = dis.getMessageDigest().digest();
- String md5Base64 = Base64.encodeBase64String(md5Digest);
- attrs.put("checksum", md5Base64);
- attrs.put("checksum_algorithm", CSA);
- } catch (NoSuchAlgorithmException nsae) {
- // Should never happen
- log.warn("Caught NoSuchAlgorithmException", nsae);
+
+ putValueIfExistsKey(attrs, metadata, "checksum_algorithm", CSA);
+
+ if (attrs.contains("checksum")) {
+ try (InputStream in = get(bitstream)) {
+ byte[] md5Digest = MessageDigest.getInstance(CSA).digest(IOUtils.toByteArray(in));
+ metadata.put("checksum", Base64.encodeBase64String(md5Digest));
+ } catch (NoSuchAlgorithmException nsae) {
+ // Should never happen
+ log.warn("Caught NoSuchAlgorithmException", nsae);
+ }
}
- return attrs;
+
+ return metadata;
} catch (AmazonS3Exception e) {
if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
- return null;
+ return metadata;
}
} catch (AmazonClientException e) {
log.error("about(" + key + ", attrs)", e);
throw new IOException(e);
}
- return null;
- }
-
- private boolean isMD5Checksum(String eTag) {
- // if the etag is NOT an MD5 it end with -x where x is the number of part used in the multipart upload
- return StringUtils.contains(eTag, "-");
- }
-
- /**
- * Populates map values by checking key existence
- *
- * Adds technical metadata about an asset in the asset store, like:
- * <ul>
- * <li>size_bytes</li>
- * <li>checksum</li>
- * <li>checksum_algorithm</li>
- * <li>modified</li>
- * </ul>
- *
- * @param objectMetadata containing technical data
- * @param attrs map with keys populated
- * @return Map of enriched attrs with values
- */
- public Map about(ObjectMetadata objectMetadata, Map attrs) {
- if (objectMetadata != null) {
- this.putValueIfExistsKey(attrs, SIZE_BYTES, objectMetadata.getContentLength());
-
- // put CHECKSUM_ALGORITHM if exists CHECKSUM
- this.putValueIfExistsKey(attrs, CHECKSUM, objectMetadata.getETag());
- this.putEntryIfExistsKey(attrs, CHECKSUM, Map.entry(CHECKSUM_ALGORITHM, CSA));
-
- this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(objectMetadata.getLastModified().getTime()));
- }
- return attrs;
+ return metadata;
}
/**
@@ -572,13 +539,14 @@ public static void main(String[] args) throws Exception {
String accessKey = command.getOptionValue("a");
String secretKey = command.getOptionValue("s");
- String assetFile = command.getOptionValue("f");
S3BitStoreService store = new S3BitStoreService();
AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
- store.s3Service = new AmazonS3Client(awsCredentials);
+ store.s3Service = AmazonS3ClientBuilder.standard()
+ .withCredentials(new AWSStaticCredentialsProvider(awsCredentials))
+ .build();
//Todo configurable region
Region usEast1 = Region.getRegion(Regions.US_EAST_1);
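
The command-line path above still carries a TODO for a configurable region because the region is resolved separately after the client is built. One way the builder could absorb that step, sketched here as an assumption rather than the patch's final approach:

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class S3ClientSketch {
    // Build the client with credentials and region in one pass.
    public static AmazonS3 buildClient(String accessKey, String secretKey, Regions region) {
        return AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(
                        new BasicAWSCredentials(accessKey, secretKey)))
                .withRegion(region)
                .build();
    }
}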
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java
index 209ef5d16be..7f5ed8f9129 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java
@@ -102,7 +102,7 @@ public interface BitstreamStorageService {
public UUID register(Context context, Bitstream bitstream, int assetstore, String bitstreamPath)
throws SQLException, IOException, AuthorizeException;
- public Map computeChecksum(Context context, Bitstream bitstream) throws IOException;
+ public Map computeChecksum(Context context, Bitstream bitstream) throws IOException;
/**
* Does the internal_id column in the bitstream row indicate the bitstream
diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java
similarity index 87%
rename from dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java
rename to dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java
index f362e94dddc..7e14f82be28 100644
--- a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIntegrationTest.java
+++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java
@@ -13,6 +13,7 @@
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@@ -27,6 +28,8 @@
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
@@ -37,6 +40,7 @@
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.ObjectMetadata;
import io.findify.s3mock.S3Mock;
+import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
@@ -60,7 +64,7 @@
/**
* @author Luca Giamminonni (luca.giamminonni at 4science.com)
*/
-public class S3BitStoreServiceIntegrationTest extends AbstractIntegrationTestWithDatabase {
+public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {
private static final String DEFAULT_BUCKET_NAME = "dspace-asset-localhost";
@@ -122,7 +126,7 @@ public void testBitstreamPutAndGetWithAlreadyPresentBucket() throws IOException
s3BitStoreService.put(bitstream, toInputStream(content));
- String expectedChecksum = generateChecksum(content);
+ String expectedChecksum = Utils.toHex(generateChecksum(content));
assertThat(bitstream.getSizeBytes(), is((long) content.length()));
assertThat(bitstream.getChecksum(), is(expectedChecksum));
@@ -153,7 +157,7 @@ public void testBitstreamPutAndGetWithoutSpecifingBucket() throws IOException {
s3BitStoreService.put(bitstream, toInputStream(content));
- String expectedChecksum = generateChecksum(content);
+ String expectedChecksum = Utils.toHex(generateChecksum(content));
assertThat(bitstream.getSizeBytes(), is((long) content.length()));
assertThat(bitstream.getChecksum(), is(expectedChecksum));
@@ -214,6 +218,47 @@ public void testBitstreamDeletion() throws IOException {
}
+ @Test
+ public void testAbout() throws IOException {
+
+ s3BitStoreService.init();
+
+ context.turnOffAuthorisationSystem();
+ String content = "Test bitstream content";
+ Bitstream bitstream = createBitstream(content);
+ context.restoreAuthSystemState();
+
+ s3BitStoreService.put(bitstream, toInputStream(content));
+
+ Map about = s3BitStoreService.about(bitstream, List.of());
+ assertThat(about.size(), is(0));
+
+ about = s3BitStoreService.about(bitstream, List.of("size_bytes"));
+ assertThat(about, hasEntry("size_bytes", 22L));
+ assertThat(about.size(), is(1));
+
+ about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified"));
+ assertThat(about, hasEntry("size_bytes", 22L));
+ assertThat(about, hasEntry(is("modified"), notNullValue()));
+ assertThat(about.size(), is(2));
+
+ String expectedChecksum = Base64.encodeBase64String(generateChecksum(content));
+
+ about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum"));
+ assertThat(about, hasEntry("size_bytes", 22L));
+ assertThat(about, hasEntry(is("modified"), notNullValue()));
+ assertThat(about, hasEntry("checksum", expectedChecksum));
+ assertThat(about.size(), is(3));
+
+ about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum", "checksum_algorithm"));
+ assertThat(about, hasEntry("size_bytes", 22L));
+ assertThat(about, hasEntry(is("modified"), notNullValue()));
+ assertThat(about, hasEntry("checksum", expectedChecksum));
+ assertThat(about, hasEntry("checksum_algorithm", CSA));
+ assertThat(about.size(), is(4));
+
+ }
+
@Test
public void handleRegisteredIdentifierPrefixInS3() {
String trueBitStreamId = "012345";
@@ -338,11 +383,11 @@ public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRem
assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator)));
}
- private String generateChecksum(String content) {
+ private byte[] generateChecksum(String content) {
try {
MessageDigest m = MessageDigest.getInstance("MD5");
m.update(content.getBytes());
- return Utils.toHex(m.digest());
+ return m.digest();
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
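
With generateChecksum() now returning the raw MD5 bytes, the two call sites encode the same digest differently: the bitstream checksum assertions hex-encode it via Utils.toHex, while the about() assertions Base64-encode it. A small self-contained illustration of both encodings of one digest (the sample content is only illustrative):

import java.security.MessageDigest;
import org.apache.commons.codec.binary.Base64;

public class ChecksumEncodingSketch {
    public static void main(String[] args) throws Exception {
        byte[] digest = MessageDigest.getInstance("MD5")
                .digest("Test bitstream content".getBytes());

        // Hex form, as stored on the Bitstream and asserted via Utils.toHex(...)
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        System.out.println("hex:    " + hex);

        // Base64 form, as returned in the S3 about() metadata map
        System.out.println("base64: " + Base64.encodeBase64String(digest));
    }
}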
From b9f5bf9f8972a74bcb8adfa3656109a37d3b2722 Mon Sep 17 00:00:00 2001
From: eskander
Date: Mon, 6 Feb 2023 14:38:46 +0200
Subject: [PATCH 023/693] [DSC-879] Identifiers Signature should check all item
types
---
dspace/config/spring/api/deduplication.xml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/dspace/config/spring/api/deduplication.xml b/dspace/config/spring/api/deduplication.xml
index bdf2747f47b..7dd4f1e7bd2 100644
--- a/dspace/config/spring/api/deduplication.xml
+++ b/dspace/config/spring/api/deduplication.xml
@@ -33,6 +33,7 @@
+
@@ -48,6 +49,7 @@
+
@@ -55,6 +57,7 @@
+
@@ -62,6 +65,7 @@
+
@@ -78,6 +82,7 @@
+
From 69d124603b256e2ddb0de93892a88f41c17d4e9d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Paulo=20Gra=C3=A7a?=
Date: Fri, 10 Feb 2023 15:35:54 +0000
Subject: [PATCH 024/693] Downgrade stylesheet version
Due to a warning message reported in: #8661
---
.../src/main/resources/org/dspace/license/LicenseCleanup.xsl | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl
index 84c62158fe7..d9a9745a1b1 100644
--- a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl
+++ b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl
@@ -8,7 +8,7 @@
http://www.dspace.org/license/
-->
-
-
\ No newline at end of file
+
From e4fcfdada6cc1f8e0950785d0feaab54b3869afe Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Paulo=20Gra=C3=A7a?=
Date: Fri, 10 Feb 2023 15:43:44 +0000
Subject: [PATCH 025/693] Downgrade stylesheet version
Due to a warning message reported in: #8661
---
.../src/main/resources/org/dspace/license/CreativeCommons.xsl | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl
index f32942a302a..d9f6cd36143 100644
--- a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl
+++ b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl
@@ -8,7 +8,7 @@
http://www.dspace.org/license/
-->
-
@@ -47,4 +47,4 @@
-
\ No newline at end of file
+
From 191d3700bb083129c7bd925be2a16df852f9049e Mon Sep 17 00:00:00 2001
From: Francesco Pio Scognamiglio
Date: Thu, 9 Feb 2023 14:45:42 +0100
Subject: [PATCH 026/693] [CST-6402] manage saf import via remote url
---
.../org/dspace/app/itemimport/ItemImport.java | 27 ++++++++++++++++---
.../ItemImportScriptConfiguration.java | 6 ++++-
2 files changed, 28 insertions(+), 5 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java
index 6870b94eee1..2b1089e3e01 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java
@@ -11,6 +11,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.net.URL;
import java.nio.file.Files;
import java.sql.SQLException;
import java.util.ArrayList;
@@ -74,6 +75,7 @@ public class ItemImport extends DSpaceRunnable {
protected boolean isQuiet = false;
protected boolean commandLineCollections = false;
protected boolean zip = false;
+ protected boolean remoteUrl = false;
protected String zipfilename = null;
protected boolean help = false;
protected File workDir = null;
@@ -253,6 +255,17 @@ public void internalRun() throws Exception {
* @param context
*/
protected void validate(Context context) {
+ // check zip type: uploaded file or remote url
+ if (commandLine.hasOption('z')) {
+ zipfilename = commandLine.getOptionValue('z');
+ } else if (commandLine.hasOption('u')) {
+ remoteUrl = true;
+ zipfilename = commandLine.getOptionValue('u');
+ }
+ if (StringUtils.isBlank(zipfilename)) {
+ throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file");
+ }
+
if (command == null) {
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
@@ -306,17 +319,24 @@ protected void process(Context context, ItemImportService itemImportService,
* @throws Exception
*/
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
- Optional optionalFileStream = handler.getFileStream(context, zipfilename);
+ Optional optionalFileStream = Optional.empty();
+ if (!remoteUrl) {
+ // manage zip via upload
+ optionalFileStream = handler.getFileStream(context, zipfilename);
+ } else {
+ // manage zip via remote url
+ optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
+ }
if (optionalFileStream.isPresent()) {
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
- workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
- sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
+ workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
+ sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
}
/**
@@ -356,7 +376,6 @@ protected void setMapFile() throws IOException {
*/
protected void setZip() {
zip = true;
- zipfilename = commandLine.getOptionValue('z');
}
/**
diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java
index a3149040c49..cfe97ad89bd 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java
@@ -64,7 +64,11 @@ public Options getOptions() {
options.addOption(Option.builder("z").longOpt("zip")
.desc("name of zip file")
.type(InputStream.class)
- .hasArg().required().build());
+ .hasArg().build());
+ options.addOption(Option.builder("u").longOpt("url")
+ .desc("url of zip file")
+ .type(InputStream.class)
+ .hasArg().build());
options.addOption(Option.builder("c").longOpt("collection")
.desc("destination collection(s) Handle or database ID")
.hasArg().required(false).build());
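
The new -u option feeds readZip() a remote URL instead of an uploaded file; the stream is copied to a local work file before the existing unzip step runs. A condensed sketch of that branch (the work-file name is an assumption made for illustration):

import java.io.File;
import java.io.InputStream;
import java.net.URL;
import org.apache.commons.io.FileUtils;

public class RemoteSafDownloadSketch {
    // Stream the remote SAF zip into a local work file so the unzip step can reuse it.
    public static File download(String zipUrl, File workDir) throws Exception {
        File workFile = new File(workDir, "saf-import.zip");
        try (InputStream in = new URL(zipUrl).openStream()) {
            FileUtils.copyInputStreamToFile(in, workFile);
        }
        return workFile;
    }
}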
From 3aa3a945afc6070e89a250328c2d68e56bb267b1 Mon Sep 17 00:00:00 2001
From: Tim Donohue
Date: Fri, 17 Feb 2023 10:12:27 -0600
Subject: [PATCH 027/693] [maven-release-plugin] prepare for next development
iteration
---
dspace-api/pom.xml | 2 +-
dspace-iiif/pom.xml | 2 +-
dspace-oai/pom.xml | 2 +-
dspace-rdf/pom.xml | 2 +-
dspace-rest/pom.xml | 4 ++--
dspace-server-webapp/pom.xml | 2 +-
dspace-services/pom.xml | 2 +-
dspace-sword/pom.xml | 2 +-
dspace-swordv2/pom.xml | 2 +-
dspace/modules/additions/pom.xml | 2 +-
dspace/modules/pom.xml | 2 +-
dspace/modules/rest/pom.xml | 2 +-
dspace/modules/server/pom.xml | 2 +-
dspace/pom.xml | 2 +-
pom.xml | 32 ++++++++++++++++----------------
15 files changed, 31 insertions(+), 31 deletions(-)
diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml
index b02778f7bbc..d33387859fb 100644
--- a/dspace-api/pom.xml
+++ b/dspace-api/pom.xml
@@ -12,7 +12,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml
index b79e3954f93..7e26e22fa2e 100644
--- a/dspace-iiif/pom.xml
+++ b/dspace-iiif/pom.xml
@@ -15,7 +15,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml
index 0bd38a13904..27efba73d06 100644
--- a/dspace-oai/pom.xml
+++ b/dspace-oai/pom.xml
@@ -8,7 +8,7 @@
dspace-parentorg.dspace
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml
index 0a4aa46cbd3..95354621aa1 100644
--- a/dspace-rdf/pom.xml
+++ b/dspace-rdf/pom.xml
@@ -9,7 +9,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml
index df97a13ffc9..7fdf21ef4ce 100644
--- a/dspace-rest/pom.xml
+++ b/dspace-rest/pom.xml
@@ -3,7 +3,7 @@
org.dspacedspace-restwar
- 7.5
+ 7.6-SNAPSHOTDSpace (Deprecated) REST WebappDSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED.
Please consider using the REST API in the dspace-server-webapp instead!
@@ -12,7 +12,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml
index 251f36c026a..99aa88bebf0 100644
--- a/dspace-server-webapp/pom.xml
+++ b/dspace-server-webapp/pom.xml
@@ -15,7 +15,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml
index 362027d3da1..f3112b049bd 100644
--- a/dspace-services/pom.xml
+++ b/dspace-services/pom.xml
@@ -9,7 +9,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT
diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml
index 77c3fdb4c4a..f5ef7e01d82 100644
--- a/dspace-sword/pom.xml
+++ b/dspace-sword/pom.xml
@@ -15,7 +15,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml
index 4b20c40898a..35206d6ee29 100644
--- a/dspace-swordv2/pom.xml
+++ b/dspace-swordv2/pom.xml
@@ -13,7 +13,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml
index e71cb6e585a..8f5a6f84f49 100644
--- a/dspace/modules/additions/pom.xml
+++ b/dspace/modules/additions/pom.xml
@@ -17,7 +17,7 @@
org.dspacemodules
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace/modules/pom.xml b/dspace/modules/pom.xml
index 45e78a39d78..b60246ba6cc 100644
--- a/dspace/modules/pom.xml
+++ b/dspace/modules/pom.xml
@@ -11,7 +11,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT../../pom.xml
diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml
index b1a51b33be9..a7c9b5922c6 100644
--- a/dspace/modules/rest/pom.xml
+++ b/dspace/modules/rest/pom.xml
@@ -13,7 +13,7 @@
org.dspacemodules
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml
index ce301196793..9b696fa0cbd 100644
--- a/dspace/modules/server/pom.xml
+++ b/dspace/modules/server/pom.xml
@@ -13,7 +13,7 @@ just adding new jar in the classloader
modulesorg.dspace
- 7.5
+ 7.6-SNAPSHOT..
diff --git a/dspace/pom.xml b/dspace/pom.xml
index 3198433c8d7..7916648e478 100644
--- a/dspace/pom.xml
+++ b/dspace/pom.xml
@@ -16,7 +16,7 @@
org.dspacedspace-parent
- 7.5
+ 7.6-SNAPSHOT../pom.xml
diff --git a/pom.xml b/pom.xml
index 5fc99b9081b..00463e30079 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
org.dspacedspace-parentpom
- 7.5
+ 7.6-SNAPSHOTDSpace Parent Project
DSpace open source software is a turnkey institutional repository application.
@@ -872,14 +872,14 @@
org.dspacedspace-rest
- 7.5
+ 7.6-SNAPSHOTjarclassesorg.dspacedspace-rest
- 7.5
+ 7.6-SNAPSHOTwar
@@ -1030,69 +1030,69 @@
org.dspacedspace-api
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-apitest-jar
- 7.5
+ 7.6-SNAPSHOTtestorg.dspace.modulesadditions
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-sword
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-swordv2
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-oai
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-services
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-server-webapptest-jar
- 7.5
+ 7.6-SNAPSHOTtestorg.dspacedspace-rdf
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-iiif
- 7.5
+ 7.6-SNAPSHOTorg.dspacedspace-server-webapp
- 7.5
+ 7.6-SNAPSHOTjarclassesorg.dspacedspace-server-webapp
- 7.5
+ 7.6-SNAPSHOTwar
@@ -1932,7 +1932,7 @@
scm:git:git@github.com:DSpace/DSpace.gitscm:git:git@github.com:DSpace/DSpace.gitgit@github.com:DSpace/DSpace.git
- dspace-7.5
+ HEAD
From 358e2b5b4117cddd9bee1f237ddcf7b2048eba91 Mon Sep 17 00:00:00 2001
From: nwoodward
Date: Fri, 17 Feb 2023 10:21:34 -0600
Subject: [PATCH 028/693] restores member variables used in sharding functions
---
.../statistics/SolrLoggerServiceImpl.java | 20 +++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java
index a9525203854..773badc41c5 100644
--- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java
@@ -17,9 +17,12 @@
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
+import java.net.URI;
import java.net.URLEncoder;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.sql.SQLException;
import java.text.DateFormat;
import java.text.ParseException;
@@ -174,6 +177,23 @@ protected SolrLoggerServiceImpl() {
@Override
public void afterPropertiesSet() throws Exception {
+ statisticsCoreURL = configurationService.getProperty("solr-statistics.server");
+
+ if (null != statisticsCoreURL) {
+ Path statisticsPath = Paths.get(new URI(statisticsCoreURL).getPath());
+ statisticsCoreBase = statisticsPath
+ .getName(statisticsPath.getNameCount() - 1)
+ .toString();
+ } else {
+ log.warn("Unable to find solr-statistics.server parameter in DSpace configuration. This is required for " +
+ "sharding statistics.");
+ statisticsCoreBase = null;
+ }
+
+ log.info("solr-statistics.server: {}", statisticsCoreURL);
+ log.info("usage-statistics.dbfile: {}",
+ configurationService.getProperty("usage-statistics.dbfile"));
+
solr = solrStatisticsCore.getSolr();
// Read in the file so we don't have to do it all the time
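
The restored initialization derives statisticsCoreBase from the last path segment of the solr-statistics.server URL. A worked example with an illustrative URL (the value shown is an assumption, not a required setting):

import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;

public class StatisticsCoreBaseSketch {
    public static void main(String[] args) throws Exception {
        String statisticsCoreURL = "http://localhost:8983/solr/statistics";
        // The core base is simply the final segment of the URL path.
        Path statisticsPath = Paths.get(new URI(statisticsCoreURL).getPath());
        String statisticsCoreBase = statisticsPath
                .getName(statisticsPath.getNameCount() - 1)
                .toString();
        System.out.println(statisticsCoreBase); // statistics
    }
}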
From b0567aa22490d273fe8c607e1d29ca1c5e278cb5 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 22 Feb 2023 06:56:19 +0000
Subject: [PATCH 029/693] Bump commons-fileupload from 1.3.3 to 1.5
Bumps commons-fileupload from 1.3.3 to 1.5.
---
updated-dependencies:
- dependency-name: commons-fileupload:commons-fileupload
dependency-type: direct:production
...
Signed-off-by: dependabot[bot]
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 00463e30079..3d9e6851d1d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1484,7 +1484,7 @@
commons-fileuploadcommons-fileupload
- 1.3.3
+ 1.5commons-io
From 1acdc55104941d7fd12e787fee37bd54ba6bfdda Mon Sep 17 00:00:00 2001
From: "Mark H. Wood"
Date: Thu, 12 Jan 2023 16:44:20 -0500
Subject: [PATCH 030/693] Improve ResourcePolicy documentation.
---
.../org/dspace/authorize/ResourcePolicy.java | 7 ++
.../org/dspace/authorize/package-info.java | 74 +++++++++++++++++++
.../java/org/dspace/authorize/package.html | 68 -----------------
.../service/ResourcePolicyService.java | 28 +++++--
4 files changed, 104 insertions(+), 73 deletions(-)
create mode 100644 dspace-api/src/main/java/org/dspace/authorize/package-info.java
delete mode 100644 dspace-api/src/main/java/org/dspace/authorize/package.html
diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java
index 954bb969903..38b6aef45bc 100644
--- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java
+++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java
@@ -41,9 +41,16 @@
@Entity
@Table(name = "resourcepolicy")
public class ResourcePolicy implements ReloadableEntity {
+ /** This policy was set on submission, to give the submitter access. */
public static String TYPE_SUBMISSION = "TYPE_SUBMISSION";
+
+ /** This policy was set to allow access by a workflow group. */
public static String TYPE_WORKFLOW = "TYPE_WORKFLOW";
+
+ /** This policy was explicitly set on this object. */
public static String TYPE_CUSTOM = "TYPE_CUSTOM";
+
+ /** This policy was copied from the containing object's default policies. */
public static String TYPE_INHERITED = "TYPE_INHERITED";
@Id
diff --git a/dspace-api/src/main/java/org/dspace/authorize/package-info.java b/dspace-api/src/main/java/org/dspace/authorize/package-info.java
new file mode 100644
index 00000000000..df608abc184
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/authorize/package-info.java
@@ -0,0 +1,74 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+
+/**
+ * Represents permissions for access to DSpace content.
+ *
+ *
Philosophy
+ * DSpace's authorization system follows the classical "police state"
+ * philosophy of security - the user can do nothing, unless it is
+ * specifically allowed. Those permissions are spelled out with
+ * {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table
+ * in the database.
+ *
+ *
Policies are attached to Content
+ * Resource Policies get assigned to all of the content objects in
+ * DSpace - collections, communities, items, bundles, and bitstreams.
+ * (Currently they are not attached to non-content objects such as
+ * {@code EPerson} or {@code Group}. But they could be, hence the name
+ * {@code ResourcePolicy} instead of {@code ContentPolicy}.)
+ *
+ *
Policies are tuples
+ * Authorization is based on evaluating the tuple of (object, action, actor),
+ * such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson}
+ * "John Smith" can read an item. {@code ResourcePolicy} objects are pretty
+ * simple, describing a single instance of (object, action, actor). If multiple
+ * actors are desired, such as groups 10, 11, and 12 are allowed to READ Item
+ * 13, you simply create a {@code ResourcePolicy} for each group.
+ *
+ *
Built-in groups
+ * The install process should create two built-in groups - {@code Anonymous} for
+ * anonymous/public access, and {@code Administrators} for administrators.
+ * Group {@code Anonymous} allows anyone access, even if not authenticated.
+ * Group {@code Administrators}' members have super-user rights,
+ * and are allowed to do any action to any object.
+ *
+ *
Policy types
+ * Policies have a "type" used to distinguish policies which are applied for
+ * specific purposes.
+ *
+ *
CUSTOM
+ *
These are created and assigned explicitly by users.
+ *
INHERITED
+ *
These are copied from a containing object's default policies.
+ *
SUBMISSION
+ *
These are applied during submission to give the submitter access while
+ * composing a submission.
+ *
WORKFLOW
+ *
These are automatically applied during workflow, to give curators access
+ * to submissions in their curation queues. They usually have an
+ * automatically-created workflow group as the actor.
+ *
+ *
Start and End dates
+ * A policy may have a start date and/or an end date. The policy is considered
+ * not valid before the start date or after the end date. No date means do not
+ * apply the related test. For example, embargo until a given date can be
+ * expressed by a READ policy with a given start date, and a limited-time offer
+ * by a READ policy with a given end date.
+ *
+ *
Unused ResourcePolicy attributes
+ * {@code ResourcePolicy} has a few attributes that are currently unused,
+ * but are included with the intent that they will be used someday.
+ * One is the {@code EPerson} - policies could apply to only a single EPerson,
+ * but for ease of administration currently a Group is the recommended unit to
+ * use to describe the actor.
+ *
+ * @author dstuve
+ * @author mwood
+ */
+package org.dspace.authorize;
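
The tuple model described in this package documentation is what the authorization service evaluates at runtime. A sketch of checking one (object, action, actor) tuple, namely whether the current session's user may READ an item, using the usual DSpace service factories; treat the exact signatures as an assumption:

import java.sql.SQLException;

import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;

public class ReadCheckSketch {
    private final AuthorizeService authorizeService =
            AuthorizeServiceFactory.getInstance().getAuthorizeService();

    // Evaluate the (item, READ, current user) tuple; policies grant, absence denies.
    public boolean canRead(Context context, Item item) {
        try {
            authorizeService.authorizeAction(context, item, Constants.READ);
            return true;
        } catch (AuthorizeException | SQLException e) {
            return false;
        }
    }
}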
diff --git a/dspace-api/src/main/java/org/dspace/authorize/package.html b/dspace-api/src/main/java/org/dspace/authorize/package.html
deleted file mode 100644
index 66ce0f82477..00000000000
--- a/dspace-api/src/main/java/org/dspace/authorize/package.html
+++ /dev/null
@@ -1,68 +0,0 @@
-
-
-
-
-
-
-
-
Handles permissions for DSpace content.
-
-
-
Philosophy
-DSpace's authorization system follows the classical "police state"
-philosophy of security - the user can do nothing, unless it is
-specifically allowed. Those permissions are spelled out with
-ResourcePolicy objects, stored in the resourcepolicy table in the
-database.
-
-
-
Policies are attached to Content
-
Policies are attached to Content
-Resource Policies get assigned to all of the content objects in
-DSpace - collections, communities, items, bundles, and bitstreams.
-(Currently they are not attached to non-content objects such as EPerson
-or Group. But they could be, hence the name ResourcePolicy instead of
-ContentPolicy.)
-
-
-
Policies are tuples
-Authorization is based on evaluating the tuple of (object, action, who),
-such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith"
-can read an item. ResourcePolicy objects are pretty simple, describing a single instance of
-(object, action, who). If multiple who's are desired, such as Groups 10, 11, and
-12 are allowed to READ Item 13, you simply create a ResourcePolicy for each
-group.
-
-
-
Special Groups
-The install process should create two special groups - group 0, for
-anonymous/public access, and group 1 for administrators.
-Group 0 (public/anonymous) allows anyone access, even if they are not
-authenticated. Group 1's (admin) members have super-user rights, and
-are allowed to do any action to any object.
-
-
-
Unused ResourcePolicy attributes
-ResourcePolicies have a few attributes that are currently unused,
-but are included with the intent that they will be used someday.
-One is start and end dates, for when policies will be active, so that
-permissions for content can change over time. The other is the EPerson -
-policies could apply to only a single EPerson, but for ease of
-administration currently a Group is the recommended unit to use to
-describe 'who'.
-
-
-
-
diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java
index f1d8b30242a..726078d7438 100644
--- a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java
+++ b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java
@@ -53,12 +53,19 @@ public List find(Context c, EPerson e, List groups, int a
throws SQLException;
/**
- * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
- * This method can be used to detect duplicate ResourcePolicies.
+ * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring
+ * IDs with a specific PolicyID. This method can be used to detect duplicate
+ * ResourcePolicies.
*
- * @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
- * @return List of resource policies for the same DSpaceObject, group and action but other policyID.
- * @throws SQLException
+ * @param context current DSpace session.
+ * @param dso find policies for this object.
+ * @param group find policies referring to this group.
+ * @param action find policies for this action.
+ * @param notPolicyID ResourcePolicies with this ID will be ignored while
+ * looking out for equal ResourcePolicies.
+ * @return List of resource policies for the same DSpaceObject, group and
+ * action but other policyID.
+ * @throws SQLException passed through.
*/
public List findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group,
int action, int notPolicyID)
@@ -68,6 +75,16 @@ public List findByTypeGroupActionExceptId(Context context, DSpac
public boolean isDateValid(ResourcePolicy resourcePolicy);
+ /**
+ * Create and persist a copy of a given ResourcePolicy, with an empty
+ * dSpaceObject field.
+ *
+ * @param context current DSpace session.
+ * @param resourcePolicy the policy to be copied.
+ * @return the copy.
+ * @throws SQLException passed through.
+ * @throws AuthorizeException passed through.
+ */
public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException;
public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException;
@@ -117,6 +134,7 @@ public List findExceptRpType(Context c, DSpaceObject o, int acti
* @param ePerson ePerson whose policies want to find
* @param offset the position of the first result to return
* @param limit paging limit
+ * @return some of the policies referring to {@code ePerson}.
* @throws SQLException if database error
*/
public List findByEPerson(Context context, EPerson ePerson, int offset, int limit)
From 47fab88c17d8ac821f99c0d597cc6e8d3c153a8a Mon Sep 17 00:00:00 2001
From: Kristof De Langhe
Date: Fri, 24 Feb 2023 17:30:28 +0100
Subject: [PATCH 031/693] 89779:
VersionedHandleIdentifierProviderWithCanonicalHandles fix pt1
---
.../spring/spring-dspace-core-services.xml | 15 ---
...VersionedHandleIdentifierProviderTest.java | 100 ++++++++++++++++++
.../config/spring/api/identifier-service.xml | 2 -
3 files changed, 100 insertions(+), 17 deletions(-)
create mode 100644 dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java
diff --git a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml
index 87bfcbc86c9..3ce641d99c3 100644
--- a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml
+++ b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml
@@ -13,15 +13,6 @@
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
-
-
-
-
@@ -31,12 +22,6 @@
-
-
-
-
diff --git a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java
new file mode 100644
index 00000000000..9db55bb3123
--- /dev/null
+++ b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderTest.java
@@ -0,0 +1,100 @@
+package org.dspace.identifier;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.sql.SQLException;
+import java.util.List;
+
+import org.dspace.AbstractIntegrationTestWithDatabase;
+import org.dspace.authorize.AuthorizeException;
+import org.dspace.builder.CollectionBuilder;
+import org.dspace.builder.CommunityBuilder;
+import org.dspace.builder.ItemBuilder;
+import org.dspace.builder.VersionBuilder;
+import org.dspace.content.Collection;
+import org.dspace.content.Item;
+import org.dspace.identifier.service.IdentifierService;
+import org.dspace.kernel.ServiceManager;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
+import org.dspace.utils.DSpace;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class VersionedHandleIdentifierProviderTest extends AbstractIntegrationTestWithDatabase {
+ private ServiceManager serviceManager;
+
+ private String handlePrefix;
+
+ private Collection collection;
+ private Item itemV1;
+ private Item itemV2;
+ private Item itemV3;
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ context.turnOffAuthorisationSystem();
+
+ ConfigurationService configurationService = new DSpace().getConfigurationService();
+ handlePrefix = configurationService.getProperty("handle.prefix");
+
+ serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
+
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ collection = CollectionBuilder.createCollection(context, parentCommunity)
+ .withName("Collection")
+ .build();
+ }
+
+ private void registerProvider(Class type) {
+ // Register our new provider
+ serviceManager.registerServiceClass(type.getName(), type);
+ IdentifierProvider identifierProvider =
+ (IdentifierProvider) serviceManager.getServiceByName(type.getName(), type);
+
+ // Overwrite the identifier-service's providers with the new one to ensure only this provider is used
+ IdentifierServiceImpl identifierService = serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0);
+ identifierService.setProviders(List.of(identifierProvider));
+ }
+
+ private void createVersions() throws SQLException, AuthorizeException {
+ itemV1 = ItemBuilder.createItem(context, collection)
+ .withTitle("First version")
+ .build();
+ itemV2 = VersionBuilder.createVersion(context, itemV1, "Second version").build().getItem();
+ itemV3 = VersionBuilder.createVersion(context, itemV1, "Third version").build().getItem();
+ }
+
+ @Test
+ public void testDefaultVersionedHandleProvider() throws Exception {
+ registerProvider(VersionedHandleIdentifierProvider.class);
+ createVersions();
+
+ assertEquals(handlePrefix + "/1", itemV1.getHandle());
+ assertEquals(handlePrefix + "/1.2", itemV2.getHandle());
+ assertEquals(handlePrefix + "/1.3", itemV3.getHandle());
+ }
+
+ @Test
+ public void testCanonicalVersionedHandleProvider() throws Exception {
+ registerProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class);
+ createVersions();
+
+ assertEquals(handlePrefix + "/1.3", itemV1.getHandle());
+ assertEquals(handlePrefix + "/1.2", itemV2.getHandle());
+ assertEquals(handlePrefix + "/1", itemV3.getHandle());
+ }
+
+ @After
+ @Override
+ public void destroy() throws Exception {
+ super.destroy();
+ // serviceManager.getApplicationContext().refresh();
+ }
+}
diff --git a/dspace/config/spring/api/identifier-service.xml b/dspace/config/spring/api/identifier-service.xml
index e9f08003bd6..dd5716a62e9 100644
--- a/dspace/config/spring/api/identifier-service.xml
+++ b/dspace/config/spring/api/identifier-service.xml
@@ -17,11 +17,9 @@
The VersionedHandleIdentifierProvider creates a new versioned
handle for every new version.
-->
-
@
-
- org.dspace.app.rest.Application
-
- org.apache.maven.plugins
- maven-war-plugin
-
- true
-
- true
-
-
-
- prepare-package
-
-
-
-
- org.apache.maven.plugins
- maven-jar-plugin
-
-
-
-
- test-jar
-
-
-
-
-
-
- com.mycila
- license-maven-plugin
-
-
- **/src/test/resources/**
- **/src/test/data/**
-
- src/main/webapp/index.html
- src/main/webapp/login.html
- src/main/webapp/styles.css
- src/main/webapp/js/hal/**
- src/main/webapp/js/vendor/**
-
-
-
+
+ org.dspace
+ dspace-parent
+ cris-2022.03.01-SNAPSHOT
+ ..
+
+
+
+
+ ${basedir}/..
+
+ @
+
+
+
+
+
+ org.dspace.modules
+ additions
+
+
+ org.dspace
+ dspace-server-webapp
+
+
+ org.apache.solr
+ solr-solrj
+
+
+
+
+ org.dspace
+ dspace-api
+ test-jar
+ test
+
+
+ org.dspace
+ dspace-server-webapp
+ test-jar
+ test
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+ org.springframework.security
+ spring-security-test
+ ${spring-security.version}
+ test
+
+
+ com.jayway.jsonpath
+ json-path-assert
+ ${json-path.version}
+ test
+
+
+ junit
+ junit
+ test
+
+
+ com.h2database
+ h2
+ test
+
+
+ org.mockito
+ mockito-inline
+ test
+
+
+
+
+ org.apache.solr
+ solr-core
+ ${solr.client.version}
+ test
+
+
+
+ org.apache.commons
+ commons-text
+
+
+
+
+ org.apache.lucene
+ lucene-analyzers-icu
+ test
+
+
+
+
+
+
+
+
+ com.mycila
+ license-maven-plugin
+
+
+ **/src/test/resources/**
+ **/src/test/data/**
+
+ src/main/resources/static/index.html
+ src/main/resources/static/login.html
+ src/main/resources/static/styles.css
+ src/main/resources/static/js/hal/**
+ src/main/resources/static/js/vendor/**
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+
+
+
+
+
diff --git a/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java b/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java
new file mode 100644
index 00000000000..90039887f86
--- /dev/null
+++ b/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java
@@ -0,0 +1,45 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app;
+
+import org.dspace.app.rest.WebApplication;
+import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
+import org.dspace.app.rest.utils.DSpaceKernelInitializer;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
+
+@SpringBootApplication(scanBasePackageClasses = WebApplication.class)
+public class Application extends SpringBootServletInitializer {
+
+ public static void main(String[] args) {
+ new SpringApplicationBuilder(Application.class)
+ .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer())
+ .run(args);
+ }
+
+ /**
+ * Override the default SpringBootServletInitializer.configure() method,
+ * passing it this Application class.
+ *
+ * This is necessary to allow us to build a deployable WAR, rather than
+ * always relying on embedded Tomcat.
+ *
+ * See: http://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#howto-create-a-deployable-war-file
+ *
+ * @param application
+ * @return
+ */
+ @Override
+ protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
+ // Pass this Application class, and our initializers for DSpace Kernel and Configuration
+ // NOTE: Kernel must be initialized before Configuration
+ return application.sources(Application.class)
+ .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer());
+ }
+}
diff --git a/dspace-server-webapp/src/main/resources/application.properties b/dspace-webapp-boot/src/main/resources/application.properties
similarity index 99%
rename from dspace-server-webapp/src/main/resources/application.properties
rename to dspace-webapp-boot/src/main/resources/application.properties
index a10e0f98a00..8dd8e2903cc 100644
--- a/dspace-server-webapp/src/main/resources/application.properties
+++ b/dspace-webapp-boot/src/main/resources/application.properties
@@ -27,6 +27,7 @@
# http://docs.spring.io/spring-boot/docs/current/reference/html/common-application-properties.html
#
+server.servlet.context-path=/server
########################
# DSpace Settings
#
diff --git a/dspace-server-webapp/src/main/webapp/index.html b/dspace-webapp-boot/src/main/resources/static/index.html
similarity index 100%
rename from dspace-server-webapp/src/main/webapp/index.html
rename to dspace-webapp-boot/src/main/resources/static/index.html
diff --git a/dspace-server-webapp/src/main/webapp/js/hal/http/client.js b/dspace-webapp-boot/src/main/resources/static/js/hal/http/client.js
similarity index 100%
rename from dspace-server-webapp/src/main/webapp/js/hal/http/client.js
rename to dspace-webapp-boot/src/main/resources/static/js/hal/http/client.js
diff --git a/dspace-server-webapp/src/main/webapp/js/vendor/CustomPostForm.js b/dspace-webapp-boot/src/main/resources/static/js/vendor/CustomPostForm.js
similarity index 100%
rename from dspace-server-webapp/src/main/webapp/js/vendor/CustomPostForm.js
rename to dspace-webapp-boot/src/main/resources/static/js/vendor/CustomPostForm.js
diff --git a/dspace-server-webapp/src/main/webapp/login.html b/dspace-webapp-boot/src/main/resources/static/login.html
similarity index 100%
rename from dspace-server-webapp/src/main/webapp/login.html
rename to dspace-webapp-boot/src/main/resources/static/login.html
diff --git a/dspace-server-webapp/src/main/webapp/styles.css b/dspace-webapp-boot/src/main/resources/static/styles.css
similarity index 100%
rename from dspace-server-webapp/src/main/webapp/styles.css
rename to dspace-webapp-boot/src/main/resources/static/styles.css
diff --git a/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java b/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java
similarity index 100%
rename from dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java
rename to dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java
diff --git a/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java b/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java
similarity index 100%
rename from dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java
rename to dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java
diff --git a/dspace/config/log4j2-console.xml b/dspace/config/log4j2-console.xml
index 3d51b123367..a0322abf19d 100644
--- a/dspace/config/log4j2-console.xml
+++ b/dspace/config/log4j2-console.xml
@@ -25,7 +25,7 @@
For command line / Ant scripts, we are only concerned about significant warnings/errors.
For the full detail, change this to INFO and re-run Ant. -->
-
+
diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml
deleted file mode 100644
index f890d39e699..00000000000
--- a/dspace/modules/server/pom.xml
+++ /dev/null
@@ -1,355 +0,0 @@
-
- 4.0.0
- org.dspace.modules
- server
- war
- DSpace Server Webapp:: Local Customizations
- Overlay customizations.
-This is probably a temporary solution to the build problems. We like to investigate about
-the possibility to remove the overlays enable a more flexible extension mechanism.
-The use of web-fragment and spring mvc technology allow us to add request handlers
-just adding new jar in the classloader
-
-
- modules
- org.dspace
- cris-2022.03.01-SNAPSHOT
- ..
-
-
-
-
- ${basedir}/../../..
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-dependency-plugin
-
-
- unpack
- prepare-package
-
- unpack-dependencies
-
-
- org.dspace.modules
- additions
-
- ${project.build.directory}/additions
- META-INF/**
-
-
-
-
-
- org.apache.maven.plugins
- maven-war-plugin
-
- false
-
- true
-
-
-
- ${project.build.directory}/additions
- WEB-INF/classes
-
-
-
-
-
- prepare-package
-
-
-
-
-
- org.codehaus.gmaven
- groovy-maven-plugin
-
-
- setproperty
- initialize
-
- execute
-
-
-
-
-
-
-
-
-
-
-
-
-
- unit-test-environment
-
- false
-
- skipUnitTests
- false
-
-
-
-
-
-
- maven-dependency-plugin
-
- ${project.build.directory}/testing
-
-
- org.dspace
- dspace-parent
- ${project.version}
- zip
- testEnvironment
-
-
-
-
-
- setupUnitTestEnvironment
- generate-test-resources
-
- unpack
-
-
-
-
-
-
-
- maven-surefire-plugin
-
-
-
-
-
- ${agnostic.build.dir}/testing/dspace
-
- true
- ${agnostic.build.dir}/testing/dspace/solr/
-
-
-
-
-
-
-
-
- org.dspace
- dspace-server-webapp
- test-jar
- test
-
-
-
-
-
-
- integration-test-environment
-
- false
-
- skipIntegrationTests
- false
-
-
-
-
-
-
- maven-dependency-plugin
-
- ${project.build.directory}/testing
-
-
- org.dspace
- dspace-parent
- ${project.version}
- zip
- testEnvironment
-
-
-
-
-
- setupIntegrationTestEnvironment
- pre-integration-test
-
- unpack
-
-
-
-
-
-
-
- maven-failsafe-plugin
-
-
-
-
- ${agnostic.build.dir}/testing/dspace
-
- true
- ${agnostic.build.dir}/testing/dspace/solr/
-
-
-
-
-
-
-
-
- org.dspace
- dspace-server-webapp
- test-jar
- test
-
-
-
-
-
- oracle-support
-
-
- db.name
- oracle
-
-
-
-
- com.oracle
- ojdbc6
-
-
-
-
-
-
-
-
- org.dspace.modules
- additions
-
-
- org.dspace
- dspace-server-webapp
- classes
-
-
- org.dspace
- dspace-server-webapp
- war
-
-
- org.apache.solr
- solr-solrj
- ${solr.client.version}
-
-
-
-
- org.dspace
- dspace-api
- test-jar
- test
-
-
- org.dspace
- dspace-server-webapp
- test-jar
- test
-
-
- org.springframework.boot
- spring-boot-starter-test
- test
-
-
- org.springframework.security
- spring-security-test
- ${spring-security.version}
- test
-
-
- com.jayway.jsonpath
- json-path-assert
- ${json-path.version}
- test
-
-
- junit
- junit
- test
-
-
- com.h2database
- h2
- test
-
-
- org.mockito
- mockito-inline
- test
-
-
-
-
- org.apache.solr
- solr-core
- ${solr.client.version}
- test
-
-
-
- org.apache.commons
- commons-text
-
-
-
-
- org.apache.lucene
- lucene-analyzers-icu
- test
-
-
-
-
-
diff --git a/dspace/modules/server/src/main/webapp/.gitignore b/dspace/modules/server/src/main/webapp/.gitignore
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/pom.xml b/pom.xml
index d30aa94e2a2..6a0ed9272cb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -798,6 +798,21 @@
+
+
+ dspace-webapp-boot
+
+
+ dspace-webapp-boot/pom.xml
+
+
+
+ dspace-webapp-boot
+
+
+
@@ -1133,32 +1148,24 @@
org.dspacedspace-server-webapp
- test-jar
- cris-2022.03.01-SNAPSHOT
- test
-
-
- org.dspace
- dspace-rdfcris-2022.03.01-SNAPSHOTorg.dspace
- dspace-iiif
+ dspace-server-webapp
+ test-jarcris-2022.03.01-SNAPSHOT
+ testorg.dspace
- dspace-server-webapp
+ dspace-rdfcris-2022.03.01-SNAPSHOT
- jar
- classesorg.dspace
- dspace-server-webapp
+ dspace-iiifcris-2022.03.01-SNAPSHOT
- war
From e9ed6d2d6327f1ce8aab8df6f498a462b37e6f56 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 15:32:59 +0100
Subject: [PATCH 043/693] [DSC-963] Fixed test configuration
---
.../src/test/resources/application.properties | 64 +++++++++++++++++++
.../src/main/resources/application.properties | 7 +-
2 files changed, 70 insertions(+), 1 deletion(-)
create mode 100644 dspace-server-webapp/src/test/resources/application.properties
diff --git a/dspace-server-webapp/src/test/resources/application.properties b/dspace-server-webapp/src/test/resources/application.properties
new file mode 100644
index 00000000000..9b408d9612d
--- /dev/null
+++ b/dspace-server-webapp/src/test/resources/application.properties
@@ -0,0 +1,64 @@
+#
+# The contents of this file are subject to the license and copyright
+# detailed in the LICENSE and NOTICE files at the root of the source
+# tree and available online at
+#
+# http://www.dspace.org/license/
+#
+
+# Spring Boot's Test application.properties
+
+########################
+# Jackson serialization settings
+#
+spring.jackson.serialization.fail-on-empty-beans=false
+
+########################
+# Internationalization
+#
+# Base Path for our messages file (i18n)
+spring.messages.basename=i18n/messages
+spring.messages.encoding=UTF-8
+
+########################
+# URI Encoding and Decoding
+#
+#
+# Charset of HTTP requests and responses. Added to the "Content-Type" header if not set explicitly.
+server.servlet.encoding.charset=UTF-8
+# Force the encoding to the configured charset on HTTP requests and responses.
+server.servlet.encoding.force=true
+
+###########################
+# Server Properties
+#
+# Error handling settings
+server.error.include-stacktrace = never
+
+# When to include the error message in error responses (introduced in Spring 2.3.x)
+server.error.include-message = always
+
+# Spring Boot proxy configuration (can be overridden in local.cfg).
+server.forward-headers-strategy=FRAMEWORK
+
+######################
+# Cache Properties
+# Added for IIIF cache support.
+# Path to configuration file.
+spring.cache.jcache.config=classpath:iiif/cache/ehcache.xml
+
+######################
+# Spring Boot Autoconfigure
+#
+# TODO: At some point we may want to investigate whether we can re-enable these and remove the custom DSpace init code
+spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration, \
+ org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration, \
+ org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration, \
+ org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration, \
+ org.springframework.boot.autoconfigure.velocity.VelocityAutoConfiguration
+
+spring.main.allow-bean-definition-overriding = true
+
+#########################
+# Spring Boot Logging levels
+logging.config = classpath:log4j2-test.xml
diff --git a/dspace-webapp-boot/src/main/resources/application.properties b/dspace-webapp-boot/src/main/resources/application.properties
index 8dd8e2903cc..0c26d530b74 100644
--- a/dspace-webapp-boot/src/main/resources/application.properties
+++ b/dspace-webapp-boot/src/main/resources/application.properties
@@ -27,7 +27,6 @@
# http://docs.spring.io/spring-boot/docs/current/reference/html/common-application-properties.html
#
-server.servlet.context-path=/server
########################
# DSpace Settings
#
@@ -38,6 +37,12 @@ server.servlet.context-path=/server
# NOTE: this configuration is filled out by Apache Ant during the DSpace install/update process. It does NOT
# interact with or read its configuration from dspace.cfg.
dspace.dir=${dspace.dir}
+
+########################
+# Servlet context path configuration for spring boot application running with embedded tomcat
+#
+server.servlet.context-path=/server
+
########################
# Jackson serialization settings
#
From 98ef4e560f9625df842cd4a5d442c9b852eff503 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 15:37:57 +0100
Subject: [PATCH 044/693] [DSC-963] Fixed dspace pom
---
dspace/pom.xml | 6 +++++-
pom.xml | 5 +++++
2 files changed, 10 insertions(+), 1 deletion(-)
diff --git a/dspace/pom.xml b/dspace/pom.xml
index 47ef9c46475..e3e05f0c096 100644
--- a/dspace/pom.xml
+++ b/dspace/pom.xml
@@ -217,7 +217,11 @@
org.dspacedspace-server-webapp
- war
+ compile
+
+
+ org.dspace
+ dspace-webapp-bootcompile
diff --git a/pom.xml b/pom.xml
index 6a0ed9272cb..fd6542421c7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1157,6 +1157,11 @@
cris-2022.03.01-SNAPSHOTtest
+
+ org.dspace
+ dspace-webapp-boot
+ cris-2022.03.01-SNAPSHOT
+ org.dspacedspace-rdf
From baeab16708a01f0c6a2d32886040292288174190 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 16:14:50 +0100
Subject: [PATCH 045/693] [DSC-963] Improved tests configuration
---
.../src/main/resources/application.properties | 0
.../src/test/resources/application.properties | 64 -------------------
2 files changed, 64 deletions(-)
rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/application.properties (100%)
delete mode 100644 dspace-server-webapp/src/test/resources/application.properties
diff --git a/dspace-webapp-boot/src/main/resources/application.properties b/dspace-server-webapp/src/main/resources/application.properties
similarity index 100%
rename from dspace-webapp-boot/src/main/resources/application.properties
rename to dspace-server-webapp/src/main/resources/application.properties
diff --git a/dspace-server-webapp/src/test/resources/application.properties b/dspace-server-webapp/src/test/resources/application.properties
deleted file mode 100644
index 9b408d9612d..00000000000
--- a/dspace-server-webapp/src/test/resources/application.properties
+++ /dev/null
@@ -1,64 +0,0 @@
-#
-# The contents of this file are subject to the license and copyright
-# detailed in the LICENSE and NOTICE files at the root of the source
-# tree and available online at
-#
-# http://www.dspace.org/license/
-#
-
-# Spring Boot's Test application.properties
-
-########################
-# Jackson serialization settings
-#
-spring.jackson.serialization.fail-on-empty-beans=false
-
-########################
-# Internationalization
-#
-# Base Path for our messages file (i18n)
-spring.messages.basename=i18n/messages
-spring.messages.encoding=UTF-8
-
-########################
-# URI Encoding and Decoding
-#
-#
-# Charset of HTTP requests and responses. Added to the "Content-Type" header if not set explicitly.
-server.servlet.encoding.charset=UTF-8
-# Force the encoding to the configured charset on HTTP requests and responses.
-server.servlet.encoding.force=true
-
-###########################
-# Server Properties
-#
-# Error handling settings
-server.error.include-stacktrace = never
-
-# When to include the error message in error responses (introduced in Spring 2.3.x)
-server.error.include-message = always
-
-# Spring Boot proxy configuration (can be overridden in local.cfg).
-server.forward-headers-strategy=FRAMEWORK
-
-######################
-# Cache Properties
-# Added for IIIF cache support.
-# Path to configuration file.
-spring.cache.jcache.config=classpath:iiif/cache/ehcache.xml
-
-######################
-# Spring Boot Autoconfigure
-#
-# TODO: At some point we may want to investigate whether we can re-enable these and remove the custom DSpace init code
-spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration, \
- org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration, \
- org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration, \
- org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration, \
- org.springframework.boot.autoconfigure.velocity.VelocityAutoConfiguration
-
-spring.main.allow-bean-definition-overriding = true
-
-#########################
-# Spring Boot Logging levels
-logging.config = classpath:log4j2-test.xml
From 05b6251469d5873a7bda8edc27cf81b17d22e2de Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 16:42:00 +0100
Subject: [PATCH 046/693] [DSC-963] Fixed dspace-server-webapp pom
---
dspace-server-webapp/pom.xml | 19 +++++++++++++++++++
1 file changed, 19 insertions(+)
diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml
index e6423a7bc8b..fa607629e7d 100644
--- a/dspace-server-webapp/pom.xml
+++ b/dspace-server-webapp/pom.xml
@@ -57,6 +57,25 @@
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+
+ true
+ true
+
+
+
+
+
+
+ test-jar
+
+
+
+
From ab2ff11216eb38598e9fbe32ada6a496e2633f8f Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 17:39:33 +0100
Subject: [PATCH 047/693] [DSC-963] Fixed Sword tests
---
.../src/test/java/org/dspace/app/rdf/RdfIT.java | 2 +-
.../src/test/java/org/dspace/app/sword/Swordv1IT.java | 2 +-
.../src/test/java/org/dspace/app/sword2/Swordv2IT.java | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java
index 85ab3dcadd7..10f06370ad5 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java
@@ -47,7 +47,7 @@
*/
// Ensure the RDF endpoint IS ENABLED before any tests run.
// This annotation overrides default DSpace config settings loaded into Spring Context
-@TestPropertySource(properties = {"rdf.enabled = true"})
+@TestPropertySource(properties = {"rdf.enabled = true", "server.servlet.context-path = /"})
public class RdfIT extends AbstractWebClientIntegrationTest {
@Autowired
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java
index 24244e1773e..ffef89316b9 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java
@@ -34,7 +34,7 @@
*/
// Ensure the SWORD SERVER IS ENABLED before any tests run.
// This annotation overrides default DSpace config settings loaded into Spring Context
-@TestPropertySource(properties = {"sword-server.enabled = true"})
+@TestPropertySource(properties = { "sword-server.enabled = true", "server.servlet.context-path = /" })
public class Swordv1IT extends AbstractWebClientIntegrationTest {
@Autowired
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java
index 95ec7625141..f9caeead664 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java
@@ -34,7 +34,7 @@
*/
// Ensure the SWORDv2 SERVER IS ENABLED before any tests run.
// This annotation overrides default DSpace config settings loaded into Spring Context
-@TestPropertySource(properties = {"swordv2-server.enabled = true"})
+@TestPropertySource(properties = {"swordv2-server.enabled = true", "server.servlet.context-path = /"})
public class Swordv2IT extends AbstractWebClientIntegrationTest {
@Autowired
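Setting server.servlet.context-path to "/" matters because these web-client ITs issue real HTTP requests against a random local port and have to build absolute URLs themselves. A rough sketch of the mechanics with made-up names (this is not the actual AbstractWebClientIntegrationTest API):

    import org.springframework.boot.test.web.client.TestRestTemplate;
    import org.springframework.http.ResponseEntity;

    // Illustrative only; port and contextPath would come from the running test.
    public class ContextPathUrlExample {

        private final TestRestTemplate rest = new TestRestTemplate();
        private final int port;            // the random port the embedded container started on
        private final String contextPath;  // "/" after this patch, e.g. "/server" in production

        public ContextPathUrlExample(int port, String contextPath) {
            this.port = port;
            this.contextPath = contextPath;
        }

        public ResponseEntity<String> getSwordServiceDocument() {
            // With context-path "/" the SWORD endpoint sits directly under the port; any other
            // value would have to be spliced into every URL the test builds.
            String base = "http://localhost:" + port + ("/".equals(contextPath) ? "" : contextPath);
            return rest.getForEntity(base + "/sword/servicedocument", String.class);
        }
    }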
From 819bf788081dbc69c82ce050ba64aeb3a964415e Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 17:50:08 +0100
Subject: [PATCH 048/693] [DSC-963] Fixed dspace pom
---
dspace/pom.xml | 1 +
pom.xml | 13 +++++++------
2 files changed, 8 insertions(+), 6 deletions(-)
diff --git a/dspace/pom.xml b/dspace/pom.xml
index e3e05f0c096..0dba032e688 100644
--- a/dspace/pom.xml
+++ b/dspace/pom.xml
@@ -222,6 +222,7 @@
            <groupId>org.dspace</groupId>
            <artifactId>dspace-webapp-boot</artifactId>
+           <type>war</type>
            <scope>compile</scope>
diff --git a/pom.xml b/pom.xml
index fd6542421c7..e17dfbf384c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1148,29 +1148,30 @@
        <dependency>
            <groupId>org.dspace</groupId>
            <artifactId>dspace-server-webapp</artifactId>
+           <type>test-jar</type>
            <version>cris-2022.03.01-SNAPSHOT</version>
+           <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.dspace</groupId>
-           <artifactId>dspace-server-webapp</artifactId>
-           <type>test-jar</type>
+           <artifactId>dspace-rdf</artifactId>
            <version>cris-2022.03.01-SNAPSHOT</version>
-           <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.dspace</groupId>
-           <artifactId>dspace-webapp-boot</artifactId>
+           <artifactId>dspace-iiif</artifactId>
            <version>cris-2022.03.01-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>org.dspace</groupId>
-           <artifactId>dspace-rdf</artifactId>
+           <artifactId>dspace-server-webapp</artifactId>
            <version>cris-2022.03.01-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>org.dspace</groupId>
-           <artifactId>dspace-iiif</artifactId>
+           <artifactId>dspace-webapp-boot</artifactId>
            <version>cris-2022.03.01-SNAPSHOT</version>
+           <type>war</type>
        </dependency>
From 382105dfdae418eb6a238f1bfa9d968c82a58727 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 19:32:46 +0100
Subject: [PATCH 049/693] [DSC-963] Set default servlet context path on
application-test.properties
---
.../src/test/java/org/dspace/app/rdf/RdfIT.java | 2 +-
.../src/test/java/org/dspace/app/sword/Swordv1IT.java | 2 +-
.../src/test/java/org/dspace/app/sword2/Swordv2IT.java | 2 +-
.../src/test/resources/application-test.properties | 4 +++-
4 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java
index 10f06370ad5..85ab3dcadd7 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rdf/RdfIT.java
@@ -47,7 +47,7 @@
*/
// Ensure the RDF endpoint IS ENABLED before any tests run.
// This annotation overrides default DSpace config settings loaded into Spring Context
-@TestPropertySource(properties = {"rdf.enabled = true", "server.servlet.context-path = /"})
+@TestPropertySource(properties = {"rdf.enabled = true"})
public class RdfIT extends AbstractWebClientIntegrationTest {
@Autowired
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java
index ffef89316b9..24244e1773e 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword/Swordv1IT.java
@@ -34,7 +34,7 @@
*/
// Ensure the SWORD SERVER IS ENABLED before any tests run.
// This annotation overrides default DSpace config settings loaded into Spring Context
-@TestPropertySource(properties = { "sword-server.enabled = true", "server.servlet.context-path = /" })
+@TestPropertySource(properties = {"sword-server.enabled = true"})
public class Swordv1IT extends AbstractWebClientIntegrationTest {
@Autowired
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java
index f9caeead664..95ec7625141 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/sword2/Swordv2IT.java
@@ -34,7 +34,7 @@
*/
// Ensure the SWORDv2 SERVER IS ENABLED before any tests run.
// This annotation overrides default DSpace config settings loaded into Spring Context
-@TestPropertySource(properties = {"swordv2-server.enabled = true", "server.servlet.context-path = /"})
+@TestPropertySource(properties = {"swordv2-server.enabled = true"})
public class Swordv2IT extends AbstractWebClientIntegrationTest {
@Autowired
diff --git a/dspace-server-webapp/src/test/resources/application-test.properties b/dspace-server-webapp/src/test/resources/application-test.properties
index 9a396cf8e5b..e92e1166e35 100644
--- a/dspace-server-webapp/src/test/resources/application-test.properties
+++ b/dspace-server-webapp/src/test/resources/application-test.properties
@@ -14,4 +14,6 @@
## Log4j2 configuration for test environment
## This file is found on classpath at src/test/resources/log4j2-test.xml
-logging.config = classpath:log4j2-test.xml
\ No newline at end of file
+logging.config = classpath:log4j2-test.xml
+
+server.servlet.context-path=/
\ No newline at end of file
From 7524053a5c10071b99edf453a112ecc30f06779f Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 10 Mar 2023 20:41:07 +0100
Subject: [PATCH 050/693] [DSC-963] Improved TestApplication configuration
---
.../test/java/org/dspace/app/{ => rest}/TestApplication.java | 5 ++---
.../app/rest/test/AbstractControllerIntegrationTest.java | 2 +-
.../app/rest/test/AbstractWebClientIntegrationTest.java | 2 +-
3 files changed, 4 insertions(+), 5 deletions(-)
rename dspace-server-webapp/src/test/java/org/dspace/app/{ => rest}/TestApplication.java (70%)
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/TestApplication.java
similarity index 70%
rename from dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java
rename to dspace-server-webapp/src/test/java/org/dspace/app/rest/TestApplication.java
index 0f80e866edd..e387e3f0024 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/TestApplication.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/TestApplication.java
@@ -5,12 +5,11 @@
*
* http://www.dspace.org/license/
*/
-package org.dspace.app;
+package org.dspace.app.rest;
-import org.dspace.app.rest.WebApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
-@SpringBootApplication(scanBasePackageClasses = WebApplication.class)
+@SpringBootApplication
public class TestApplication {
}
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java
index 4ec66fb0008..a27e0ab75c8 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractControllerIntegrationTest.java
@@ -23,7 +23,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
-import org.dspace.app.TestApplication;
+import org.dspace.app.rest.TestApplication;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
import org.dspace.app.rest.utils.DSpaceKernelInitializer;
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java
index be0a27b4ebd..7f58a9999dd 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/AbstractWebClientIntegrationTest.java
@@ -9,7 +9,7 @@
import org.apache.commons.lang3.StringUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
-import org.dspace.app.TestApplication;
+import org.dspace.app.rest.TestApplication;
import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
import org.dspace.app.rest.utils.DSpaceKernelInitializer;
import org.junit.runner.RunWith;
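The move works because @SpringBootApplication component-scans the package of the annotated class and everything below it, so a TestApplication living in org.dspace.app.rest no longer needs scanBasePackageClasses to find the REST layer. A roughly equivalent, illustrative-only configuration (not DSpace code):

    package org.dspace.app.rest;

    import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
    import org.springframework.context.annotation.ComponentScan;
    import org.springframework.context.annotation.Configuration;

    // Functionally, @SpringBootApplication on a class in org.dspace.app.rest behaves like:
    @Configuration
    @EnableAutoConfiguration
    @ComponentScan(basePackages = "org.dspace.app.rest")   // implicit: the annotated class's package
    public class EquivalentTestApplication {
    }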
From 4748163eafd04e728f5201f31b29c9c0b8d8b9e8 Mon Sep 17 00:00:00 2001
From: Agustina Martinez
Date: Sat, 11 Mar 2023 15:49:02 +0000
Subject: [PATCH 051/693] Fix #8714 and #8715
---
.../org/dspace/discovery/IndexClient.java | 62 ++++++++++---------
.../org/dspace/discovery/IndexingUtils.java | 4 ++
.../org/dspace/discovery/SolrServiceImpl.java | 7 ++-
3 files changed, 44 insertions(+), 29 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java
index fcb3e79d1d4..661c48d91cf 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java
@@ -56,37 +56,18 @@ public void internalRun() throws Exception {
* new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer");
*/
- if (indexClientOptions == IndexClientOptions.REMOVE) {
- handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index");
- indexer.unIndexContent(context, commandLine.getOptionValue("r"));
- } else if (indexClientOptions == IndexClientOptions.CLEAN) {
- handler.logInfo("Cleaning Index");
- indexer.cleanIndex();
- } else if (indexClientOptions == IndexClientOptions.DELETE) {
- handler.logInfo("Deleting Index");
- indexer.deleteIndex();
- } else if (indexClientOptions == IndexClientOptions.BUILD ||
- indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
- handler.logInfo("(Re)building index from scratch.");
- indexer.deleteIndex();
- indexer.createIndex(context);
- if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
- checkRebuildSpellCheck(commandLine, indexer);
- }
- } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) {
- handler.logInfo("Optimizing search core.");
- indexer.optimize();
- } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) {
- checkRebuildSpellCheck(commandLine, indexer);
- } else if (indexClientOptions == IndexClientOptions.INDEX) {
- final String param = commandLine.getOptionValue('i');
+ Optional<IndexableObject> indexableObject = Optional.empty();
+
+ if (indexClientOptions == IndexClientOptions.REMOVE || indexClientOptions == IndexClientOptions.INDEX) {
+ final String param = indexClientOptions == IndexClientOptions.REMOVE ? commandLine.getOptionValue('r') :
+ commandLine.getOptionValue('i');
UUID uuid = null;
try {
uuid = UUID.fromString(param);
} catch (Exception e) {
- // nothing to do, it should be an handle
+ // nothing to do, it should be a handle
}
- Optional<IndexableObject> indexableObject = Optional.empty();
+
if (uuid != null) {
final Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid);
if (item != null) {
@@ -118,7 +99,32 @@ public void internalRun() throws Exception {
if (!indexableObject.isPresent()) {
throw new IllegalArgumentException("Cannot resolve " + param + " to a DSpace object");
}
- handler.logInfo("Indexing " + param + " force " + commandLine.hasOption("f"));
+ }
+
+ if (indexClientOptions == IndexClientOptions.REMOVE) {
+ handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index");
+ indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID());
+ } else if (indexClientOptions == IndexClientOptions.CLEAN) {
+ handler.logInfo("Cleaning Index");
+ indexer.cleanIndex();
+ } else if (indexClientOptions == IndexClientOptions.DELETE) {
+ handler.logInfo("Deleting Index");
+ indexer.deleteIndex();
+ } else if (indexClientOptions == IndexClientOptions.BUILD ||
+ indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
+ handler.logInfo("(Re)building index from scratch.");
+ indexer.deleteIndex();
+ indexer.createIndex(context);
+ if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
+ checkRebuildSpellCheck(commandLine, indexer);
+ }
+ } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) {
+ handler.logInfo("Optimizing search core.");
+ indexer.optimize();
+ } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) {
+ checkRebuildSpellCheck(commandLine, indexer);
+ } else if (indexClientOptions == IndexClientOptions.INDEX) {
+ handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f"));
final long startTimeMillis = System.currentTimeMillis();
final long count = indexAll(indexer, ContentServiceFactory.getInstance().
getItemService(), context, indexableObject.get());
@@ -179,7 +185,7 @@ private static long indexAll(final IndexingService indexingService,
indexingService.indexContent(context, dso, true, true);
count++;
if (dso.getIndexedObject() instanceof Community) {
- final Community community = (Community) dso;
+ final Community community = (Community) dso.getIndexedObject();
final String communityHandle = community.getHandle();
for (final Community subcommunity : community.getSubcommunities()) {
count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity));
diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java
index 8dd02f5d44e..aa90ccf4a37 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java
@@ -107,6 +107,10 @@ static List<String> findDirectlyAuthorizedGroupAndEPersonPrefixedIds(
ArrayList<String> prefixedIds = new ArrayList<>();
for (int auth : authorizations) {
for (ResourcePolicy policy : authService.getPoliciesActionFilter(context, obj, auth)) {
+ // Avoid NPE in cases where the policy does not have group or eperson
+ if (policy.getGroup() == null && policy.getEPerson() == null) {
+ continue;
+ }
String prefixedId = policy.getGroup() == null
? "e" + policy.getEPerson().getID()
: "g" + policy.getGroup().getID();
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java
index 68d3b48ec09..0cf2aa50af6 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java
@@ -256,7 +256,12 @@ public void unIndexContent(Context context, String searchUniqueID, boolean commi
try {
if (solrSearchCore.getSolr() != null) {
- indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID).delete(searchUniqueID);
+ IndexFactory index = indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID);
+ if (index != null) {
+ index.delete(searchUniqueID);
+ } else {
+ log.warn("Object not found in Solr index: " + searchUniqueID);
+ }
if (commit) {
solrSearchCore.getSolr().commit();
}
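The refactored IndexClient above first tries to interpret the -r/-i argument as a UUID and quietly falls back to treating it as a handle. A standalone sketch of that resolution step; the helper class and method names are invented:

    import java.util.UUID;

    public final class IdOrHandle {

        private IdOrHandle() { }

        /** Returns the parsed UUID, or null when the value should be treated as a handle. */
        public static UUID tryParseUuid(String value) {
            try {
                return UUID.fromString(value);
            } catch (IllegalArgumentException | NullPointerException e) {
                return null; // not a UUID; the caller falls back to a handle lookup
            }
        }
    }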
From fa651fea6d986c7433b93e93c4bb2f531b9eefc9 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Mon, 13 Mar 2023 10:33:06 +0100
Subject: [PATCH 052/693] [DSC-963] Added @Order on
AdminRestPermissionEvaluatorPlugin
---
.../app/rest/security/AdminRestPermissionEvaluatorPlugin.java | 3 +++
1 file changed, 3 insertions(+)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java
index 0d251f6400f..338eed4a734 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/AdminRestPermissionEvaluatorPlugin.java
@@ -20,6 +20,8 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
import org.springframework.security.core.Authentication;
import org.springframework.stereotype.Component;
@@ -29,6 +31,7 @@
* the authenticated EPerson is allowed to perform the requested action.
*/
@Component
+@Order(value = Ordered.HIGHEST_PRECEDENCE)
public class AdminRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin {
private static final Logger log = LoggerFactory.getLogger(RestObjectPermissionEvaluatorPlugin.class);
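@Order has an effect here because Spring sorts ordered beans when injecting them as a collection, so HIGHEST_PRECEDENCE puts the admin evaluator ahead of the other permission plugins. A self-contained illustration with invented plugin names (not the DSpace classes):

    import java.util.List;

    import org.springframework.core.Ordered;
    import org.springframework.core.annotation.Order;
    import org.springframework.stereotype.Component;

    // Hypothetical plugin interface and beans; only the ordering mechanics mirror the patch.
    interface ExamplePermissionPlugin {
        boolean allows(String action);
    }

    @Component
    @Order(Ordered.HIGHEST_PRECEDENCE)
    class AdminFirstPlugin implements ExamplePermissionPlugin {
        public boolean allows(String action) {
            return true;   // an admin check that, when it succeeds, makes later plugins irrelevant
        }
    }

    @Component
    class FallbackPlugin implements ExamplePermissionPlugin {
        public boolean allows(String action) {
            return false;  // unannotated beans sort after HIGHEST_PRECEDENCE ones
        }
    }

    @Component
    class PluginChain {
        private final List<ExamplePermissionPlugin> plugins;

        // Spring injects the list sorted by @Order, so AdminFirstPlugin is evaluated first.
        PluginChain(List<ExamplePermissionPlugin> plugins) {
            this.plugins = plugins;
        }

        boolean anyAllows(String action) {
            return plugins.stream().anyMatch(plugin -> plugin.allows(action));
        }
    }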
From bdf867541d99a61342e3723d8c47e5bc653ae1cd Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Fri, 3 Mar 2023 12:41:51 +0100
Subject: [PATCH 053/693] [DSC-968] Adding pagination on bitstream cleanup
---
.../dspace/content/BitstreamServiceImpl.java | 4 +-
.../org/dspace/content/dao/BitstreamDAO.java | 2 +-
.../content/dao/impl/BitstreamDAOImpl.java | 5 +-
.../content/service/BitstreamService.java | 2 +-
.../bitstore/BitstreamStorageServiceImpl.java | 134 ++++++++++--------
5 files changed, 82 insertions(+), 65 deletions(-)
diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java
index 071bf3972fc..cc89cea33a2 100644
--- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java
@@ -332,8 +332,8 @@ public void updateLastModified(Context context, Bitstream bitstream) {
}
@Override
- public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException {
- return bitstreamDAO.findDeletedBitstreams(context);
+ public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException {
+ return bitstreamDAO.findDeletedBitstreams(context, limit, offset);
}
@Override
diff --git a/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java
index c1ef9231312..0d7afaa3cd7 100644
--- a/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java
+++ b/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java
@@ -29,7 +29,7 @@ public interface BitstreamDAO extends DSpaceObjectLegacySupportDAO<Bitstream> {
public Iterator<Bitstream> findAll(Context context, int limit, int offset) throws SQLException;
- public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException;
+ public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException;
public List<Bitstream> findDuplicateInternalIdentifier(Context context, Bitstream bitstream) throws SQLException;
diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java
index 02e3509c311..d6d77fe7f0c 100644
--- a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java
@@ -41,13 +41,14 @@ protected BitstreamDAOImpl() {
}
@Override
- public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException {
+ public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Bitstream.class);
Root<Bitstream> bitstreamRoot = criteriaQuery.from(Bitstream.class);
criteriaQuery.select(bitstreamRoot);
+ criteriaQuery.orderBy(criteriaBuilder.desc(bitstreamRoot.get(Bitstream_.ID)));
criteriaQuery.where(criteriaBuilder.equal(bitstreamRoot.get(Bitstream_.deleted), true));
- return list(context, criteriaQuery, false, Bitstream.class, -1, -1);
+ return list(context, criteriaQuery, false, Bitstream.class, limit, offset);
}
diff --git a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java
index 4621c95e7c8..8effabf2843 100644
--- a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java
+++ b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java
@@ -183,7 +183,7 @@ public InputStream retrieve(Context context, Bitstream bitstream)
* @return a list of all bitstreams that have been "deleted"
* @throws SQLException if database error
*/
- public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException;
+ public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException;
/**
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
index b8a1a2e96ad..977b5b7b32b 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
@@ -17,6 +17,7 @@
import java.util.UUID;
import javax.annotation.Nullable;
+import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -224,25 +225,62 @@ public InputStream retrieve(Context context, Bitstream bitstream)
@Override
public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLException, IOException, AuthorizeException {
Context context = new Context(Context.Mode.BATCH_EDIT);
- int commitCounter = 0;
+
+ int offset = 0;
+ int limit = 100;
+
+ int cleanedBitstreamCount = 0;
+
+ int deletedBitstreamCount = bitstreamService.countDeletedBitstreams(context);
+ System.out.println("Found " + deletedBitstreamCount + " deleted bistream to cleanup");
try {
context.turnOffAuthorisationSystem();
- List<Bitstream> storage = bitstreamService.findDeletedBitstreams(context);
- for (Bitstream bitstream : storage) {
- UUID bid = bitstream.getID();
- Map wantedMetadata = new HashMap();
- wantedMetadata.put("size_bytes", null);
- wantedMetadata.put("modified", null);
- Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata);
+ while (cleanedBitstreamCount < deletedBitstreamCount) {
+ List<Bitstream> storage = bitstreamService.findDeletedBitstreams(context, limit, offset);
+
+ if (CollectionUtils.isEmpty(storage)) {
+ break;
+ }
+
+ for (Bitstream bitstream : storage) {
+ UUID bid = bitstream.getID();
+ Map wantedMetadata = new HashMap();
+ wantedMetadata.put("size_bytes", null);
+ wantedMetadata.put("modified", null);
+ Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata);
+
+
+ // Make sure entries which do not exist are removed
+ if (MapUtils.isEmpty(receivedMetadata)) {
+ log.debug("bitstore.about is empty, so file is not present");
+ if (deleteDbRecords) {
+ log.debug("deleting record");
+ if (verbose) {
+ System.out.println(" - Deleting bitstream information (ID: " + bid + ")");
+ }
+ checksumHistoryService.deleteByBitstream(context, bitstream);
+ if (verbose) {
+ System.out.println(" - Deleting bitstream record from database (ID: " + bid + ")");
+ }
+ bitstreamService.expunge(context, bitstream);
+ }
+ context.uncacheEntity(bitstream);
+ continue;
+ }
+
+ // This is a small chance that this is a file which is
+ // being stored -- get it next time.
+ if (isRecent(Long.valueOf(receivedMetadata.get("modified").toString()))) {
+ log.debug("file is recent");
+ context.uncacheEntity(bitstream);
+ continue;
+ }
- // Make sure entries which do not exist are removed
- if (MapUtils.isEmpty(receivedMetadata)) {
- log.debug("bitstore.about is empty, so file is not present");
if (deleteDbRecords) {
- log.debug("deleting record");
+ log.debug("deleting db record");
if (verbose) {
System.out.println(" - Deleting bitstream information (ID: " + bid + ")");
}
@@ -252,64 +290,42 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio
}
bitstreamService.expunge(context, bitstream);
}
- context.uncacheEntity(bitstream);
- continue;
- }
-
- // This is a small chance that this is a file which is
- // being stored -- get it next time.
- if (isRecent(Long.valueOf(receivedMetadata.get("modified").toString()))) {
- log.debug("file is recent");
- context.uncacheEntity(bitstream);
- continue;
- }
- if (deleteDbRecords) {
- log.debug("deleting db record");
- if (verbose) {
- System.out.println(" - Deleting bitstream information (ID: " + bid + ")");
+ if (isRegisteredBitstream(bitstream.getInternalId())) {
+ context.uncacheEntity(bitstream);
+ continue; // do not delete registered bitstreams
}
- checksumHistoryService.deleteByBitstream(context, bitstream);
- if (verbose) {
- System.out.println(" - Deleting bitstream record from database (ID: " + bid + ")");
+
+
+ // Since versioning allows for multiple bitstreams, check if the internal
+ // identifier isn't used on
+ // another place
+ if (bitstreamService.findDuplicateInternalIdentifier(context, bitstream).isEmpty()) {
+ this.getStore(bitstream.getStoreNumber()).remove(bitstream);
+
+ String message = ("Deleted bitstreamID " + bid + ", internalID " + bitstream.getInternalId());
+ if (log.isDebugEnabled()) {
+ log.debug(message);
+ }
+ if (verbose) {
+ System.out.println(message);
+ }
}
- bitstreamService.expunge(context, bitstream);
- }
- if (isRegisteredBitstream(bitstream.getInternalId())) {
context.uncacheEntity(bitstream);
- continue; // do not delete registered bitstreams
}
+ // Commit actual changes to DB after dispatch events
+ System.out.print("Performing incremental commit to the database...");
+ context.commit();
+ System.out.println(" Incremental commit done!");
- // Since versioning allows for multiple bitstreams, check if the internal identifier isn't used on
- // another place
- if (bitstreamService.findDuplicateInternalIdentifier(context, bitstream).isEmpty()) {
- this.getStore(bitstream.getStoreNumber()).remove(bitstream);
-
- String message = ("Deleted bitstreamID " + bid + ", internalID " + bitstream.getInternalId());
- if (log.isDebugEnabled()) {
- log.debug(message);
- }
- if (verbose) {
- System.out.println(message);
- }
- }
+ cleanedBitstreamCount = cleanedBitstreamCount + storage.size();
- // Make sure to commit our outstanding work every 100
- // iterations. Otherwise you risk losing the entire transaction
- // if we hit an exception, which isn't useful at all for large
- // amounts of bitstreams.
- commitCounter++;
- if (commitCounter % 100 == 0) {
- context.dispatchEvents();
- // Commit actual changes to DB after dispatch events
- System.out.print("Performing incremental commit to the database...");
- context.commit();
- System.out.println(" Incremental commit done!");
+ if (!deleteDbRecords) {
+ offset = offset + limit;
}
- context.uncacheEntity(bitstream);
}
System.out.print("Committing changes to the database...");
From f82834bd574feeffd00070571198957d5d3f5358 Mon Sep 17 00:00:00 2001
From: Agustina Martinez
Date: Mon, 13 Mar 2023 14:36:57 +0000
Subject: [PATCH 054/693] ItemOwningCollectionUpdateRestController: add support
 for inheritPolicies flag when moving items
---
...mOwningCollectionUpdateRestController.java | 24 ++++++++++++-------
1 file changed, 16 insertions(+), 8 deletions(-)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java
index b06360ee1dc..1a924f7e748 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java
@@ -40,6 +40,7 @@
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.bind.annotation.RequestParam;
/**
* This controller will handle all the incoming calls on the api/code/items/{uuid}/owningCollection endpoint
@@ -69,6 +70,7 @@ public class ItemOwningCollectionUpdateRestController {
* moving the item to the new collection.
*
* @param uuid The UUID of the item that will be moved
+ * @param inheritCollectionPolicies Boolean flag whether to inherit the target collection policies when moving the item
* @param response The response object
* @param request The request object
* @return The wrapped resource containing the new owning collection or null when the item was not moved
@@ -79,7 +81,9 @@ public class ItemOwningCollectionUpdateRestController {
@RequestMapping(method = RequestMethod.PUT, consumes = {"text/uri-list"})
@PreAuthorize("hasPermission(#uuid, 'ITEM','WRITE')")
@PostAuthorize("returnObject != null")
- public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response,
+ public CollectionRest move(@PathVariable UUID uuid,
+ @RequestParam(name = "inheritPolicies", defaultValue = "false") Boolean inheritCollectionPolicies,
+ HttpServletResponse response,
HttpServletRequest request)
throws SQLException, IOException, AuthorizeException {
Context context = ContextUtil.obtainContext(request);
@@ -91,7 +95,7 @@ public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response
"or the data cannot be resolved to a collection.");
}
- Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0));
+ Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0), inheritCollectionPolicies);
if (targetCollection == null) {
return null;
@@ -107,17 +111,19 @@ public CollectionRest move(@PathVariable UUID uuid, HttpServletResponse response
* @param item The item to be moved
* @param currentCollection The current owning collection of the item
* @param targetCollection The target collection of the item
+ * @param inheritPolicies Boolean flag whether to inherit the target collection policies when moving the item
* @return The target collection
* @throws SQLException If something goes wrong
* @throws IOException If something goes wrong
* @throws AuthorizeException If the user is not authorized to perform the move action
*/
private Collection moveItem(final Context context, final Item item, final Collection currentCollection,
- final Collection targetCollection)
+ final Collection targetCollection,
+ final boolean inheritPolicies)
throws SQLException, IOException, AuthorizeException {
- itemService.move(context, item, currentCollection, targetCollection);
- //Necessary because Controller does not pass through general RestResourceController, and as such does not do its
- // commit in DSpaceRestRepository.createAndReturn() or similar
+ itemService.move(context, item, currentCollection, targetCollection, inheritPolicies);
+ // Necessary because Controller does not pass through general RestResourceController, and as such does not do its
+ // commit in DSpaceRestRepository.createAndReturn() or similar
context.commit();
return context.reloadEntity(targetCollection);
@@ -129,12 +135,14 @@ private Collection moveItem(final Context context, final Item item, final Collec
* @param context The context Object
* @param itemUuid The uuid of the item to be moved
* @param targetCollection The target collection
+ * @param inheritPolicies Whether to inherit the target collection policies when moving the item
* @return The new owning collection of the item when authorized or null when not authorized
* @throws SQLException If something goes wrong
* @throws IOException If something goes wrong
* @throws AuthorizeException If the user is not authorized to perform the move action
*/
- private Collection performItemMove(final Context context, final UUID itemUuid, final Collection targetCollection)
+ private Collection performItemMove(final Context context, final UUID itemUuid, final Collection targetCollection,
+ boolean inheritPolicies)
throws SQLException, IOException, AuthorizeException {
Item item = itemService.find(context, itemUuid);
@@ -153,7 +161,7 @@ private Collection performItemMove(final Context context, final UUID itemUuid, f
if (authorizeService.authorizeActionBoolean(context, currentCollection, Constants.ADMIN)) {
- return moveItem(context, item, currentCollection, targetCollection);
+ return moveItem(context, item, currentCollection, targetCollection, inheritPolicies);
}
return null;
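A hypothetical client-side sketch of the new flag, using the MockMvc test infrastructure from earlier in this series; the UUIDs are placeholders and a real test would create the item and collection first:

    package org.dspace.app.rest;

    import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
    import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

    import java.util.UUID;

    import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
    import org.junit.Test;

    public class OwningCollectionInheritPoliciesSketchIT extends AbstractControllerIntegrationTest {

        @Test
        public void moveItemInheritingPolicies() throws Exception {
            UUID itemId = UUID.randomUUID();              // placeholder: use a real item UUID
            UUID targetCollectionId = UUID.randomUUID();  // placeholder: use a real collection UUID
            String adminToken = getAuthToken(admin.getEmail(), password);

            getClient(adminToken).perform(
                    put("/api/core/items/" + itemId + "/owningCollection")
                            .param("inheritPolicies", "true")
                            .contentType("text/uri-list")
                            .content("http://localhost/api/core/collections/" + targetCollectionId))
                    .andExpect(status().isOk());
        }
    }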
From 11d2d5a3c72bb6122973263abed94fef655ea9fa Mon Sep 17 00:00:00 2001
From: Agustina Martinez
Date: Mon, 13 Mar 2023 15:07:04 +0000
Subject: [PATCH 055/693] Fix Checkstyle issues
---
.../ItemOwningCollectionUpdateRestController.java | 15 +++++++++------
1 file changed, 9 insertions(+), 6 deletions(-)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java
index 1a924f7e748..b5a0c957f26 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java
@@ -39,8 +39,8 @@
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
/**
* This controller will handle all the incoming calls on the api/code/items/{uuid}/owningCollection endpoint
@@ -70,7 +70,8 @@ public class ItemOwningCollectionUpdateRestController {
* moving the item to the new collection.
*
* @param uuid The UUID of the item that will be moved
- * @param inheritCollectionPolicies Boolean flag whether to inherit the target collection policies when moving the item
+ * @param inheritCollectionPolicies Boolean flag whether to inherit the target collection policies when
+ * moving the item
* @param response The response object
* @param request The request object
* @return The wrapped resource containing the new owning collection or null when the item was not moved
@@ -82,7 +83,8 @@ public class ItemOwningCollectionUpdateRestController {
@PreAuthorize("hasPermission(#uuid, 'ITEM','WRITE')")
@PostAuthorize("returnObject != null")
public CollectionRest move(@PathVariable UUID uuid,
- @RequestParam(name = "inheritPolicies", defaultValue = "false") Boolean inheritCollectionPolicies,
+ @RequestParam(name = "inheritPolicies", defaultValue = "false")
+ Boolean inheritCollectionPolicies,
HttpServletResponse response,
HttpServletRequest request)
throws SQLException, IOException, AuthorizeException {
@@ -95,7 +97,8 @@ public CollectionRest move(@PathVariable UUID uuid,
"or the data cannot be resolved to a collection.");
}
- Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0), inheritCollectionPolicies);
+ Collection targetCollection = performItemMove(context, uuid, (Collection) dsoList.get(0),
+ inheritCollectionPolicies);
if (targetCollection == null) {
return null;
@@ -122,8 +125,8 @@ private Collection moveItem(final Context context, final Item item, final Collec
final boolean inheritPolicies)
throws SQLException, IOException, AuthorizeException {
itemService.move(context, item, currentCollection, targetCollection, inheritPolicies);
- // Necessary because Controller does not pass through general RestResourceController, and as such does not do its
- // commit in DSpaceRestRepository.createAndReturn() or similar
+ // Necessary because Controller does not pass through general RestResourceController, and as such does not do
+ // its commit in DSpaceRestRepository.createAndReturn() or similar
context.commit();
return context.reloadEntity(targetCollection);
From 4436549f0b33e90de3069d38a5a5272d889db3fe Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Mon, 13 Mar 2023 11:46:20 +0100
Subject: [PATCH 056/693] [DSC-963] Minor improvements
---
dspace-server-webapp/pom.xml | 31 +++++++------------
.../org/dspace/app/rest/WebApplication.java | 10 ++----
.../app/{rest => }/TestApplication.java | 11 +++++--
.../AbstractControllerIntegrationTest.java | 2 +-
.../AbstractWebClientIntegrationTest.java | 2 +-
.../main/java/org/dspace/app/Application.java | 13 ++++++++
dspace/config/log4j2-console.xml | 2 +-
7 files changed, 39 insertions(+), 32 deletions(-)
rename dspace-server-webapp/src/test/java/org/dspace/app/{rest => }/TestApplication.java (55%)
diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml
index fa607629e7d..c884ea7d57c 100644
--- a/dspace-server-webapp/pom.xml
+++ b/dspace-server-webapp/pom.xml
@@ -28,6 +28,18 @@
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>test-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
-
+
From 5cd9476fb812c56e0fe44b04d82ec1e73c857bbf Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Mon, 13 Mar 2023 17:44:07 +0100
Subject: [PATCH 057/693] [DSC-963] Fixed ItemRestRepositoryIT and
GenericAuthorizationFeatureIT integration tests
---
.../ExternalSourceItemUriListHandler.java | 8 +++++---
.../GenericAuthorizationFeatureIT.java | 18 ++++++++++++------
2 files changed, 17 insertions(+), 9 deletions(-)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java
index d619100bf67..201a7ba1633 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/ExternalSourceItemUriListHandler.java
@@ -30,16 +30,19 @@
@Component
public class ExternalSourceItemUriListHandler extends ExternalSourceEntryItemUriListHandler {
+ private Pattern pattern = Pattern.compile("\\/api\\/core\\/items\\/(.*)");
+
@Autowired
private ItemService itemService;
@Override
@SuppressWarnings("rawtypes")
public boolean supports(List<String> uriList, String method,Class clazz) {
- if (clazz != Item.class) {
+ if (clazz != Item.class || uriList.size() != 1) {
return false;
}
- return true;
+
+ return pattern.matcher(uriList.get(0)).find();
}
@Override
@@ -61,7 +64,6 @@ public boolean validate(Context context, HttpServletRequest request, List<String> uriList) {
Item item = null;
String url = uriList.get(0);
- Pattern pattern = Pattern.compile("\\/api\\/core\\/items\\/(.*)");
Matcher matcher = pattern.matcher(url);
if (!matcher.find()) {
throw new DSpaceBadRequestException("The uri: " + url + " doesn't resolve to an item");
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java
index 1d3b5b05160..e6ccf5954c7 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java
@@ -757,7 +757,8 @@ public void testCanMoveAdmin() throws Exception {
// Verify the general admin has this feature on item 1
getClient(adminToken).perform(
get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID()))
+ + "http://localhost/api/core/items/" + item1.getID())
+ .param("size", "1000"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
@@ -765,7 +766,8 @@ public void testCanMoveAdmin() throws Exception {
// Verify community A admin has this feature on item 1
getClient(communityAAdminToken).perform(
get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID()))
+ + "http://localhost/api/core/items/" + item1.getID())
+ .param("size", "1000"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
@@ -773,7 +775,8 @@ public void testCanMoveAdmin() throws Exception {
// Verify collection X admin has this feature on item 1
getClient(collectionXAdminToken).perform(
get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID()))
+ + "http://localhost/api/core/items/" + item1.getID())
+ .param("size", "1000"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
@@ -781,7 +784,8 @@ public void testCanMoveAdmin() throws Exception {
// Verify item 1 admin doesn’t have this feature on item 1
getClient(item1AdminToken).perform(
get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID()))
+ + "http://localhost/api/core/items/" + item1.getID())
+ .param("size", "1000"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
@@ -789,7 +793,8 @@ public void testCanMoveAdmin() throws Exception {
// Verify community A admin doesn’t have this feature on item 2
getClient(communityAAdminToken).perform(
get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item2.getID()))
+ + "http://localhost/api/core/items/" + item2.getID())
+ .param("size", "1000"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
@@ -808,7 +813,8 @@ public void testCanMoveAdmin() throws Exception {
// verify item 1 write has this feature on item 1
getClient(item1WriterToken).perform(
get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID()))
+ + "http://localhost/api/core/items/" + item1.getID())
+ .param("size", "1000"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]")
.exists());
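The handler change above precompiles the item-URI pattern once and rejects uri-list bodies that do not contain exactly one /api/core/items/{uuid} entry. A simplified standalone sketch of that check; the class and method names are invented:

    import java.util.List;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ItemUriListCheck {

        // Compiled once, as in the patched handler.
        private static final Pattern ITEM_URI = Pattern.compile("\\/api\\/core\\/items\\/(.*)");

        /** Accept only a uri-list body that contains exactly one item URI. */
        public static boolean supportsSingleItemUri(List<String> uriList) {
            return uriList != null && uriList.size() == 1 && ITEM_URI.matcher(uriList.get(0)).find();
        }

        /** Returns the captured item identifier, or null when the URI does not match. */
        public static String extractItemId(String uri) {
            Matcher matcher = ITEM_URI.matcher(uri);
            return matcher.find() ? matcher.group(1) : null;
        }
    }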
From 4c303770d56ef863155d03c74c54267324f82172 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Tue, 14 Mar 2023 11:31:12 +0100
Subject: [PATCH 058/693] [DSC-963] Added size parameter on
GenericAuthorizationFeatureIT tests
---
.../GenericAuthorizationFeatureIT.java | 340 ++++++++++--------
1 file changed, 188 insertions(+), 152 deletions(-)
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java
index e6ccf5954c7..d59ef00018b 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/GenericAuthorizationFeatureIT.java
@@ -209,7 +209,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
String siteId = ContentServiceFactory.getInstance().getSiteService().findSite(context).getID().toString();
// Verify the general admin has this feature on the site
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/sites/" + siteId))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -217,14 +217,14 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin doesn’t have this feature on the site
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/sites/" + siteId))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify the general admin has this feature on community A
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -232,7 +232,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin has this feature on community A
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -240,7 +240,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin has this feature on community AA
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -248,7 +248,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify collection X admin doesn’t have this feature on community A
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -256,7 +256,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin doesn’t have this feature on community B
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityB.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -264,7 +264,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify the general admin has this feature on collection X
getClient(adminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -272,7 +272,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin has this feature on collection X
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -280,7 +280,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify collection X admin has this feature on collection X
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -288,7 +288,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify item 1 admin doesn’t have this feature on collection X
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -296,7 +296,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify collection X admin doesn’t have this feature on collection Y
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionY.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -304,7 +304,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify the general admin has this feature on item 1
getClient(adminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -312,7 +312,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin has this feature on item 1
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -320,7 +320,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify collection X admin has this feature on item 1
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -328,7 +328,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify item 1 admin has this feature on item 1
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -336,7 +336,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify item 1 admin doesn’t have this feature on item 2
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -344,7 +344,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify the general admin has this feature on the bundle in item 1
getClient(adminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -352,7 +352,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin has this feature on the bundle in item 1
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -360,7 +360,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify collection X admin has this feature on the bundle in item 1
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -368,7 +368,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify item 1 admin has this feature on the bundle in item 1
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -376,7 +376,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify item 1 admin doesn’t have this feature on the bundle in item 2
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -384,7 +384,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify the general admin has this feature on the bitstream in item 1
getClient(adminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -392,7 +392,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify community A admin has this feature on the bitstream in item 1
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -400,7 +400,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify collection X admin has this feature on the bitstream in item 1
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -408,7 +408,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify item 1 admin has this feature on the bitstream in item 1
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -416,7 +416,7 @@ private void testAdminsHavePermissionsAllDso(String feature) throws Exception {
// Verify item 1 admin doesn’t have this feature on the bitstream in item 2
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -431,7 +431,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception {
// Verify the general admin has this feature on item 1
getClient(adminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -439,7 +439,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception {
// Verify community A admin has this feature on item 1
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -447,7 +447,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception {
// Verify collection X admin has this feature on item 1
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -455,7 +455,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception {
// Verify item 1 admin has this feature on item 1
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -463,7 +463,7 @@ private void testAdminsHavePermissionsItem(String feature) throws Exception {
// Verify community A admin doesn’t have this feature on item 2
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -480,14 +480,14 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// (or doesn’t have access otherwise)
if (hasDSOAccess) {
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
} else {
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -496,7 +496,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify community A write doesn’t have this feature on community AA
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -504,7 +504,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify community A write doesn’t have this feature on collection X
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -512,7 +512,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify community A write doesn’t have this feature on item 1
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -520,7 +520,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify community A write doesn’t have this feature on the bundle in item 1
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -528,7 +528,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify community A write doesn’t have this feature on the bitstream in item 1
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -536,7 +536,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify collection X write doesn’t have this feature on community A
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -544,7 +544,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify collection X write doesn’t have this feature on community AA
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -554,14 +554,14 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// (or doesn’t have access otherwise)
if (hasDSOAccess) {
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
} else {
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -570,7 +570,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify collection X write doesn’t have this feature on item 1
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -578,7 +578,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify collection X write doesn’t have this feature on the bundle in item 1
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -586,7 +586,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify collection X write doesn’t have this feature on the bitstream in item 1
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -594,7 +594,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify item 1 write doesn’t have this feature on community A
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -602,7 +602,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify item 1 write doesn’t have this feature on community AA
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -610,7 +610,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify item 1 write doesn’t have this feature on collection X
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -620,14 +620,14 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// (or doesn’t have access otherwise)
if (hasDSOAccess) {
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
} else {
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -636,7 +636,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify item 1 write doesn’t have this feature on the bundle in item 1
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -644,7 +644,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify item 1 write doesn’t have this feature on the bitstream in item 1
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -652,7 +652,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify community A write doesn’t have this feature on community B
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityB.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -660,7 +660,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify collection X write doesn’t have this feature on collection Y
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionY.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -668,7 +668,7 @@ private void testWriteUsersHavePermissionsAllDso(String feature, boolean hasDSOA
// Verify item 1 write doesn’t have this feature on item 2
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -682,7 +682,7 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc
// Verify community A write doesn’t have this feature on item 1
getClient(communityAWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -690,7 +690,7 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc
// Verify collection X write doesn’t have this feature on item 1
getClient(collectionXWriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -700,14 +700,14 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc
// (or doesn’t have access otherwise)
if (hasDSOAccess) {
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
} else {
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -716,7 +716,7 @@ private void testWriteUsersHavePermissionsItem(String feature, boolean hasDSOAcc
// Verify item 1 write doesn’t have this feature on item 2
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -756,45 +756,40 @@ public void testCanMoveAdmin() throws Exception {
// Verify the general admin has this feature on item 1
getClient(adminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID())
- .param("size", "1000"))
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ + "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on item 1
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID())
- .param("size", "1000"))
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ + "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify collection X admin has this feature on item 1
getClient(collectionXAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID())
- .param("size", "1000"))
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ + "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin doesn’t have this feature on item 1
getClient(item1AdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID())
- .param("size", "1000"))
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ + "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify community A admin doesn’t have this feature on item 2
getClient(communityAAdminToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item2.getID())
- .param("size", "1000"))
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ + "http://localhost/api/core/items/" + item2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
@@ -812,9 +807,8 @@ public void testCanMoveAdmin() throws Exception {
// verify item 1 write has this feature on item 1
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
- + "http://localhost/api/core/items/" + item1.getID())
- .param("size", "1000"))
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ + "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]")
.exists());
@@ -836,7 +830,7 @@ public void testCanMoveWriter() throws Exception {
String item1WriterToken = getAuthToken(item1Writer.getEmail(), password);
// verify item 1 write has this feature on item 1
getClient(item1WriterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='canMove')]")
@@ -873,28 +867,30 @@ public void testCanDeleteAdmin() throws Exception {
final String feature = "canDelete";
// Verify the general admin doesn’t have this feature on the site
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/sites/" + siteId))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify the general admin has this feature on community A
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on community A
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on community AA
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -914,161 +910,173 @@ public void testCanDeleteAdmin() throws Exception {
.build();
context.restoreAuthSystemState();
String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password);
- getClient(communityAAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify collection X admin doesn’t have this feature on community A
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify community A admin doesn’t have this feature on community B
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityB.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify the general admin has this feature on collection X
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on collection X
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify collection X admin doesn’t have this feature on collection X
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify item 1 admin doesn’t have this feature on collection X
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify collection X admin doesn’t have this feature on collection Y
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionY.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify the general admin has this feature on item 1
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on item 1
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify collection X admin has this feature on item 1
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin doesn’t have this feature on item 1
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify item 1 admin doesn’t have this feature on item 2
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify the general admin has this feature on the bundle in item 1
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on the bundle in item 1
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify collection X admin has this feature on the bundle in item 1
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin has this feature on the bundle in item 1
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin doesn’t have this feature on the bundle in item 2
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify the general admin has this feature on the bitstream in item 1
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on the bitstream in item 1
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify collection X admin has this feature on the bitstream in item 1
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin has this feature on the bitstream in item 1
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin doesn’t have this feature on the bitstream in item 2
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1096,7 +1104,8 @@ public void testCanDeleteAdminParent() throws Exception {
context.restoreAuthSystemState();
String communityAAAdminToken = getAuthToken(communityAAAdmin.getEmail(), password);
//verify the community AA admin has this feature on community AA
- getClient(communityAAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1111,7 +1120,8 @@ public void testCanDeleteAdminParent() throws Exception {
.build();
context.restoreAuthSystemState();
// verify collection X admin has this feature on collection X
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1126,7 +1136,7 @@ public void testCanDeleteAdminParent() throws Exception {
.build();
context.restoreAuthSystemState();
// verify item 1 admin has this feature on item 1
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1157,13 +1167,15 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String communityADeleterToken = getAuthToken(communityADeleter.getEmail(), password);
// Verify the user has this feature on community A
- getClient(communityADeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityADeleterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify this user doesn’t have this feature on community AA
- getClient(communityADeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityADeleterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1185,19 +1197,22 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String communityARemoverToken = getAuthToken(communityARemover.getEmail(), password);
// Verify the user has this feature on community AA
- getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityARemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify this user doesn’t have this feature on community A
- getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityARemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify this user doesn’t have this feature on collection X
- getClient(communityARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityARemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1218,19 +1233,22 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String communityAARemoverToken = getAuthToken(communityAARemover.getEmail(), password);
// Verify the user has this feature on collection X
- getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAARemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify this user doesn’t have this feature on community AA
- getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAARemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/communities/" + communityAA.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify this user doesn’t have this feature on item 1
- getClient(communityAARemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAARemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1251,7 +1269,8 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String collectionXRemoverToken = getAuthToken(collectionXRemover.getEmail(), password);
// Verify the user doesn’t have this feature on item 1
- getClient(collectionXRemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXRemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1272,7 +1291,7 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String item1DeleterToken = getAuthToken(item1Deleter.getEmail(), password);
// Verify the user doesn’t have this feature on item 1
- getClient(item1DeleterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1DeleterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1299,21 +1318,21 @@ public void testCanDeleteMinimalPermissions() throws Exception {
String collectionXRemoverItem1DeleterToken = getAuthToken(collectionXRemoverItem1Deleter.getEmail(), password);
// Verify the user has this feature on item 1
getClient(collectionXRemoverItem1DeleterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify this user doesn’t have this feature on collection X
getClient(collectionXRemoverItem1DeleterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/collections/" + collectionX.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify this user doesn’t have this feature on the bundle in item 1
getClient(collectionXRemoverItem1DeleterToken).perform(
- get("/api/authz/authorizations/search/object?embed=feature&uri="
+ get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1334,19 +1353,19 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String item1RemoverToken = getAuthToken(item1Remover.getEmail(), password);
// Verify the user has this feature on the bundle in item 1
- getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify this user doesn’t have this feature on item 1
- getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify this user doesn’t have this feature on the bitstream in item 1
- getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1RemoverToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1367,7 +1386,8 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String bundle1RemoverToken = getAuthToken(bundle1Remover.getEmail(), password);
// Verify the user doesn’t have this feature on the bitstream in item 1
- getClient(bundle1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(bundle1RemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1394,7 +1414,8 @@ public void testCanDeleteMinimalPermissions() throws Exception {
context.restoreAuthSystemState();
String bundle1item1RemoverToken = getAuthToken(bundle1item1Remover.getEmail(), password);
// Verify the user has this feature on the bitstream in item 1
- getClient(bundle1item1RemoverToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(bundle1item1RemoverToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bitstreams/" + bitstream1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1410,35 +1431,38 @@ public void testCanReorderBitstreamsAdmin() throws Exception {
final String feature = "canReorderBitstreams";
// Verify the general admin has this feature on the bundle in item 1
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on the bundle in item 1
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify collection X admin has this feature on the bundle in item 1
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin has this feature on the bundle in item 1
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin doesn’t have this feature on the bundle in item 2
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1453,19 +1477,21 @@ public void testCanReorderBitstreamsWriter() throws Exception {
final String feature = "canReorderBitstreams";
// Verify community A write doesn’t have this feature on the bundle in item 1
- getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify collection X write doesn’t have this feature on the bundle in item 1
- getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify item 1 write doesn’t have this feature on the bundle in item 1
- getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1473,7 +1499,8 @@ public void testCanReorderBitstreamsWriter() throws Exception {
// Create a new user, grant WRITE permissions on the bundle in item 1 to this user
// Verify the user has this feature on the bundle in item 1
- getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1489,35 +1516,38 @@ public void testCanCreateBitstreamAdmin() throws Exception {
final String feature = "canCreateBitstream";
// Verify the general admin has this feature on the bundle in item 1
- getClient(adminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(adminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin has this feature on the bundle in item 1
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify collection X admin has this feature on the bundle in item 1
- getClient(collectionXAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify item 1 admin has this feature on the bundle in item 1
- getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdminToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").exists());
// Verify community A admin doesn’t have this feature on the bundle in item 2
- getClient(communityAAdminToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAAdminToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle2.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1532,21 +1562,23 @@ public void testCanCreateBitstreamWriter() throws Exception {
final String feature = "canCreateBitstream";
// Verify community A write doesn’t have this feature on the bundle in item 1
- getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify collection X write doesn’t have this feature on the bundle in item 1
- getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify item 1 write doesn’t have this feature on the bundle in item 1
- getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1567,7 +1599,7 @@ public void testCanCreateBitstreamWriter() throws Exception {
context.restoreAuthSystemState();
String bundle1WriterToken = getAuthToken(bundle1Writer.getEmail(), password);
// Verify the user doesn’t have this feature on the bundle in item 1
- getClient(bundle1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(bundle1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1588,7 +1620,7 @@ public void testCanCreateBitstreamWriter() throws Exception {
context.restoreAuthSystemState();
String bundle1AdderToken = getAuthToken(bundle1Adder.getEmail(), password);
// Verify the user doesn’t have this feature on the bundle in item 1
- getClient(bundle1AdderToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(bundle1AdderToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1625,7 +1657,8 @@ public void testCanCreateBitstreamWriter() throws Exception {
context.restoreAuthSystemState();
String bundle1WriterAdderToken = getAuthToken(bundle1WriterAdder.getEmail(), password);
// Verify the user has this feature on the bundle in item 1
- getClient(bundle1WriterAdderToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(bundle1WriterAdderToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/bundles/" + bundle1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1645,21 +1678,23 @@ public void testCanCreateBundleWriter() throws Exception {
final String feature = "canCreateBundle";
// Verify community A write doesn’t have this feature on item 1
- getClient(communityAWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(communityAWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify collection X write doesn’t have this feature on item 1
- getClient(collectionXWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(collectionXWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
+ feature + "')]").doesNotExist());
// Verify item 1 write doesn’t have this feature on item 1
- getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1WriterToken).perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
@@ -1685,7 +1720,8 @@ public void testCanCreateBundleWriter() throws Exception {
context.restoreAuthSystemState();
String item1AdderWriterToken = getAuthToken(item1AdderWriter.getEmail(), password);
// Verify the user has this feature on item 1
- getClient(item1AdderWriterToken).perform(get("/api/authz/authorizations/search/object?embed=feature&uri="
+ getClient(item1AdderWriterToken)
+ .perform(get("/api/authz/authorizations/search/object?size=1000&embed=feature&uri="
+ "http://localhost/api/core/items/" + item1.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.authorizations[?(@._embedded.feature.id=='"
From 76fdd16a9b3d374c3fe7f47f8e732bd9fd57025b Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Tue, 14 Mar 2023 12:57:50 +0100
Subject: [PATCH 059/693] [DSC-963] Fixed SubmissionCCLicenseUrlRepositoryIT
tests
---
.../org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java | 3 +++
1 file changed, 3 insertions(+)
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java
index c306691eb35..30404e030ab 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/DSpaceResourceHalLinkFactory.java
@@ -21,6 +21,8 @@
import org.dspace.app.rest.model.hateoas.DSpaceResource;
import org.dspace.app.rest.utils.Utils;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
import org.springframework.data.domain.Pageable;
import org.springframework.hateoas.IanaLinkRelations;
import org.springframework.hateoas.Link;
@@ -33,6 +35,7 @@
* @author Tom Desair (tom dot desair at atmire dot com)
*/
@Component
+@Order(Ordered.HIGHEST_PRECEDENCE)
public class DSpaceResourceHalLinkFactory extends HalLinkFactory {
@Autowired
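Note: the only functional change in this patch is the @Order(Ordered.HIGHEST_PRECEDENCE) annotation, which makes Spring sort this factory ahead of any other HalLinkFactory beans when they are injected as a collection. The sketch below only illustrates that ordering mechanism; PrimaryLinkFactory and FallbackLinkFactory are hypothetical classes, not DSpace code.

import java.util.ArrayList;
import java.util.List;

import org.springframework.core.Ordered;
import org.springframework.core.annotation.AnnotationAwareOrderComparator;
import org.springframework.core.annotation.Order;

public class OrderingSketch {

    // Hypothetical stand-ins for two competing link factories.
    @Order(Ordered.HIGHEST_PRECEDENCE)
    static class PrimaryLinkFactory { }

    @Order(Ordered.LOWEST_PRECEDENCE)
    static class FallbackLinkFactory { }

    public static void main(String[] args) {
        List<Object> factories = new ArrayList<>();
        factories.add(new FallbackLinkFactory());
        factories.add(new PrimaryLinkFactory());

        // Spring applies the same comparator when injecting a List of beans,
        // so the HIGHEST_PRECEDENCE bean ends up first and is consulted first.
        AnnotationAwareOrderComparator.sort(factories);

        System.out.println(factories.get(0).getClass().getSimpleName()); // PrimaryLinkFactory
    }
}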
From fd955c49884073474842fb75d4ced2761207f83e Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Tue, 14 Mar 2023 17:12:09 +0100
Subject: [PATCH 060/693] [DSC-963] Refactoring to maintain server module
---
dspace-server-webapp/pom.xml | 17 +
.../src/main/resources/static/index.html | 0
.../resources/static/js/hal/http/client.js | 0
.../static/js/vendor/CustomPostForm.js | 0
.../src/main/resources/static/login.html | 0
.../src/main/resources/static/styles.css | 0
dspace/modules/pom.xml | 11 +
.../modules/server-boot}/pom.xml | 28 +-
.../org/dspace/app/ServerBootApplication.java | 36 ++
dspace/modules/server/pom.xml | 349 ++++++++++++++++++
.../org/dspace/app/ServerApplication.java | 10 +-
.../modules/server/src/main/webapp/.gitignore | 0
.../app/rest/example/ExampleController.java | 0
.../app/rest/example/ExampleControllerIT.java | 0
dspace/pom.xml | 6 -
pom.xml | 21 --
16 files changed, 419 insertions(+), 59 deletions(-)
rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/index.html (100%)
rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/js/hal/http/client.js (100%)
rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/js/vendor/CustomPostForm.js (100%)
rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/login.html (100%)
rename {dspace-webapp-boot => dspace-server-webapp}/src/main/resources/static/styles.css (100%)
rename {dspace-webapp-boot => dspace/modules/server-boot}/pom.xml (73%)
create mode 100644 dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java
create mode 100644 dspace/modules/server/pom.xml
rename dspace-webapp-boot/src/main/java/org/dspace/app/Application.java => dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java (85%)
create mode 100644 dspace/modules/server/src/main/webapp/.gitignore
rename {dspace-webapp-boot => dspace/modules/server}/src/test/java/org/dspace/app/rest/example/ExampleController.java (100%)
rename {dspace-webapp-boot => dspace/modules/server}/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java (100%)
diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml
index c884ea7d57c..0d3458199d3 100644
--- a/dspace-server-webapp/pom.xml
+++ b/dspace-server-webapp/pom.xml
@@ -40,6 +40,23 @@
+            <plugin>
+                <groupId>com.mycila</groupId>
+                <artifactId>license-maven-plugin</artifactId>
+                <configuration>
+                    <excludes>
+                        <exclude>**/src/test/resources/**</exclude>
+                        <exclude>**/src/test/data/**</exclude>
+                        <exclude>src/main/resources/static/index.html</exclude>
+                        <exclude>src/main/resources/static/login.html</exclude>
+                        <exclude>src/main/resources/static/styles.css</exclude>
+                        <exclude>src/main/resources/static/js/hal/**</exclude>
+                        <exclude>src/main/resources/static/js/vendor/**</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+ modulesorg.dspace
- dspace-parentcris-2022.03.01-SNAPSHOT..
- ${basedir}/..
-
- @
+ ${basedir}/../../..
@@ -108,23 +105,6 @@
-            <plugin>
-                <groupId>com.mycila</groupId>
-                <artifactId>license-maven-plugin</artifactId>
-                <configuration>
-                    <excludes>
-                        <exclude>**/src/test/resources/**</exclude>
-                        <exclude>**/src/test/data/**</exclude>
-                        <exclude>src/main/resources/static/index.html</exclude>
-                        <exclude>src/main/resources/static/login.html</exclude>
-                        <exclude>src/main/resources/static/styles.css</exclude>
-                        <exclude>src/main/resources/static/js/hal/**</exclude>
-                        <exclude>src/main/resources/static/js/vendor/**</exclude>
-                    </excludes>
-                </configuration>
-            </plugin>
             <plugin>
                 <groupId>org.springframework.boot</groupId>
                 <artifactId>spring-boot-maven-plugin</artifactId>
diff --git a/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java
new file mode 100644
index 00000000000..f46532ff14b
--- /dev/null
+++ b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java
@@ -0,0 +1,36 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app;
+
+import org.dspace.app.rest.WebApplication;
+import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
+import org.dspace.app.rest.utils.DSpaceKernelInitializer;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+
+/**
+ * Define the Spring Boot application itself so that it can be run using an
+ * embedded application server.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+@SpringBootApplication(scanBasePackageClasses = WebApplication.class)
+public class ServerBootApplication {
+
+ private ServerBootApplication() {
+
+ }
+
+ public static void main(String[] args) {
+ new SpringApplicationBuilder(ServerBootApplication.class)
+ .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer())
+ .run(args);
+ }
+
+}
diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml
new file mode 100644
index 00000000000..65849295e8c
--- /dev/null
+++ b/dspace/modules/server/pom.xml
@@ -0,0 +1,349 @@
+
+ 4.0.0
+ org.dspace.modules
+ server
+ war
+ DSpace Server Webapp:: Local Customizations
+
+ modules
+ org.dspace
+ cris-2022.03.01-SNAPSHOT
+ ..
+
+
+
+
+ ${basedir}/../../..
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-dependency-plugin
+
+
+ unpack
+ prepare-package
+
+ unpack-dependencies
+
+
+ org.dspace.modules
+ additions
+
+ ${project.build.directory}/additions
+ META-INF/**
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-war-plugin
+
+ false
+
+ true
+
+
+
+ ${project.build.directory}/additions
+ WEB-INF/classes
+
+
+
+
+
+ prepare-package
+
+
+
+
+
+ org.codehaus.gmaven
+ groovy-maven-plugin
+
+
+ setproperty
+ initialize
+
+ execute
+
+
+
+
+
+
+
+
+
+
+
+
+
+ unit-test-environment
+
+ false
+
+ skipUnitTests
+ false
+
+
+
+
+
+
+ maven-dependency-plugin
+
+ ${project.build.directory}/testing
+
+
+ org.dspace
+ dspace-parent
+ ${project.version}
+ zip
+ testEnvironment
+
+
+
+
+
+ setupUnitTestEnvironment
+ generate-test-resources
+
+ unpack
+
+
+
+
+
+
+
+ maven-surefire-plugin
+
+
+
+
+
+ ${agnostic.build.dir}/testing/dspace
+
+ true
+ ${agnostic.build.dir}/testing/dspace/solr/
+
+
+
+
+
+
+
+
+ org.dspace
+ dspace-server-webapp
+ test-jar
+ test
+
+
+
+
+
+
+ integration-test-environment
+
+ false
+
+ skipIntegrationTests
+ false
+
+
+
+
+
+
+ maven-dependency-plugin
+
+ ${project.build.directory}/testing
+
+
+ org.dspace
+ dspace-parent
+ ${project.version}
+ zip
+ testEnvironment
+
+
+
+
+
+ setupIntegrationTestEnvironment
+ pre-integration-test
+
+ unpack
+
+
+
+
+
+
+
+ maven-failsafe-plugin
+
+
+
+
+ ${agnostic.build.dir}/testing/dspace
+
+ true
+ ${agnostic.build.dir}/testing/dspace/solr/
+
+
+
+
+
+
+
+
+ org.dspace
+ dspace-server-webapp
+ test-jar
+ test
+
+
+
+
+
+ oracle-support
+
+
+ db.name
+ oracle
+
+
+
+
+ com.oracle
+ ojdbc6
+
+
+
+
+
+
+
+
+ org.dspace.modules
+ additions
+
+
+ org.dspace
+ dspace-server-webapp
+
+
+ org.springframework.boot
+ spring-boot-starter-tomcat
+ provided
+ ${spring-boot.version}
+
+
+ org.apache.solr
+ solr-solrj
+ ${solr.client.version}
+
+
+
+
+ org.dspace
+ dspace-api
+ test-jar
+ test
+
+
+ org.dspace
+ dspace-server-webapp
+ test-jar
+ test
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+ org.springframework.security
+ spring-security-test
+ ${spring-security.version}
+ test
+
+
+ com.jayway.jsonpath
+ json-path-assert
+ ${json-path.version}
+ test
+
+
+ junit
+ junit
+ test
+
+
+ com.h2database
+ h2
+ test
+
+
+ org.mockito
+ mockito-inline
+ test
+
+
+
+
+ org.apache.solr
+ solr-core
+ ${solr.client.version}
+ test
+
+
+
+ org.apache.commons
+ commons-text
+
+
+
+
+ org.apache.lucene
+ lucene-analyzers-icu
+ test
+
+
+
+
+
diff --git a/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java b/dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java
similarity index 85%
rename from dspace-webapp-boot/src/main/java/org/dspace/app/Application.java
rename to dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java
index dc84b29a562..34acc778b7f 100644
--- a/dspace-webapp-boot/src/main/java/org/dspace/app/Application.java
+++ b/dspace/modules/server/src/main/java/org/dspace/app/ServerApplication.java
@@ -28,13 +28,7 @@
*
*/
@SpringBootApplication(scanBasePackageClasses = WebApplication.class)
-public class Application extends SpringBootServletInitializer {
-
- public static void main(String[] args) {
- new SpringApplicationBuilder(Application.class)
- .initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer())
- .run(args);
- }
+public class ServerApplication extends SpringBootServletInitializer {
/**
* Override the default SpringBootServletInitializer.configure() method,
@@ -52,7 +46,7 @@ public static void main(String[] args) {
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
// Pass this Application class, and our initializers for DSpace Kernel and Configuration
// NOTE: Kernel must be initialized before Configuration
- return application.sources(Application.class)
+ return application.sources(ServerApplication.class)
.initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer());
}
}
diff --git a/dspace/modules/server/src/main/webapp/.gitignore b/dspace/modules/server/src/main/webapp/.gitignore
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java b/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java
similarity index 100%
rename from dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleController.java
rename to dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleController.java
diff --git a/dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java b/dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java
similarity index 100%
rename from dspace-webapp-boot/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java
rename to dspace/modules/server/src/test/java/org/dspace/app/rest/example/ExampleControllerIT.java
diff --git a/dspace/pom.xml b/dspace/pom.xml
index 0dba032e688..d5e7108fa52 100644
--- a/dspace/pom.xml
+++ b/dspace/pom.xml
@@ -219,12 +219,6 @@
             <artifactId>dspace-server-webapp</artifactId>
             <scope>compile</scope>
         </dependency>
-        <dependency>
-            <groupId>org.dspace</groupId>
-            <artifactId>dspace-webapp-boot</artifactId>
-            <type>war</type>
-            <scope>compile</scope>
-        </dependency>
         <dependency>
             <groupId>org.dspace</groupId>
             <artifactId>dspace-sword</artifactId>
diff --git a/pom.xml b/pom.xml
index e17dfbf384c..a5d5b12f079 100644
--- a/pom.xml
+++ b/pom.xml
@@ -798,21 +798,6 @@
-
-        <profile>
-            <id>dspace-webapp-boot</id>
-            <activation>
-                <file>
-                    <exists>dspace-webapp-boot/pom.xml</exists>
-                </file>
-            </activation>
-            <modules>
-                <module>dspace-webapp-boot</module>
-            </modules>
-        </profile>
-
@@ -1167,12 +1152,6 @@
             <artifactId>dspace-server-webapp</artifactId>
             <version>cris-2022.03.01-SNAPSHOT</version>
         </dependency>
-        <dependency>
-            <groupId>org.dspace</groupId>
-            <artifactId>dspace-webapp-boot</artifactId>
-            <version>cris-2022.03.01-SNAPSHOT</version>
-            <type>war</type>
-        </dependency>
         <dependency>
             <groupId>org.dspace</groupId>
From 47fc9169179d738739cd9d0a56b51bc852a6e6b6 Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Tue, 14 Mar 2023 18:36:50 +0100
Subject: [PATCH 061/693] [DSC-963] Configured spring boot maven plugin
---
dspace/modules/server-boot/pom.xml | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/dspace/modules/server-boot/pom.xml b/dspace/modules/server-boot/pom.xml
index 313cf0e78f6..a1dd702f97c 100644
--- a/dspace/modules/server-boot/pom.xml
+++ b/dspace/modules/server-boot/pom.xml
@@ -108,6 +108,14 @@
                 <groupId>org.springframework.boot</groupId>
                 <artifactId>spring-boot-maven-plugin</artifactId>
+                <version>${spring-boot.version}</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>repackage</goal>
+                        </goals>
+                    </execution>
+                </executions>
From 06e77f354ca4d4ca1cdbd75200e97d824954c70b Mon Sep 17 00:00:00 2001
From: Luca Giamminonni
Date: Wed, 15 Mar 2023 17:58:16 +0100
Subject: [PATCH 062/693] [DSC-963] Suppress checkstyle warning
---
.../src/main/java/org/dspace/app/ServerBootApplication.java | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java
index f46532ff14b..5efa79a02ac 100644
--- a/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java
+++ b/dspace/modules/server-boot/src/main/java/org/dspace/app/ServerBootApplication.java
@@ -20,13 +20,10 @@
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
+@SuppressWarnings({ "checkstyle:hideutilityclassconstructor" })
@SpringBootApplication(scanBasePackageClasses = WebApplication.class)
public class ServerBootApplication {
- private ServerBootApplication() {
-
- }
-
public static void main(String[] args) {
new SpringApplicationBuilder(ServerBootApplication.class)
.initializers(new DSpaceKernelInitializer(), new DSpaceConfigurationInitializer())
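Note: this patch removes the private constructor introduced in patch 060 and suppresses checkstyle's HideUtilityClassConstructor rule instead, presumably because the @SpringBootApplication class still needs a constructor that Spring can call when registering it as a configuration class. A minimal, hypothetical example of what the rule normally expects from a pure utility class:

public final class MathUtils {

    private MathUtils() {
        // Hiding the constructor is the usual way to satisfy
        // checkstyle's HideUtilityClassConstructor rule.
    }

    public static int twice(int value) {
        return 2 * value;
    }
}

ServerBootApplication takes the other route: it keeps the implicit public constructor and silences the rule with @SuppressWarnings({ "checkstyle:hideutilityclassconstructor" }).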
From 372b7e339461a11e57937a945e027b1c42e429db Mon Sep 17 00:00:00 2001
From: aroman-arvo
Date: Fri, 17 Mar 2023 11:00:02 +0100
Subject: [PATCH 063/693] fix #8719 update sequences
---
...7.6_2023.03.17__Remove_unused_sequence.sql | 13 ++++++
...7.6_2023.03.17__Remove_unused_sequence.sql | 13 ++++++
.../postgres/update-sequences.sql | 40 ++++++++++++++-----
3 files changed, 56 insertions(+), 10 deletions(-)
create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql
create mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql
new file mode 100644
index 00000000000..47cd157336a
--- /dev/null
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql
@@ -0,0 +1,13 @@
+--
+-- The contents of this file are subject to the license and copyright
+-- detailed in the LICENSE and NOTICE files at the root of the source
+-- tree and available online at
+--
+-- http://www.dspace.org/license/
+--
+
+-----------------------------------------------------------------------------------
+-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5)
+-----------------------------------------------------------------------------------
+
+DROP SEQUENCE history_seq;
\ No newline at end of file
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
new file mode 100644
index 00000000000..47cd157336a
--- /dev/null
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
@@ -0,0 +1,13 @@
+--
+-- The contents of this file are subject to the license and copyright
+-- detailed in the LICENSE and NOTICE files at the root of the source
+-- tree and available online at
+--
+-- http://www.dspace.org/license/
+--
+
+-----------------------------------------------------------------------------------
+-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5)
+-----------------------------------------------------------------------------------
+
+DROP SEQUENCE history_seq;
\ No newline at end of file
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql
index 749f82382c9..f96434f1ba8 100644
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql
@@ -19,21 +19,41 @@
-- JVMs. The SQL code below will typically only be required after a direct
-- SQL data dump from a backup or somesuch.
-
+SELECT setval('alert_id_seq', max(alert_id)) FROM systemwidealert;
SELECT setval('bitstreamformatregistry_seq', max(bitstream_format_id)) FROM bitstreamformatregistry;
+SELECT setval('checksum_history_check_id_seq', max(check_id)) FROM checksum_history;
+SELECT setval('cwf_claimtask_seq', max(claimtask_id)) FROM cwf_claimtask;
+SELECT setval('cwf_collectionrole_seq', max(collectionrole_id)) FROM cwf_collectionrole;
+SELECT setval('cwf_in_progress_user_seq', max(in_progress_user_id)) FROM cwf_in_progress_user;
+SELECT setval('cwf_pooltask_seq', max(pooltask_id)) FROM cwf_pooltask;
+SELECT setval('cwf_workflowitem_seq', max(workflowitem_id)) FROM cwf_workflowitem;
+SELECT setval('cwf_workflowitemrole_seq', max(workflowitemrole_id)) FROM cwf_workflowitemrole;
+SELECT setval('doi_seq', max(doi_id)) FROM doi;
+SELECT setval('entity_type_id_seq', max(id)) FROM entity_type;
SELECT setval('fileextension_seq', max(file_extension_id)) FROM fileextension;
-SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy;
-SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem;
-SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata;
-SELECT setval('subscription_seq', max(subscription_id)) FROM subscription;
-SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry;
-SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue;
-SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry;
+SELECT setval('handle_id_seq', max(handle_id)) FROM handle;
SELECT setval('harvested_collection_seq', max(id)) FROM harvested_collection;
SELECT setval('harvested_item_seq', max(id)) FROM harvested_item;
-SELECT setval('webapp_seq', max(webapp_id)) FROM webapp;
+SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry;
+SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry;
+SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue;
+SELECT setval('openurltracker_seq', max(tracker_id)) FROM openurltracker;
+SELECT setval('orcid_history_id_seq', max(id)) FROM orcid_history;
+SELECT setval('orcid_queue_id_seq', max(id)) FROM orcid_queue;
+SELECT setval('orcid_token_id_seq', max(id)) FROM orcid_token;
+SELECT setval('process_id_seq', max(process_id)) FROM process;
+SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata;
+SELECT setval('relationship_id_seq', max(id)) FROM relationship;
+SELECT setval('relationship_type_id_seq', max(id)) FROM relationship_type;
SELECT setval('requestitem_seq', max(requestitem_id)) FROM requestitem;
-SELECT setval('handle_id_seq', max(handle_id)) FROM handle;
+SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy;
+SELECT setval('subscription_parameter_seq', max(subscription_id)) FROM subscription_parameter;
+SELECT setval('subscription_seq', max(subscription_id)) FROM subscription;
+SELECT setval('supervision_orders_seq', max(id)) FROM supervision_orders;
+SELECT setval('versionhistory_seq', max(versionhistory_id)) FROM versionhistory;
+SELECT setval('versionitem_seq', max(versionitem_id)) FROM versionitem;
+SELECT setval('webapp_seq', max(webapp_id)) FROM webapp;
+SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem;
-- Handle Sequence is a special case. Since Handles minted by DSpace use the 'handle_seq',
-- we need to ensure the next assigned handle will *always* be unique. So, 'handle_seq'
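Note: every statement added to update-sequences.sql follows the same idiom, SELECT setval('<sequence>', max(<pk>)) FROM <table>, which advances the sequence to the highest key already in use so a freshly restored database never hands out a duplicate id (like the stock script, it assumes the table is not empty). Below is a minimal JDBC sketch of running one such statement; the connection details are hypothetical and the PostgreSQL driver is assumed to be on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class SequenceResyncSketch {

    public static void main(String[] args) throws SQLException {
        // Hypothetical connection details for a local DSpace database.
        String url = "jdbc:postgresql://localhost:5432/dspace";
        try (Connection conn = DriverManager.getConnection(url, "dspace", "dspace");
             Statement stmt = conn.createStatement();
             // Same idiom as the migration script: move the sequence to the
             // current maximum so the next generated id is always unique.
             ResultSet rs = stmt.executeQuery(
                 "SELECT setval('requestitem_seq', max(requestitem_id)) FROM requestitem")) {
            if (rs.next()) {
                System.out.println("requestitem_seq is now at " + rs.getLong(1));
            }
        }
    }
}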
From 5d52a2a512ca935fc76028ca1c872883d919d8fb Mon Sep 17 00:00:00 2001
From: aroman-arvo
Date: Mon, 20 Mar 2023 17:11:05 +0100
Subject: [PATCH 064/693] DS-8719 - added IF EXISTS to drop sequence
---
.../postgres/V7.6_2023.03.17__Remove_unused_sequence.sql | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
index 47cd157336a..e4544e1de72 100644
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
+++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql
@@ -10,4 +10,4 @@
-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5)
-----------------------------------------------------------------------------------
-DROP SEQUENCE history_seq;
\ No newline at end of file
+DROP SEQUENCE IF EXISTS history_seq;
\ No newline at end of file
From e0846452e57113c930e42b3fcceb455636bd6d5e Mon Sep 17 00:00:00 2001
From: Michael Spalti
Date: Wed, 8 Mar 2023 10:42:15 -0800
Subject: [PATCH 065/693] Added test and format registry entry
Added jp2 resource
---
.../config/spring/api/iiif-processing.xml | 5 ++-
.../canvasdimension/CanvasDimensionsIT.java | 34 ++++++++++++++++++
.../org/dspace/iiif/canvasdimension/cat.jp2 | Bin 0 -> 72794 bytes
.../config/registries/bitstream-formats.xml | 9 +++++
4 files changed, 45 insertions(+), 3 deletions(-)
create mode 100644 dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2
diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml
index ea2c6546085..fa203fe568e 100644
--- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml
+++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml
@@ -6,7 +6,6 @@
-
-
-
\ No newline at end of file
+
diff --git a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java
index 038654af438..7dba38c987b 100644
--- a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java
+++ b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java
@@ -353,6 +353,40 @@ public void processItemWithExistingMetadata() throws Exception {
}
+
+ @Test
+ public void processItemWithJp2File() throws Exception {
+ context.turnOffAuthorisationSystem();
+ // Create a new Item
+ iiifItem = ItemBuilder.createItem(context, col1)
+ .withTitle("Test Item")
+ .withIssueDate("2017-10-17")
+ .enableIIIF()
+ .build();
+
+ // Add jp2 image to verify image server call for dimensions
+ InputStream input = this.getClass().getResourceAsStream("cat.jp2");
+ bitstream = BitstreamBuilder
+ .createBitstream(context, iiifItem, input)
+ .withName("Bitstream2.jp2")
+ .withMimeType("image/jp2")
+ .build();
+
+ context.restoreAuthSystemState();
+
+ String id = iiifItem.getID().toString();
+
+ execCanvasScript(id);
+
+ assertTrue(bitstream.getMetadata().stream()
+ .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT))
+ .anyMatch(m -> m.getValue().contentEquals("64")));
+ assertTrue(bitstream.getMetadata().stream()
+ .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH))
+ .anyMatch(m -> m.getValue().contentEquals("64")));
+
+ }
+
@Test
public void processParentCommunityWithMaximum() throws Exception {
context.turnOffAuthorisationSystem();
diff --git a/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2
new file mode 100644
index 0000000000000000000000000000000000000000..a6649c088643cfa44dae80c03b1fbe095880b724
GIT binary patch
literal 72794
zcmeFXby!u;*Ef9V?k)i(rIAoTx}{sX8xC;*B}JqKB&55$yQI52r9m1gk$lcU@f*MU
zzMuEH-s}DAy*KPVv(}nfv(}oKwPAk-005v`Do{|MA^Rf(002@`&|5o8J9Y|i9RMjU
z?cNxIZwMXk;9z18eRp?p1b_oTy#oN?1m6KrDFgx7m=pjeL`(oY4;nNf^0p9Oh4ZUE
zp&@j8SXhZ(#(~i^G4DFq=M5@m_PnsLjbb^VMFqL8yz`SZ?o-cCu&~oTorj;vMauov
zVW|OU&%eRKBBja2v{VW&H-F(;0-~+~VPP3%pa8zGf_16wOS`bJV1p#U2AYFQ
zd<%192XNT~06;MUU=R?Xzytup2x#p91%T8s!8fski6aHL#0Hl}DA)if$PK0e03s(R
z3*A{zvd{42$*mDAp5sVU_LweCWrL008(G0HDQ&i2v>Bb73jj
zdv5^%gomd0JZeZE0Oq&v+9xcnglXw+W<{OGllAv9o_pB_L=L`xxrt)uU||cAcLLct
zfxt{SND~D|kg_2VXno&8$`)i|V(VlB5kTsYQpDN;LSx+bRknBoX(4O@g7krif9W)F
zkTb9`Q4!Nn0keJ$_N#+}0}yDcYyxt!lQ*<90t-|)r!|}|IK50
zJCFs?))6cxfV32`1KB<}0d~^R;SZr?<|zAz5OJ^-`HQdx{YA(aT08!c$eDqh{}5s}
z)}nt1a2)?qEo@|E_B#;2q6AQq5EcPj0|PNh$(Vw|80cgu2waAc_m_l&?SJrut^c!~
zu!FIRsx3(TiIO#BZvR}u*2Wb76<^8G8g$QBcw;T7^rtzpvxyN1=pbqUGJtq+Z=`~m
zg5$%gfYc$4!6n3OfQ*UhFE#%fRt0GHUkRwX5h#Ie5(a`mKpShIt=aD`s1Fi|34i0GJ@Az*%*_8TKzR^=ZTr(6vT|Vd
zf7t+#1ecI>@w>cVRW$c9^xuPm3xRil`}GEMe9r@?%)9&R7Z(=oo(suMkM8>u{gZ|{
zxu=otpWp}7y)IhFiUqI#Uuz!1zPIb_o+}ApF*OBCtiezCdk)lZPFh_29}XnBKmjzt
z&xbag_Z-N6_|3W3c>$&Ykp1?{>VH3BzCpzQ=KiAp-^lJ`fe6w4usfjz!MhcF|Bl-u
zNDTmq+ppbB45>k%gCCGyf9+_1t&=q*@jx~VqM?DK$=?zW96%M|2+k-DU>VqJL!gtb
zvE#qCn-Pc&EV~~EvPSAGObwjw
z^B|J5i38}rXs-6a{%3O(Lo*SeHPGR2!NvNeUs&Rg0Ne#22egI68VLln1HVrlP5z!}
zkgdTP=|8xrhW8olKlmu%DK`IC^ZVUG2N?(az65UrfusBSubqK=Ux%#V`z!$qX#v2>
zfY9JO6?|KPtIOa&hQM_!e*hr>>Tc$4AAl-k3^X*+6Ojee2LSYY3Sj&W0B(FY5BLB;
zMMOkILPSMELd8NsM!~|vKt;vCBgDbM!@(iMLcPC!7r%&qKcUc2P|(oPFwxO5anaGy
zaUlG4k!Vzm2gn^7uaCP1p|PB
zhJ}MiKtw`D0VnN$3ZVedFb_gZ02C|~3^Xh>90EKd91Q9+un-FdmJ*u{PDs%J=dryv
zJ3MZ9TAA<@Dmo}s_LBFI8!rq_KVK0?wZ;>=ax`6bMcSKsp+2H`hf+2h6Y;;
zdv6m0JRIk}36yNuuwV=9ALGEWdxzsfENE34J$goExZ?0WLPQzQ$dThaH9W)s1RBn^
zF|ZLC1jb^jTp*LxKPLS57SR4>!rdGI73O|SEPw!DbDzX6-YY`7+tj!Ch{5dH{D-BB
zx~r+@*psBMsT380L@1tQy1(oA8dS1GL^=-2uewxoV{VJ4)CH3UPuRqqv=^u9@M8U429Tda&Geg-^U5J#8AcB5R5d
zyaVX#qG#U6<|CJhU}96zi*{ZuZAs(o5@p^;$N1XAK?#7o6Kei^Rgm
zhlMX+))J|&AXlp)IZ^OPWUtj1-vJV?mu_rCKB?!L4+hMv@HE_TUt64BB{J*~4tGXA
zs$xFdy{$;7F1P~>6mMiNefK#tSss%8cr%+lY?rK6ReY(}udBkgZ8O9aGR#~1X>4hT
zd`h(J2gklSg=otr!l9@026+>U0O#!hpG4$#_wE
z*LZs(ou`j-&UPQ|8K{fdq}Vjf&F@<|k1e_^Ryf{RRo?+vmo{B)!|wotu4?+OPsl=@
zGMf&l56jzLC9Zl*y*y-g-1FMfI1|YZ6F$D80FwLPyi(*TDUw?V@=20SJ#f_YsOA>g
zkr-W6uWM*d^7?M^I_``trt2}WM2Q*<%Xm-mIoTJ}>H-C*Q%N6#%H;|&)T
z&5{$U+nVFqXBe2x8fhDHgdHdoTQQvn^*e_kr5wt}$X?&i={8%?@tksl^!U7d#px%*`$()Je8^t&E9R2PLt=D;T92anm&PLG(+0PQK
zBJ^=AXySn{RBwZCCh-!6=Y*$7_CTGH-j|b8#FcHre_7dHL(m`pK4octIh8E
z#k`y%z(Atlmfz#H6j0hByqKA~JeY5;seFz;@8ohrO^BC%RetKL$vl$4oN9cua_B$*JPAcQsn-3-JitkjS00NEG^EA3ImdU;t{D
z^3n4f*mkUWrd>lyk5>TrUJSN}ne`1d{0IPt!j*j5I?EH-IH;LtHJn4yZ1rT^ctV47
z`+fZ$NttwhRYR2R5m`&(#A09Eq0)$-Ol!p*#396i^ByO;gx+YtQ~ia$Ub|0AT?oeX
z@^@qFOIEYS5iXBJza8#-(aYAUv9@cA$Q8kEU)%wfBDJ=4G#o*UG+HYH4Fb!bB`^iY
z^;kNUSDCsc&a1_2;6mpl0?=_b$L|0h;uX6H8g&VBI)$h{X!h1vr}EYKLX+K%R^61O
zfHAV}YR+3aqW=0^XVscT$Hc7&-9xuP>iHZAUdJegriqy`naUPModlMWTf=a3vUPdzf+B6wHw4q1RtP
zq2<^8T5YqDEfm%55&U?K*99-$L0E$9oBj9P{7@6-64ks6HxjvDNJ!4?7zZl-jM!runQ
z+%%yv?ax_ET?Ae^_92*)8CofBH{!Nw36d<10bk#upEief-QF&Z6^G@4t{BrTKuy`N
z5Mjx83NeJczAPT_oy>1WWo3IShSpe(I~GLiRSx*gJQtV>DOSsK)RlSU^lSl*D}S%k
zsmPIZ3CQlny_qBa_>t
zBy1RT<5v1_$qu!gLT~Qq@A?!%vQxj}V?FW^S8vS
z)aOp)%eQowEmrZ(@@f&k=K#@1<%B%pfRmlBJ{kC(DP}E>NXKgva764+cT8y
z5?qA3!TJ_4bw8Z4@7KHhp_{jcn^q)CJA8*v{Bf$%IcIg3HuH@n@|^Af@@6Nz{P7iX
zRt`kluP=r916{`0u8M%wUv_c`zajTelw}BT&2#C(%(WgMh*!wliqzel<@oyeTP?62
zx0>ja4cm_AF8&;{(m7D+tX4Z;dms6-
zON9zumu@gZ>sWUHmEu(+!vvUqD3@pRVxL&-*_YDgnO3YotB4CSQKHtV@veNV6i`d$
zi$DNeug;{_`>yD#V&@{;u~opv70DeyOta;3Qf*T`lBK-YkHf89;&6Xn)vNDJV00!Y
z1dVVoNjKnNk6P?lJRP%?CmY?!$MP*)%Zr8#o$*tn`T5zaosFm-8jzfp&12LLvL#I6
zZKmN3Dw<1Z{D_k2OLr3|)y>+1KLC7Krmt(iy?J?Y$@lc#yJev%%FmptNA(2Rgjs~Xz3sE1nQiAH
z>*6MQ1EWOpvte-`c;K}hfFF4PZ*o~1Fy&@-`uY^FpAZe5!Ui9U`9RcsQNT&LtRaAL
z%E(GQa~GV+Wj*?zaGNPc#qdI@&aoOrtt(-GcBcmXs
z!{rp`x4dDtQ-
zqAy;){L*)6mDq(_U7x7&Dt=#cOC%nn(#Gy)DdX^Zc#Pp<`q`M#BatEt{*X*b*elC~
zv<8p;>j}{j!_yz!Rz0L|X!0AtZ)H43^lSnAn3-X>*s*QikKHb}4%}(#rft!CtmAi+
zCsIi-FpEcv9B$jFc5{xsPAcJD-NX;2-GFDbSNth8*Bfpy<30^zho?1nfMw5=&D&!?
zt!I8EnI5vKgtR_K*V}6584F17PqK@~4<&L&&lbteE@^KVy~eK3Jd#s#^iD`aUAl*F
zFXC++^%$^*=3WJ&{dkT+jsdz_$kp+89=qJ3>Dr-?IX}Hv*zeCBAHN>X`f79s;1MhK
zNO{$06Y^zNP^TcH&**evH~I2gx99XbAs48Q-TGnRm}Mmoyr*By)T%^XLKQ-bMb*$k
z{VUor&}DdoWWY6^+HihP()^d!u3O%N8)B;D9&~3G(P67s$E0)du-#+Mhdc5&H=|KKfZu78gDA$;f5y-EpeVYrRW>#Q`atX`&AgTV1otMjO#SO85CM6^|$md%V*$+%^mjoRh;@w`{6(<5-*R)Y67Wj}d)+`dcBrCxTmeY=Twx(8d;CJ2_p)9jGjc_Sfb
z=Cwf2rSi6nz_!gDpydv*>y=!^_QLuggg=GU)9d;vrPwadTUz9}o7TSaf@Lp}g3J82
zQZ6*dhP&Bc-)|`IKVPVWpHPqwHW&aQFlqz@7=k}sQh>n`b8sC5z8&vt;19I`-2Y1p
z3NW(73cev9Hh+Cj#z+NM0r2=XKsyK+0Qpu5847YCfbU;*@blq2?{6LygoZTw+jZae
zfe-so`R__dM~H_1z5h1`|GNhM|G5VKo;;$!#9!DAx{85=nF$CC2SXq}2x@YVgUNzG
zBL6^7kmU?O7S1M$P6iO91CqS}5Xkd4esZ6oAh77)X$W-scNz@#+B^UW07Yxi+uuS+
zIzu+Jc5)E10i#182-*r!|NFu)u`x6;mJ$&GW6Y)&)+P@qBr=2$2Rhh*!Ixj~II{R3
z4y3QNf|vxjK(z(R8h}71_fWw@XD~tzGy@+wa0I*bz(aKf;|IoI2dynYZ~x&tk|snV
zWorrq>w|pcM=~-uu(dU@{sk8OPP}L)Hg@I)jux&a5ZvTJE9M`r+C7T<2cCvAwFUwm
zeuv=xErgK