diff --git a/src/main/java/org/phoebus/olog/ElasticConfig.java b/src/main/java/org/phoebus/olog/ElasticConfig.java
index 7ebb01e..ca9e922 100644
--- a/src/main/java/org/phoebus/olog/ElasticConfig.java
+++ b/src/main/java/org/phoebus/olog/ElasticConfig.java
@@ -57,6 +57,9 @@ public class ElasticConfig {
     private String ES_LOG_INDEX;
     @Value("${elasticsearch.sequence.index:olog_sequence}")
     private String ES_SEQ_INDEX;
+    @Value("${elasticsearch.log.archive.index:olog_archived_logs}")
+    private String ES_LOG_ARCHIVE_INDEX;
+
     @Value("${elasticsearch.cluster.name:elasticsearch}")
     private String clusterName;
 
@@ -175,7 +178,18 @@ void elasticIndexValidation(ElasticsearchClient client) {
         } catch (IOException e) {
             logger.log(Level.WARNING, "Failed to create index " + ES_LOG_INDEX, e);
         }
-
+        // Olog Archived Log Template
+        try (InputStream is = ElasticConfig.class.getResourceAsStream("/log_entry_mapping.json")) {
+            BooleanResponse exists = client.indices().exists(ExistsRequest.of(e -> e.index(ES_LOG_ARCHIVE_INDEX)));
+            if (!exists.value()) {
+                CreateIndexResponse result = client.indices().create(
+                        CreateIndexRequest.of(
+                                c -> c.index(ES_LOG_ARCHIVE_INDEX).withJson(is)));
+                logger.info("Created index: " + ES_LOG_ARCHIVE_INDEX + " : acknowledged " + result.acknowledged());
+            }
+        } catch (IOException e) {
+            logger.log(Level.WARNING, "Failed to create index " + ES_LOG_ARCHIVE_INDEX, e);
+        }
     }
 
     private static final ObjectMapper mapper = new ObjectMapper();
diff --git a/src/main/java/org/phoebus/olog/LogRepository.java b/src/main/java/org/phoebus/olog/LogRepository.java
index 5646942..f07d7cc 100644
--- a/src/main/java/org/phoebus/olog/LogRepository.java
+++ b/src/main/java/org/phoebus/olog/LogRepository.java
@@ -6,8 +6,13 @@ package org.phoebus.olog;
 
 import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.elasticsearch._types.FieldSort;
 import co.elastic.clients.elasticsearch._types.Refresh;
 import co.elastic.clients.elasticsearch._types.Result;
+import co.elastic.clients.elasticsearch._types.SortOptions;
+import co.elastic.clients.elasticsearch._types.SortOrder;
+import co.elastic.clients.elasticsearch._types.query_dsl.WildcardQuery;
+import co.elastic.clients.elasticsearch.core.ExistsRequest;
 import co.elastic.clients.elasticsearch.core.GetRequest;
 import co.elastic.clients.elasticsearch.core.GetResponse;
 import co.elastic.clients.elasticsearch.core.IndexRequest;
@@ -18,6 +23,7 @@ import co.elastic.clients.elasticsearch.core.SearchResponse;
 import co.elastic.clients.elasticsearch.core.mget.MultiGetResponseItem;
 import co.elastic.clients.elasticsearch.core.search.Hit;
 
+import org.apache.logging.log4j.util.Strings;
 import org.phoebus.olog.entity.Attachment;
 import org.phoebus.olog.entity.Log;
 import org.phoebus.olog.entity.Log.LogBuilder;
@@ -51,6 +57,9 @@ public class LogRepository implements CrudRepository<Log, String> {
     @Value("${elasticsearch.log.index:olog_logs}")
     private String ES_LOG_INDEX;
 
+    @Value("${elasticsearch.log.archive.index:olog_archived_logs}")
+    private String ES_LOG_ARCHIVE_INDEX;
+
     @SuppressWarnings("unused")
     @Autowired
     @Qualifier("client")
@@ -112,10 +121,9 @@ public <S extends Log> Iterable<S> saveAll(Iterable<S> logs) {
     public Log update(Log log) {
         try {
             Log document = LogBuilder.createLog(log).build();
-            IndexRequest indexRequest = IndexRequest.of(i ->
-                    i.index(ES_LOG_INDEX)
+            IndexRequest<Log> indexRequest = IndexRequest.of(i -> i.index(ES_LOG_INDEX)
                     .id(String.valueOf(document.getId()))
                     .document(document));
 
@@ -131,16 +139,70 @@ public Log update(Log log) {
         } catch (Exception e) {
             logger.log(Level.SEVERE, "Failed to save log entry: " + log, e);
-            throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Failed to save log entry: " + log);
+            throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Failed to update log entry: " + log);
         }
         return null;
     }
 
+    public Log archive(Log log) {
+        try {
+            // retrieve the log version from elastic
+            GetResponse<Log> resp = client.get(GetRequest.of(g ->
+                    g.index(ES_LOG_INDEX).id(String.valueOf(log.getId()))), Log.class);
+            if (!resp.found()) {
+                logger.log(Level.SEVERE, "Failed to archive log with id: " + log.getId());
+            } else {
+                Log originalDocument = resp.source();
+                String updatedVersion = originalDocument.getId() + "_v" + resp.version();
+                IndexRequest<Log> indexRequest =
+                        IndexRequest.of(i ->
+                                i.index(ES_LOG_ARCHIVE_INDEX)
+                                        .id(updatedVersion)
+                                        .document(originalDocument)
+                                        .refresh(Refresh.True));
+                IndexResponse response = client.index(indexRequest);
+                if (response.result().equals(Result.Created)) {
+                    GetRequest getRequest =
+                            GetRequest.of(g ->
+                                    g.index(ES_LOG_ARCHIVE_INDEX).id(response.id()));
+                    return client.get(getRequest, Log.class).source();
+                } else {
+                    logger.log(Level.SEVERE, "Failed to archive log with id: " + updatedVersion);
+                }
+            }
+        } catch (IOException e) {
+            logger.log(Level.SEVERE, "Failed to archive log with id: " + log.getId(), e);
+        }
+        return null;
+    }
+
+    public SearchResult findArchivedById(String id) {
+        FieldSort.Builder fb = new FieldSort.Builder();
+        fb.field("modifyDate");
+        fb.order(SortOrder.Desc);
+
+        SearchRequest searchRequest = SearchRequest.of(s -> s.index(ES_LOG_ARCHIVE_INDEX)
+                .query(WildcardQuery.of(q -> q.field("id").caseInsensitive(true).value(id + "*"))._toQuery())
+                .timeout("60s")
+                .sort(SortOptions.of(so -> so.field(fb.build()))));
+        try {
+            final SearchResponse<Log> searchResponse = client.search(searchRequest, Log.class);
+            List<Log> result = searchResponse.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
+            SearchResult searchResult = new SearchResult();
+            searchResult.setHitCount(searchResponse.hits().total().value());
+            searchResult.setLogs(result);
+            return searchResult;
+        } catch (IOException | IllegalArgumentException e) {
+            logger.log(Level.SEVERE, "Failed to complete search for archived logs", e);
+            throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Failed to complete search for archived logs");
+        }
+    }
+
     @Override
     public Optional<Log> findById(String id) {
         try {
             GetRequest getRequest =
-                    co.elastic.clients.elasticsearch.core.GetRequest.of(g ->
+                    GetRequest.of(g ->
                             g.index(ES_LOG_INDEX).id(id));
 
             GetResponse<Log> resp = client.get(getRequest, Log.class);
@@ -159,12 +221,8 @@ public Optional<Log> findById(String id) {
     @Override
     public boolean existsById(String logId) {
         try {
-            GetRequest getRequest =
-                    GetRequest.of(g ->
-                            g.index(ES_LOG_INDEX).id(logId));
-            GetResponse<Log> resp =
-                    client.get(getRequest, Log.class);
-            return resp.found();
+            ExistsRequest existsRequest = ExistsRequest.of(e -> e.index(ES_LOG_INDEX).id(logId));
+            return client.exists(existsRequest).value();
         } catch (IOException e) {
             logger.log(Level.SEVERE, "Failed to check existence of log with id: " + logId, e);
             throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Failed to check existence of log with id: " + logId);
diff --git a/src/main/java/org/phoebus/olog/LogResource.java b/src/main/java/org/phoebus/olog/LogResource.java
index 766ea0f..80bf509 100644
--- a/src/main/java/org/phoebus/olog/LogResource.java
+++ b/src/main/java/org/phoebus/olog/LogResource.java
@@ -12,6 +12,7 @@ import org.phoebus.olog.notification.LogEntryNotifier;
 import org.phoebus.util.time.TimeParser;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.core.io.InputStreamResource;
 import org.springframework.core.io.Resource;
 import org.springframework.core.task.TaskExecutor;
@@ -100,6 +101,13 @@ public Log getLog(@PathVariable String logId) {
         }
     }
 
+    @GetMapping("archived/{logId}")
+    @SuppressWarnings("unused")
+    public SearchResult getArchivedLog(@PathVariable String logId) {
+        SearchResult searchResult = logRepository.findArchivedById(logId);
+        return searchResult;
+    }
+
     @GetMapping("/attachments/{logId}/{attachmentName}")
     public ResponseEntity<Resource> findResources(@PathVariable String logId, @PathVariable String attachmentName) {
         Optional<Log> log = logRepository.findById(logId);
@@ -301,6 +309,15 @@ public Log createLog(@RequestHeader(value = OLOG_CLIENT_INFO_HEADER, required =
 
     }
 
+    /**
+     * Add an attachment to the log entry identified by logId.
+     * @param logId log entry ID
+     * @param file the file to be attached
+     * @param filename name of file
+     * @param id UUID for file in mongo
+     * @param fileMetadataDescription file metadata
+     * @return the log entry updated with the new attachment
+     */
     @PostMapping("/attachments/{logId}")
     public Log uploadAttachment(@PathVariable String logId,
                                 @RequestPart("file") MultipartFile file,
@@ -332,7 +349,6 @@ public Log uploadAttachment(@PathVariable String logId,
      * of logbooks or tags, the updated log record will reflect that. However, the following data is NOT updated:
      * <ul>
      *     <li>Attachments</li>
-     *     <li>Owner (author)</li>
      *     <li>Created date</li>
      *     <li>Events</li>
      * </ul>
@@ -341,6 +357,7 @@ public Log uploadAttachment(@PathVariable String logId,
      * @param logId The log id of the entry subject to update. It must exist, i.e. it is not created of not found.
      * @param markup Markup strategy, if any.
      * @param log The log record data as sent by client.
+     * @param principal The authenticated {@link Principal} of the request.
      * @return The updated log record, or HTTP status 404 if the log record does not exist. If the path
      * variable does not match the id in the log record, HTTP status 400 (bad request) is returned.
      */
@@ -348,15 +365,20 @@ public Log uploadAttachment(@PathVariable String logId,
     @PostMapping("/{logId}")
     public Log updateLog(@PathVariable String logId,
                          @RequestParam(value = "markup", required = false) String markup,
-                         @RequestBody Log log) {
+                         @RequestBody Log log,
+                         @AuthenticationPrincipal Principal principal) {
+
+        // In case a client sends a log record where the id does not match the path variable, return HTTP 400 (bad request)
+        if (!logId.equals(Long.toString(log.getId()))) {
+            throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Log entry id does not match path variable");
+        }
 
         Optional<Log> foundLog = logRepository.findById(logId);
         if (foundLog.isPresent()) {
             Log persistedLog = foundLog.get();
-            // In case a client sends a log record where the id does not match the path variable, return HTTP 400 (bad request)
-            if (!logId.equals(Long.toString(log.getId()))) {
-                throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Log entry id does not match path variable");
-            }
+            logRepository.archive(persistedLog);
+
+            persistedLog.setOwner(principal.getName());
             persistedLog.setLevel(log.getLevel());
             persistedLog.setProperties(log.getProperties());
             persistedLog.setModifyDate(Instant.now());
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index a20d753..0999452 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -113,6 +113,9 @@ elasticsearch.log.index: olog_logs
 
 elasticsearch.sequence.index: olog_sequence
 
+# Archive modified log entries
+elasticsearch.log.archive.index: olog_archived_logs
+
 ############################## Mongo gridfs client ###############################
 
 mongo.database:ologAttachments
diff --git a/src/site/sphinx/index.rst b/src/site/sphinx/index.rst
index 76bdf9b..d1e04ea 100644
--- a/src/site/sphinx/index.rst
+++ b/src/site/sphinx/index.rst
@@ -304,6 +304,29 @@ Find entries with at least one attachment of type 'image'
 
 **GET** https://localhost:8181/Olog/logs/search?attachments=image
 
+Updating a Log Entry
+********************
+
+**POST** https://localhost:8181/Olog/logs/{logId}
+
+Update a log entry. The original log entry is archived in a separate Elasticsearch index before any of the changes are applied.
+
+Note: the id in the request body must match the {logId} path variable; the create date, attachments, and events cannot be modified.
+
+.. code-block:: json
+
+   {
+      "owner":"log",
+      "description":"Beam Dump due to Major power dip Current Alarms Booster transmitter switched back to lower state.
+ New important info appended", + "level":"Info", + "title":"A new title", + "logbooks":[ + { + "name":"Operations" + } + ] + } Managing Logbooks & Tags ************************ diff --git a/src/test/java/org/phoebus/olog/LogRepositoryIT.java b/src/test/java/org/phoebus/olog/LogRepositoryIT.java index 6585677..1901479 100644 --- a/src/test/java/org/phoebus/olog/LogRepositoryIT.java +++ b/src/test/java/org/phoebus/olog/LogRepositoryIT.java @@ -3,6 +3,10 @@ import co.elastic.clients.elasticsearch.ElasticsearchClient; import co.elastic.clients.elasticsearch._types.Refresh; import co.elastic.clients.elasticsearch.core.DeleteRequest; +import co.elastic.clients.elasticsearch.core.ExistsRequest; +import co.elastic.clients.elasticsearch.core.GetRequest; +import co.elastic.clients.elasticsearch.core.GetResponse; +import co.elastic.clients.transport.endpoints.BooleanResponse; import com.mongodb.client.gridfs.GridFSBucket; import com.mongodb.client.gridfs.model.GridFSFile; @@ -48,6 +52,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.springframework.data.mongodb.core.query.Criteria.where; @ExtendWith(SpringExtension.class) @@ -98,17 +103,20 @@ public class LogRepositoryIT { private String ES_PROPERTY_TYPE; @Value("${elasticsearch.log.index:olog_logs}") private String ES_LOG_INDEX; + @Value("${elasticsearch.log.archive.index:olog_archived_logs}") + private String ES_LOG_ARCHIVE_INDEX; + @Value("${elasticsearch.log.type:olog_log}") private String ES_LOG_TYPE; - private static final String testOwner = "test-owner"; - private static final Logbook testLogbook = new Logbook("test-logbook-1", testOwner, State.Active); - private static final Tag testTag = new Tag("test-tag-1", State.Active); + private static final String TEST_OWNER = "test-owner"; + private static final Logbook TEST_LOGBOOK_1 = new Logbook("test-logbook-1", TEST_OWNER, State.Active); + private static final Tag TEST_TAG_1 = new Tag("test-tag-1", State.Active); - private static final Attribute attribute1 = new Attribute("test-attribute-1"); - private static final Attribute attribute2 = new Attribute("test-attribute-2"); - private static final Set attributes = new HashSet<>(List.of(attribute1, attribute2)); - private static final Property testProperty = new Property("test-property-1", testOwner, State.Active, attributes); + private static final Attribute ATTRIBUTE_1 = new Attribute("test-attribute-1"); + private static final Attribute ATTRIBUTE_2 = new Attribute("test-attribute-2"); + private static final Set ATTRIBUTES = new HashSet<>(List.of(ATTRIBUTE_1, ATTRIBUTE_2)); + private static final Property TEST_PROPERTY_1 = new Property("test-property-1", TEST_OWNER, State.Active, ATTRIBUTES); /** @@ -119,39 +127,39 @@ public class LogRepositoryIT { @Test public void createLog() throws IOException { try { - logbookRepository.save(testLogbook); - tagRepository.save(testTag); - propertyRepository.save(testProperty); + logbookRepository.save(TEST_LOGBOOK_1); + tagRepository.save(TEST_TAG_1); + propertyRepository.save(TEST_PROPERTY_1); // create a log entry with a logbook only - Log log1 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner).withLogbook(testLogbook).build(); + Log log1 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withLogbook(TEST_LOGBOOK_1).build(); Log createdLog1 = logRepository.save(log1); 
assertNotNull(createdLog1.getId(), "Failed to create a log entry with a valid id"); - assertTrue(createdLog1.getLogbooks().contains(testLogbook)); + assertTrue(createdLog1.getLogbooks().contains(TEST_LOGBOOK_1)); Log retrievedLog1 = logRepository.findById(String.valueOf(createdLog1.getId())).get(); assertNotNull(retrievedLog1.getId(), "Failed to create a log entry with a valid id"); - assertTrue(retrievedLog1.getLogbooks().contains(testLogbook)); + assertTrue(retrievedLog1.getLogbooks().contains(TEST_LOGBOOK_1)); - Log log2 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner).withTag(testTag) - .withLogbook(testLogbook).build(); + Log log2 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withTag(TEST_TAG_1) + .withLogbook(TEST_LOGBOOK_1).build(); Log createdLog2 = logRepository.save(log2); - assertTrue(createdLog2.getLogbooks().contains(testLogbook)); - assertTrue(createdLog2.getTags().contains(testTag)); + assertTrue(createdLog2.getLogbooks().contains(TEST_LOGBOOK_1)); + assertTrue(createdLog2.getTags().contains(TEST_TAG_1)); - Log log3 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner).withTag(testTag) - .withLogbook(testLogbook).withProperty(testProperty).build(); + Log log3 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withTag(TEST_TAG_1) + .withLogbook(TEST_LOGBOOK_1).withProperty(TEST_PROPERTY_1).build(); Log createdLog3 = logRepository.save(log3); - assertTrue(createdLog3.getLogbooks().contains(testLogbook)); - assertTrue(createdLog3.getTags().contains(testTag)); - assertTrue(createdLog3.getProperties().contains(testProperty)); + assertTrue(createdLog3.getLogbooks().contains(TEST_LOGBOOK_1)); + assertTrue(createdLog3.getTags().contains(TEST_TAG_1)); + assertTrue(createdLog3.getProperties().contains(TEST_PROPERTY_1)); client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(createdLog1.getId().toString()).refresh(Refresh.True))); client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(createdLog2.getId().toString()).refresh(Refresh.True))); client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(createdLog3.getId().toString()).refresh(Refresh.True))); } finally { - client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( testLogbook.getName()).refresh(Refresh.True))); - client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( testTag.getName()).refresh(Refresh.True))); - client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( testProperty.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( TEST_LOGBOOK_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( TEST_TAG_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( TEST_PROPERTY_1.getName()).refresh(Refresh.True))); } } @@ -164,15 +172,15 @@ public void createLog() throws IOException { @Test public void createLogWithEvents() throws IOException { try { - logbookRepository.save(testLogbook); - tagRepository.save(testTag); - propertyRepository.save(testProperty); + logbookRepository.save(TEST_LOGBOOK_1); + tagRepository.save(TEST_TAG_1); + propertyRepository.save(TEST_PROPERTY_1); List testEvents = List.of(new Event("now", Instant.ofEpochMilli(System.currentTimeMillis()))); // create a log entry with a logbook only Log log1 = Log.LogBuilder.createLog("This is a test entry") - .owner(testOwner) - .withLogbook(testLogbook) + .owner(TEST_OWNER) + .withLogbook(TEST_LOGBOOK_1) 
.withEvents(testEvents) .build(); Log createdLog1 = logRepository.save(log1); @@ -184,9 +192,9 @@ public void createLogWithEvents() throws IOException { assertTrue(retrievedLog1.getEvents().containsAll(testEvents)); client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(createdLog1.getId().toString()).refresh(Refresh.True))); } finally { - client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( testLogbook.getName()).refresh(Refresh.True))); - client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( testTag.getName()).refresh(Refresh.True))); - client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( testProperty.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( TEST_LOGBOOK_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( TEST_TAG_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( TEST_PROPERTY_1.getName()).refresh(Refresh.True))); } } @@ -197,9 +205,9 @@ public void createLogWithEvents() throws IOException { */ @Test public void createLogWithAttachment() throws IOException { - logbookRepository.save(testLogbook); - tagRepository.save(testTag); - propertyRepository.save(testProperty); + logbookRepository.save(TEST_LOGBOOK_1); + tagRepository.save(TEST_TAG_1); + propertyRepository.save(TEST_PROPERTY_1); try { File testFile = new File("src/test/resources/Tulips.jpg"); @@ -208,10 +216,10 @@ public void createLogWithAttachment() throws IOException { Attachment testAttachment = new Attachment(mock, "Tulips.jpg", ""); Log log = Log.LogBuilder.createLog("This is a test entry") - .owner(testOwner) - .withTag(testTag) - .withLogbook(testLogbook) - .withProperty(testProperty) + .owner(TEST_OWNER) + .withTag(TEST_TAG_1) + .withLogbook(TEST_LOGBOOK_1) + .withProperty(TEST_PROPERTY_1) .withAttachment(testAttachment) .build(); Log createdLog = logRepository.save(log); @@ -232,15 +240,11 @@ public void createLogWithAttachment() throws IOException { } }); - assertTrue(createdLog.getLogbooks().contains(testLogbook)); - assertTrue(createdLog.getTags().contains(testTag)); - assertTrue(createdLog.getProperties().contains(testProperty)); - client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(createdLog.getId().toString()).refresh(Refresh.True))); } finally { - client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( testLogbook.getName()).refresh(Refresh.True))); - client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( testTag.getName()).refresh(Refresh.True))); - client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( testProperty.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( TEST_LOGBOOK_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( TEST_TAG_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( TEST_PROPERTY_1.getName()).refresh(Refresh.True))); } } @@ -251,17 +255,17 @@ public void createLogWithAttachment() throws IOException { */ @Test public void createLogs(){ - logbookRepository.save(testLogbook); - tagRepository.save(testTag); - propertyRepository.save(testProperty); + logbookRepository.save(TEST_LOGBOOK_1); + tagRepository.save(TEST_TAG_1); + propertyRepository.save(TEST_PROPERTY_1); // create a log entry with a logbook only - Log log1 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner) - 
.withLogbook(testLogbook).build(); - Log log2 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner) - .withLogbook(testLogbook).withTag(testTag).build(); - Log log3 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner) - .withLogbook(testLogbook).withTag(testTag).withProperty(testProperty).build(); + Log log1 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER) + .withLogbook(TEST_LOGBOOK_1).build(); + Log log2 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER) + .withLogbook(TEST_LOGBOOK_1).withTag(TEST_TAG_1).build(); + Log log3 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER) + .withLogbook(TEST_LOGBOOK_1).withTag(TEST_TAG_1).withProperty(TEST_PROPERTY_1).build(); List createdLogs = new ArrayList<>(); logRepository.saveAll(List.of(log1, log2, log3)).forEach(createdLogs::add); @@ -277,13 +281,137 @@ public void createLogs(){ }); } + /** + * Test the archive of a simple test log + * + * @throws IOException + */ + @Test + public void archiveLog() throws IOException { + try { + logbookRepository.save(TEST_LOGBOOK_1); + tagRepository.save(TEST_TAG_1); + propertyRepository.save(TEST_PROPERTY_1); + + Log originalLog = Log.LogBuilder.createLog("This is a test entry") + .owner(TEST_OWNER) + .withTag(TEST_TAG_1) + .withLogbook(TEST_LOGBOOK_1) + .withProperty(TEST_PROPERTY_1).build(); + Log createdOriginalLog = logRepository.save(originalLog); + + Log archivedLog = logRepository.archive(createdOriginalLog); + + ExistsRequest existsRequest = ExistsRequest.of(e -> e.index(ES_LOG_ARCHIVE_INDEX).id(archivedLog.getId()+"_v1")); + BooleanResponse response = client.exists(existsRequest); + assertTrue(response.value() , "Failed to archive log entries."); + + GetResponse archivedResponse = client.get(GetRequest.of(g -> g.index(ES_LOG_ARCHIVE_INDEX).id(archivedLog.getId()+"_v1")), Log.class); + assertTrue(containsLogs(List.of(createdOriginalLog), List.of(archivedResponse.source())), "Archived log is same as the created log"); + + client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(createdOriginalLog.getId().toString()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_LOG_ARCHIVE_INDEX).id(archivedLog.getId()+"_v1").refresh(Refresh.True))); + } finally { + client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( TEST_LOGBOOK_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( TEST_TAG_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( TEST_PROPERTY_1.getName()).refresh(Refresh.True))); + } + + } + + /** + * Test the archiving of a simple test log + * + * @throws IOException + */ + @Test + public void archiveLogs() throws IOException { + try { + logbookRepository.save(TEST_LOGBOOK_1); + tagRepository.save(TEST_TAG_1); + propertyRepository.save(TEST_PROPERTY_1); + + Log originalLog = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withLogbook(TEST_LOGBOOK_1).build(); + + Log originalCreatedLog = logRepository.save(originalLog); + Log archivedLog = logRepository.archive(originalCreatedLog); + logRepository.update(originalCreatedLog); + Log updatedLog1 = logRepository.archive(originalCreatedLog); + logRepository.update(originalCreatedLog); + Log updatedLog2 = logRepository.archive(originalCreatedLog); + + // Archiving the same log entry multiple times should result in newer "ids" + // Check that the log entry has been archived before updating + String archiveLogId0 = 
originalCreatedLog.getId() +"_v1"; + String archiveLogId1 = originalCreatedLog.getId() +"_v2"; + String archiveLogId2 = originalCreatedLog.getId() +"_v3"; + + List expectedArchivedLogs = List.of(archiveLogId0, archiveLogId1, archiveLogId2); + expectedArchivedLogs.stream().forEach(expectedArchivedLog -> { + try { + assertTrue(client.exists(ExistsRequest.of(e -> e.index(ES_LOG_ARCHIVE_INDEX).id(expectedArchivedLog))).value() , "Failed to archive log entries."); + client.delete(DeleteRequest.of(d -> d.index(ES_LOG_ARCHIVE_INDEX).id(expectedArchivedLog).refresh(Refresh.True))); + } catch (IOException e) { + fail("updated log " + expectedArchivedLog + " was not archived" , e); + } + }); + + + client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(originalCreatedLog.getId().toString()).refresh(Refresh.True))); + } finally { + client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( TEST_LOGBOOK_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( TEST_TAG_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( TEST_PROPERTY_1.getName()).refresh(Refresh.True))); + } + + } + + + /** + * Test the updating of a simple test log + * + * @throws IOException + */ + @Test + public void updateLog() throws IOException { + try { + logbookRepository.save(TEST_LOGBOOK_1); + tagRepository.save(TEST_TAG_1); + propertyRepository.save(TEST_PROPERTY_1); + + Log originalLog = Log.LogBuilder.createLog("This is a test entry") + .owner(TEST_OWNER) + .withTag(TEST_TAG_1).build(); + Log originalCreatedLog = logRepository.save(originalLog); + + String updatedSource = "This is an updated test entry"; + originalLog.setId(originalCreatedLog.getId()); + originalLog.setCreatedDate(originalCreatedLog.getCreatedDate()); + originalLog.setSource(updatedSource); + originalLog.setTags(Set.of(TEST_TAG_1)); + originalLog.setProperties(Set.of(TEST_PROPERTY_1)); + + Log updatedLog = logRepository.update(originalLog); + + assertTrue(updatedLog.getSource().equals(updatedSource), "Failed to update the source"); + assertTrue(updatedLog.getTags().contains(TEST_TAG_1) && updatedLog.getProperties().contains(TEST_PROPERTY_1), "Failed to update with new Tags and Properties"); + + client.delete(DeleteRequest.of(d -> d.index(ES_LOG_INDEX).id(originalCreatedLog.getId().toString()).refresh(Refresh.True))); + } finally { + client.delete(DeleteRequest.of(d -> d.index(ES_LOGBOOK_INDEX).id( TEST_LOGBOOK_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_TAG_INDEX).id( TEST_TAG_1.getName()).refresh(Refresh.True))); + client.delete(DeleteRequest.of(d -> d.index(ES_PROPERTY_INDEX).id( TEST_PROPERTY_1.getName()).refresh(Refresh.True))); + } + + } + @Test public void checkLogExists() throws IOException { // check for non existing log entry assertFalse(logRepository.existsById("123456789"), "Failed to check non existance of log entry 123456789"); // check for an existing log entry - Log log = Log.LogBuilder.createLog("This is a test entry").owner(testOwner).withLogbook(testLogbook).build(); + Log log = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withLogbook(TEST_LOGBOOK_1).build(); Log createdLog = logRepository.save(log); assertNotNull(createdLog.getId(), "Failed to create a log entry with a valid id"); @@ -296,7 +424,7 @@ public void checkLogExists() throws IOException { @Test public void findLogsById() throws IOException { // check for an existing log entry - Log log = 
Log.LogBuilder.createLog("This is a test entry").owner(testOwner).withLogbook(testLogbook).build(); + Log log = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withLogbook(TEST_LOGBOOK_1).build(); Log createdLog = logRepository.save(log); assertNotNull(createdLog.getId(), "Failed to create a log entry with a valid id"); @@ -314,8 +442,8 @@ public void findLogsByNonExistingId() throws IOException { @Test public void findLogsByIds() throws IOException { // check for an existing log entry - Log log1 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner).withLogbook(testLogbook).build(); - Log log2 = Log.LogBuilder.createLog("This is a test entry").owner(testOwner).withLogbook(testLogbook).build(); + Log log1 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withLogbook(TEST_LOGBOOK_1).build(); + Log log2 = Log.LogBuilder.createLog("This is a test entry").owner(TEST_OWNER).withLogbook(TEST_LOGBOOK_1).build(); List createdLogs = StreamSupport.stream(logRepository.saveAll(List.of(log1, log2)).spliterator(), false).collect(Collectors.toList()); assertTrue( containsLogs(logRepository.findAllById(createdLogs.stream().map(log -> String.valueOf(log.getId())).collect(Collectors.toList())) diff --git a/src/test/resources/test_application.properties b/src/test/resources/test_application.properties index cda512f..68674cc 100644 --- a/src/test/resources/test_application.properties +++ b/src/test/resources/test_application.properties @@ -1,10 +1,13 @@ # the server port for the rest service server.port: 9900 +server.http.enable=true +server.http.port=8080 + # Disable the spring banner #spring.main.banner-mode=off -# suppress the logging from spring boot +# suppress the logging from spring boot # during debugging this should be set to DEBUG logging.level.root=INFO logging.level.org.springframework=INFO @@ -13,6 +16,11 @@ logging.level.org.apache.kafka=INFO spring.main.allow-bean-definition-overriding=true +server.ssl.key-store-type=PKCS12 +server.ssl.key-store=classpath:keystore/newcf.p12 +server.ssl.key-store-password=password +server.ssl.key-alias=cf + security.require-ssl=false ############################## Elastic Search ############################### @@ -49,7 +57,9 @@ mongo.port:27017 spring.profiles.active=ITtest - ldap.enabled = false embedded_ldap.enabled = false demo_auth.enabled = true + +########################## Archive modified log entries ########################## +elasticsearch.log.archive.index: test_olog_archived_logs
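
Reviewer note, not part of the patch: the sketch below exercises the new update/archive round trip end to end against a running instance. It is only an illustration. The base URL is the one used in the docs above (https://localhost:8181/Olog); the credentials (admin/adminPass), the log id (42), and the class name are placeholders, and a local instance with a self-signed certificate needs that certificate trusted (or a plain-HTTP port enabled) before the calls succeed. Java 17 is assumed.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

/**
 * Illustration of the update/archive behaviour added by this change.
 * Base URL, credentials and the log id are placeholders.
 */
public class ArchivedLogRoundTrip {

    public static void main(String[] args) throws Exception {
        String base = "https://localhost:8181/Olog";   // placeholder deployment URL
        String basicAuth = "Basic " + Base64.getEncoder()
                .encodeToString("admin:adminPass".getBytes(StandardCharsets.UTF_8)); // placeholder credentials

        HttpClient client = HttpClient.newHttpClient();

        // POST /logs/{logId}: the id in the body must match the path variable,
        // otherwise the service now answers with HTTP 400. The pre-update version
        // of the entry is copied to the olog_archived_logs index under {id}_v{version}.
        String body = """
                {
                  "id": 42,
                  "owner": "log",
                  "title": "A new title",
                  "level": "Info",
                  "description": "Updated description",
                  "logbooks": [ { "name": "Operations" } ]
                }""";
        HttpRequest update = HttpRequest.newBuilder(URI.create(base + "/logs/42"))
                .header("Content-Type", "application/json")
                .header("Authorization", basicAuth)
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        System.out.println(client.send(update, HttpResponse.BodyHandlers.ofString()).body());

        // GET /logs/archived/{logId}: returns a SearchResult listing every archived
        // version of the entry, newest first (sorted on modifyDate).
        HttpRequest archived = HttpRequest.newBuilder(URI.create(base + "/logs/archived/42"))
                .header("Authorization", basicAuth)
                .GET()
                .build();
        System.out.println(client.send(archived, HttpResponse.BodyHandlers.ofString()).body());
    }
}

Updating the same entry repeatedly leaves 42_v1, 42_v2, and so on in the archive index, which is what the new archiveLogs integration test asserts, and the archived endpoint returns them newest first.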