Ajay Yadav commited on
Commit
1c19d33
·
1 Parent(s): a568667

Initial deployment of da-autoarchival-dev

Browse files
Files changed (49) hide show
  1. Dockerfile +27 -0
  2. README.md +34 -6
  3. build.gradle.kts +21 -0
  4. src/main/docker/Dockerfile +23 -0
  5. src/main/docker/Dockerfile.alpine-jlink +43 -0
  6. src/main/docker/Dockerfile.layered +34 -0
  7. src/main/docker/Dockerfile.native +20 -0
  8. src/main/java/com/dalab/autoarchival/DaAutoarchivalApplication.java +35 -0
  9. src/main/java/com/dalab/autoarchival/controller/ArchivalConfigController.java +68 -0
  10. src/main/java/com/dalab/autoarchival/controller/ArchivalTaskController.java +147 -0
  11. src/main/java/com/dalab/autoarchival/dto/ArchivalConfigDTO.java +43 -0
  12. src/main/java/com/dalab/autoarchival/dto/ArchivalTaskListResponse.java +41 -0
  13. src/main/java/com/dalab/autoarchival/dto/ArchivalTaskRequest.java +56 -0
  14. src/main/java/com/dalab/autoarchival/dto/ArchivalTaskResponse.java +27 -0
  15. src/main/java/com/dalab/autoarchival/dto/ArchivalTaskStatusDTO.java +52 -0
  16. src/main/java/com/dalab/autoarchival/dto/RestoreRequestDTO.java +32 -0
  17. src/main/java/com/dalab/autoarchival/dto/RestoreResponseDTO.java +30 -0
  18. src/main/java/com/dalab/autoarchival/dto/TaskApprovalRequest.java +21 -0
  19. src/main/java/com/dalab/autoarchival/exception/ArchivalException.java +15 -0
  20. src/main/java/com/dalab/autoarchival/kafka/consumer/PolicyActionEventListener.java +58 -0
  21. src/main/java/com/dalab/autoarchival/mapper/ArchivalConfigMapper.java +25 -0
  22. src/main/java/com/dalab/autoarchival/mapper/ArchivalTaskMapper.java +29 -0
  23. src/main/java/com/dalab/autoarchival/model/ArchivalConfigEntity.java +41 -0
  24. src/main/java/com/dalab/autoarchival/model/ArchivalResult.java +88 -0
  25. src/main/java/com/dalab/autoarchival/model/ArchivalStatus.java +17 -0
  26. src/main/java/com/dalab/autoarchival/model/ArchivalTask.java +67 -0
  27. src/main/java/com/dalab/autoarchival/model/ArchivalTaskEntity.java +98 -0
  28. src/main/java/com/dalab/autoarchival/model/RestoreResult.java +66 -0
  29. src/main/java/com/dalab/autoarchival/provider/AwsS3GlacierArchivalProvider.java +43 -0
  30. src/main/java/com/dalab/autoarchival/provider/GcsArchiveStorageProvider.java +40 -0
  31. src/main/java/com/dalab/autoarchival/provider/ICloudArchivalProvider.java +50 -0
  32. src/main/java/com/dalab/autoarchival/repository/ArchivalConfigRepository.java +11 -0
  33. src/main/java/com/dalab/autoarchival/repository/ArchivalTaskRepository.java +10 -0
  34. src/main/java/com/dalab/autoarchival/service/IArchivalConfigService.java +18 -0
  35. src/main/java/com/dalab/autoarchival/service/IArchivalTaskService.java +78 -0
  36. src/main/java/com/dalab/autoarchival/service/impl/ArchivalConfigServiceImpl.java +76 -0
  37. src/main/java/com/dalab/autoarchival/service/impl/ArchivalTaskNotFoundException.java +11 -0
  38. src/main/java/com/dalab/autoarchival/service/impl/ArchivalTaskServiceImpl.java +225 -0
  39. src/main/java/com/dalab/autoarchival/service/storage/ArchivalResult.java +33 -0
  40. src/main/java/com/dalab/autoarchival/service/storage/CostSavingsEstimate.java +26 -0
  41. src/main/java/com/dalab/autoarchival/service/storage/ICloudStorageService.java +74 -0
  42. src/main/java/com/dalab/autoarchival/service/storage/OperationStatus.java +13 -0
  43. src/main/java/com/dalab/autoarchival/service/storage/RestorationResult.java +32 -0
  44. src/main/java/com/dalab/autoarchival/service/storage/impl/AwsS3StorageService.java +272 -0
  45. src/main/resources/application.properties +67 -0
  46. src/test/java/com/dalab/autoarchival/controller/ArchivalConfigControllerTest.java +99 -0
  47. src/test/java/com/dalab/autoarchival/controller/ArchivalTaskControllerTest.java +192 -0
  48. src/test/java/com/dalab/autoarchival/service/impl/ArchivalConfigServiceImplTest.java +103 -0
  49. src/test/java/com/dalab/autoarchival/service/impl/ArchivalTaskServiceImplTest.java +246 -0
Dockerfile ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM openjdk:21-jdk-slim

WORKDIR /app

# curl is required by the HEALTHCHECK below.
# (wget was previously installed but never used anywhere in this image.)
RUN apt-get update && apt-get install -y \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy application files
COPY . .

# Build application (if build.gradle.kts exists).
# chmod guards against the Gradle wrapper losing its execute bit in the build
# context (a common cause of "permission denied" after COPY); --no-daemon
# avoids starting a long-lived Gradle daemon inside the image build.
RUN if [ -f "build.gradle.kts" ]; then \
        chmod +x gradlew && \
        ./gradlew build -x test --no-daemon; \
    fi

# Expose port
EXPOSE 8080

# Health check against the Spring Boot actuator endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \
    CMD curl -f http://localhost:8080/actuator/health || exit 1

# Run application
# NOTE(review): this path assumes the conditional build above actually ran and
# produced build/libs/da-autoarchival.jar — confirm for contexts without build.gradle.kts.
CMD ["java", "-jar", "build/libs/da-autoarchival.jar"]
README.md CHANGED
@@ -1,10 +1,38 @@
1
  ---
2
- title: Da Autoarchival Dev
3
- emoji: 📉
4
- colorFrom: purple
5
- colorTo: purple
6
  sdk: docker
7
- pinned: false
8
  ---
9
 
10
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ title: da-autoarchival (dev)
3
+ emoji: 🔧
4
+ colorFrom: blue
5
+ colorTo: green
6
  sdk: docker
7
+ app_port: 8080
8
  ---
9
 
10
+ # da-autoarchival - dev Environment
11
+
12
+ This is the da-autoarchival microservice deployed in the dev environment.
13
+
14
+ ## Features
15
+
16
+ - RESTful API endpoints
17
+ - Health monitoring via Actuator
18
+ - JWT authentication integration
19
+ - PostgreSQL database connectivity
20
+
21
+ ## API Documentation
22
+
23
+ Once deployed, API documentation will be available at:
24
+ - Swagger UI: https://huggingface.co/spaces/dalabsai/da-autoarchival-dev/swagger-ui.html
25
+ - Health Check: https://huggingface.co/spaces/dalabsai/da-autoarchival-dev/actuator/health
26
+
27
+ ## Environment
28
+
29
+ - **Environment**: dev
30
+ - **Port**: 8080
31
+ - **Java Version**: 21
32
+ - **Framework**: Spring Boot
33
+
34
+ ## Deployment
35
+
36
+ This service is automatically deployed via the DALab CI/CD pipeline.
37
+
38
+ Last updated: 2025-06-16 23:40:12
build.gradle.kts ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// da-autoarchival inherits common configuration from parent build.gradle.kts
// This build file adds autoarchival-specific dependencies

dependencies {
    // da-protos common entities and utilities (shared project module)
    implementation(project(":da-protos"))

    // Cloud Storage SDKs — one per supported archival backend
    implementation("software.amazon.awssdk:s3:2.20.162")
    implementation("software.amazon.awssdk:glacier:2.20.162")
    implementation("com.azure:azure-storage-blob:12.23.0")
    implementation("com.google.cloud:google-cloud-storage:2.28.0")

    // Additional dependencies specific to da-autoarchival
    // (declarative HTTP clients for calling sibling services)
    implementation("org.springframework.cloud:spring-cloud-starter-openfeign:4.1.1")
}

// Configure main application class so bootJar/bootRun know the entry point
configure<org.springframework.boot.gradle.dsl.SpringBootExtension> {
    mainClass.set("com.dalab.autoarchival.DaAutoarchivalApplication")
}
src/main/docker/Dockerfile ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Ultra-lean container using Google Distroless
# Expected final size: ~120-180MB (minimal base + JRE + JAR only)
# The :nonroot tag runs the process as an unprivileged user by default.

FROM gcr.io/distroless/java21-debian12:nonroot

# Set working directory
WORKDIR /app

# Copy JAR file (must be built on the host/CI before `docker build`)
COPY build/libs/da-autoarchival.jar app.jar

# Expose standard Spring Boot port
EXPOSE 8080

# Run application (distroless has no shell, so use exec form)
# JVM flags: container-aware heap sizing capped at 75% of RAM, G1 GC with
# string deduplication, non-blocking entropy source, and skipping Spring's
# background pre-initializer to reduce startup thread churn.
ENTRYPOINT ["java", \
    "-XX:+UseContainerSupport", \
    "-XX:MaxRAMPercentage=75.0", \
    "-XX:+UseG1GC", \
    "-XX:+UseStringDeduplication", \
    "-Djava.security.egd=file:/dev/./urandom", \
    "-Dspring.backgroundpreinitializer.ignore=true", \
    "-jar", "app.jar"]
src/main/docker/Dockerfile.alpine-jlink ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Ultra-minimal Alpine + Custom JRE
# Expected size: ~120-160MB

# Stage 1: Create custom JRE with only needed modules
FROM eclipse-temurin:21-jdk-alpine as jre-builder
WORKDIR /app

# Analyze JAR to find required modules
# NOTE(review): jdeps on a Spring Boot fat jar only sees top-level classes,
# not nested dependency jars — the explicit module list appended below
# compensates, but verify the app starts with this JRE.
COPY build/libs/*.jar app.jar
RUN jdeps --ignore-missing-deps --print-module-deps app.jar > modules.txt

# Create minimal JRE with only required modules
# (--compress=2 is the legacy zip level; newer jlink also accepts zip-N forms)
RUN jlink \
    --add-modules $(cat modules.txt),java.logging,java.xml,java.sql,java.naming,java.desktop,java.management,java.security.jgss,java.instrument \
    --strip-debug \
    --no-man-pages \
    --no-header-files \
    --compress=2 \
    --output /custom-jre

# Stage 2: Production image — plain Alpine plus the custom JRE and a
# dedicated non-root user/group (uid/gid 1001)
FROM alpine:3.19
RUN apk add --no-cache tzdata && \
    addgroup -g 1001 -S appgroup && \
    adduser -u 1001 -S appuser -G appgroup

# Copy custom JRE
COPY --from=jre-builder /custom-jre /opt/java
ENV JAVA_HOME=/opt/java
ENV PATH="$JAVA_HOME/bin:$PATH"

WORKDIR /app
COPY build/libs/*.jar app.jar
RUN chown appuser:appgroup app.jar

# Drop privileges before running the application
USER appuser
EXPOSE 8080

ENTRYPOINT ["java", \
    "-XX:+UseContainerSupport", \
    "-XX:MaxRAMPercentage=70.0", \
    "-XX:+UseG1GC", \
    "-jar", "app.jar"]
src/main/docker/Dockerfile.layered ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Ultra-optimized layered build using Distroless
# Expected size: ~180-220MB with better caching

FROM gcr.io/distroless/java21-debian12:nonroot as base

# Stage 1: Extract JAR layers for optimal caching
FROM eclipse-temurin:21-jdk-alpine as extractor
WORKDIR /app
COPY build/libs/*.jar app.jar
RUN java -Djarmode=layertools -jar app.jar extract

# Stage 2: Production image with extracted layers
FROM base
WORKDIR /app

# Copy layers in dependency order (best caching): dependencies change rarely,
# application code changes often, so the stable layers stay cached.
COPY --from=extractor /app/dependencies/ ./
COPY --from=extractor /app/spring-boot-loader/ ./
COPY --from=extractor /app/snapshot-dependencies/ ./
COPY --from=extractor /app/application/ ./

EXPOSE 8080

# Optimized JVM settings for micro-containers.
# -Xshare:auto (not :on): with :on the JVM refuses to start when no CDS
# archive can be mapped, which aborts the container; :auto uses CDS when
# available and degrades gracefully otherwise.
# NOTE(review): for Spring Boot >= 3.2 the launcher class moved to
# org.springframework.boot.loader.launch.JarLauncher — confirm Boot version.
ENTRYPOINT ["java", \
    "-XX:+UseContainerSupport", \
    "-XX:MaxRAMPercentage=70.0", \
    "-XX:+UseG1GC", \
    "-XX:+UseStringDeduplication", \
    "-XX:+CompactStrings", \
    "-Xshare:auto", \
    "-Djava.security.egd=file:/dev/./urandom", \
    "-Dspring.backgroundpreinitializer.ignore=true", \
    "org.springframework.boot.loader.JarLauncher"]
src/main/docker/Dockerfile.native ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# GraalVM Native Image - Ultra-fast startup, tiny size
# Expected size: ~50-80MB, startup <100ms
# Note: Requires native compilation support in Spring Boot

# Stage 1: Native compilation
FROM ghcr.io/graalvm/graalvm-ce:ol9-java21 as native-builder
WORKDIR /app

# Install native-image
# NOTE(review): newer GraalVM images ship native-image preinstalled and have
# removed the `gu` tool entirely — confirm this base tag still provides `gu`.
RUN gu install native-image

# Copy source and build native executable
# NOTE(review): assumes gradlew is executable in the build context and that
# the project applies the GraalVM native build plugin (nativeCompile task).
COPY . .
RUN ./gradlew nativeCompile

# Stage 2: Minimal runtime
# NOTE(review): `scratch` has no libc — the native image must be statically
# linked (or a distroless/base image with glibc used instead); verify.
FROM scratch
COPY --from=native-builder /app/build/native/nativeCompile/app /app
EXPOSE 8080
ENTRYPOINT ["/app"]
src/main/java/com/dalab/autoarchival/DaAutoarchivalApplication.java ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival;
2
+
3
+ import org.springframework.boot.SpringApplication;
4
+ import org.springframework.boot.autoconfigure.SpringBootApplication;
5
+ import org.springframework.scheduling.annotation.EnableAsync;
6
+ import org.springframework.security.config.annotation.method.configuration.EnableMethodSecurity;
7
+ import io.swagger.v3.oas.models.OpenAPI;
8
+ import io.swagger.v3.oas.models.info.Info;
9
+ import io.swagger.v3.oas.models.info.License;
10
+ import org.springframework.beans.factory.annotation.Value;
11
+ import org.springframework.context.annotation.Bean;
12
+
13
+ @SpringBootApplication
14
+ @EnableMethodSecurity // For @PreAuthorize annotations
15
+ @EnableAsync // If async operations are needed
16
+ // @EnableFeignClients // If this service will call other services via Feign
17
+ public class DaAutoarchivalApplication {
18
+
19
+ public static void main(String[] args) {
20
+ SpringApplication.run(DaAutoarchivalApplication.class, args);
21
+ }
22
+
23
+ @Bean
24
+ public OpenAPI customOpenAPI(@Value("${spring.application.name:DALab AutoArchival Service}") String appName,
25
+ @Value("${spring.application.description:API for AutoArchival Service}") String appDescription,
26
+ @Value("${spring.application.version:0.0.1-SNAPSHOT}") String appVersion) {
27
+ return new OpenAPI()
28
+ .info(new Info()
29
+ .title(appName)
30
+ .version(appVersion)
31
+ .description(appDescription)
32
+ .termsOfService("http://swagger.io/terms/") // Placeholder
33
+ .license(new License().name("Apache 2.0").url("http://springdoc.org"))); // Placeholder
34
+ }
35
+ }
src/main/java/com/dalab/autoarchival/controller/ArchivalConfigController.java ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.controller;
2
+
3
+ import org.springframework.http.ResponseEntity;
4
+ import org.springframework.security.access.prepost.PreAuthorize;
5
+ import org.springframework.web.bind.annotation.GetMapping;
6
+ import org.springframework.web.bind.annotation.PutMapping;
7
+ import org.springframework.web.bind.annotation.RequestBody;
8
+ import org.springframework.web.bind.annotation.RequestMapping;
9
+ import org.springframework.web.bind.annotation.RestController;
10
+
11
+ import com.dalab.autoarchival.dto.ArchivalConfigDTO;
12
+ import com.dalab.autoarchival.service.IArchivalConfigService;
13
+
14
+ import jakarta.validation.Valid;
15
+ import lombok.RequiredArgsConstructor;
16
+ import lombok.extern.slf4j.Slf4j;
17
+
18
+ /**
19
+ * REST controller for managing archival configuration.
20
+ */
21
+ @RestController
22
+ @RequestMapping("/api/v1/archival/config")
23
+ @RequiredArgsConstructor
24
+ @Slf4j
25
+ public class ArchivalConfigController {
26
+
27
+ private final IArchivalConfigService archivalConfigService;
28
+
29
+ /**
30
+ * Retrieves the current archival configuration.
31
+ * Requires ADMIN or DATA_STEWARD role.
32
+ *
33
+ * @return ResponseEntity with current ArchivalConfigDTO.
34
+ */
35
+ @GetMapping
36
+ @PreAuthorize("hasAnyAuthority('ROLE_ADMIN', 'ROLE_DATA_STEWARD')")
37
+ public ResponseEntity<ArchivalConfigDTO> getArchivalConfig() {
38
+ log.info("Request to retrieve archival configuration");
39
+ try {
40
+ ArchivalConfigDTO config = archivalConfigService.getArchivalConfig();
41
+ return ResponseEntity.ok(config);
42
+ } catch (Exception e) {
43
+ log.error("Error retrieving archival configuration: {}", e.getMessage(), e);
44
+ return ResponseEntity.internalServerError().build();
45
+ }
46
+ }
47
+
48
+ /**
49
+ * Updates the archival configuration.
50
+ * Requires ADMIN role.
51
+ *
52
+ * @param configDTO The new archival configuration.
53
+ * @return ResponseEntity with updated configuration.
54
+ */
55
+ @PutMapping
56
+ @PreAuthorize("hasAuthority('ROLE_ADMIN')")
57
+ public ResponseEntity<ArchivalConfigDTO> updateArchivalConfig(@Valid @RequestBody ArchivalConfigDTO configDTO) {
58
+ log.info("Request to update archival configuration: {}", configDTO);
59
+ try {
60
+ archivalConfigService.updateArchivalConfig(configDTO);
61
+ ArchivalConfigDTO updatedConfig = archivalConfigService.getArchivalConfig();
62
+ return ResponseEntity.ok(updatedConfig);
63
+ } catch (Exception e) {
64
+ log.error("Error updating archival configuration: {}", e.getMessage(), e);
65
+ return ResponseEntity.internalServerError().build();
66
+ }
67
+ }
68
+ }
src/main/java/com/dalab/autoarchival/controller/ArchivalTaskController.java ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.controller;

import java.time.LocalDateTime;

import org.springframework.data.domain.Pageable;
import org.springframework.data.web.PageableDefault;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import com.dalab.autoarchival.dto.ArchivalTaskListResponse;
import com.dalab.autoarchival.dto.ArchivalTaskRequest;
import com.dalab.autoarchival.dto.ArchivalTaskResponse;
import com.dalab.autoarchival.dto.ArchivalTaskStatusDTO;
import com.dalab.autoarchival.dto.TaskApprovalRequest;
import com.dalab.autoarchival.service.IArchivalTaskService;

import jakarta.validation.Valid;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

/**
 * REST controller for archival task lifecycle: submit, query status,
 * list with pagination, and approve/reject pending tasks.
 */
@RestController
@RequestMapping("/api/v1/archival/tasks")
@RequiredArgsConstructor
@Slf4j
public class ArchivalTaskController {

    private final IArchivalTaskService archivalTaskService;

    /**
     * Submits a new archival task for specified assets.
     * Requires DATA_STEWARD or ADMIN role.
     *
     * @param taskRequest the archival task request details.
     * @return 202 on acceptance, 400 when the service flags FAILED_VALIDATION,
     *         500 on unexpected errors.
     */
    @PostMapping
    @PreAuthorize("hasAnyAuthority('ROLE_ADMIN', 'ROLE_DATA_STEWARD')")
    public ResponseEntity<ArchivalTaskResponse> submitArchivalTask(@Valid @RequestBody ArchivalTaskRequest taskRequest) {
        log.info("Received request to submit archival task: {}", taskRequest.getTaskName());
        try {
            ArchivalTaskResponse submission = archivalTaskService.submitArchivalTask(taskRequest);
            if ("FAILED_VALIDATION".equals(submission.getStatus())) {
                return ResponseEntity.badRequest().body(submission);
            }
            return ResponseEntity.status(HttpStatus.ACCEPTED).body(submission);
        } catch (Exception e) {
            log.error("Unexpected error submitting archival task '{}': {}", taskRequest.getTaskName(), e.getMessage(), e);
            ArchivalTaskResponse failure = ArchivalTaskResponse.builder()
                    .taskName(taskRequest.getTaskName())
                    .status("ERROR")
                    .message("An unexpected error occurred while submitting the archival task.")
                    .submittedAt(LocalDateTime.now())
                    .build();
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(failure);
        }
    }

    /**
     * Retrieves the status of a specific archival task.
     * Requires DATA_STEWARD, ADMIN, or USER role (user who submitted, or involved).
     *
     * @param taskId the ID of the archival task.
     * @return 200 with the status DTO, or 404 when the task is unknown.
     */
    @GetMapping("/{taskId}")
    @PreAuthorize("hasAnyAuthority('ROLE_ADMIN', 'ROLE_DATA_STEWARD', 'ROLE_USER')") // User role may need further refinement (e.g., task owner)
    public ResponseEntity<ArchivalTaskStatusDTO> getArchivalTaskStatus(@PathVariable String taskId) {
        log.info("Request to retrieve status for archival task ID: {}", taskId);
        ArchivalTaskStatusDTO status = archivalTaskService.getTaskStatus(taskId);
        if (status == null) {
            log.warn("Archival task with ID '{}' not found.", taskId);
            return ResponseEntity.notFound().build();
        }
        return ResponseEntity.ok(status);
    }

    /**
     * Lists all archival tasks with pagination (default page size 20).
     * Requires DATA_STEWARD, ADMIN, or USER role.
     *
     * @param pageable pagination information.
     * @return 200 with the paginated list, or 500 on error.
     */
    @GetMapping
    @PreAuthorize("hasAnyAuthority('ROLE_ADMIN', 'ROLE_DATA_STEWARD', 'ROLE_USER')")
    public ResponseEntity<ArchivalTaskListResponse> listArchivalTasks(@PageableDefault(size = 20) Pageable pageable) {
        log.info("Request to list archival tasks. Pagination: {}", pageable);
        try {
            return ResponseEntity.ok(archivalTaskService.listTasks(pageable));
        } catch (Exception e) {
            log.error("Error listing archival tasks: {}", e.getMessage(), e);
            // Consider a more specific error response DTO if needed
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Approves a pending archival task.
     * Requires ADMIN or designated approver role.
     *
     * <p>Returns 404 when the task is unknown, 200 when the resulting status is
     * APPROVED or SUBMITTED, and 409 when the task was not in an approvable state.
     */
    @PostMapping("/{taskId}/approve")
    @PreAuthorize("hasAuthority('ROLE_ADMIN')") // Or a more specific approver role
    public ResponseEntity<ArchivalTaskStatusDTO> approveArchivalTask(@PathVariable String taskId,
                                                                     @RequestBody(required = false) TaskApprovalRequest approvalRequest) {
        log.info("Request to approve archival task ID: {}. Comments: {}", taskId, approvalRequest != null ? approvalRequest.getComments() : "N/A");
        ArchivalTaskStatusDTO outcome = archivalTaskService.approveTask(taskId, approvalRequest);
        if (outcome == null) {
            return ResponseEntity.notFound().build();
        }
        boolean approved = "APPROVED".equals(outcome.getStatus()) || "SUBMITTED".equals(outcome.getStatus()); // Or other success states
        if (!approved) {
            // Task might not have been in an approvable state
            return ResponseEntity.status(HttpStatus.CONFLICT).body(outcome);
        }
        return ResponseEntity.ok(outcome);
    }

    /**
     * Rejects a pending archival task.
     * Requires ADMIN or designated approver role.
     *
     * <p>Returns 404 when the task is unknown, 200 when the resulting status is
     * REJECTED, and 409 when the task was not in a rejectable state.
     */
    @PostMapping("/{taskId}/reject")
    @PreAuthorize("hasAuthority('ROLE_ADMIN')") // Or a more specific approver role
    public ResponseEntity<ArchivalTaskStatusDTO> rejectArchivalTask(@PathVariable String taskId,
                                                                    @RequestBody(required = false) TaskApprovalRequest rejectionRequest) {
        log.info("Request to reject archival task ID: {}. Comments: {}", taskId, rejectionRequest != null ? rejectionRequest.getComments() : "N/A");
        ArchivalTaskStatusDTO outcome = archivalTaskService.rejectTask(taskId, rejectionRequest);
        if (outcome == null) {
            return ResponseEntity.notFound().build();
        }
        if (!"REJECTED".equals(outcome.getStatus())) {
            // Task might not have been in a rejectable state
            return ResponseEntity.status(HttpStatus.CONFLICT).body(outcome);
        }
        return ResponseEntity.ok(outcome);
    }

    // POST /tasks/{taskId}/restore to be added later
}
src/main/java/com/dalab/autoarchival/dto/ArchivalConfigDTO.java ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;

import java.util.List;
import java.util.Map;

/**
 * DTO for representing the auto-archival configuration.
 *
 * <p>Lombok generates getters/setters, equals/hashCode, a builder, and both
 * no-arg and all-args constructors; field order here defines the all-args
 * constructor signature, so do not reorder fields casually.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalConfigDTO {

    private Boolean enabled; // Global enable/disable for auto-archival (nullable: "not configured")
    private String defaultArchivalTier; // e.g., "S3_GLACIER_DEEP_ARCHIVE", "AZURE_ARCHIVE"
    private Long defaultRetentionDays; // Default retention before data is eligible for permanent deletion after archival
    private boolean requireApprovalForArchival; // If manual approval is needed before archival
    private boolean requireApprovalForRestore; // If manual approval is needed for restoration

    // Potentially add provider-specific configurations or rules here if needed
    // For example, specific storage account details or lifecycle policy names
    // Keyed by provider identifier; value holds that provider's settings.
    private Map<String, ProviderArchivalConfig> providerConfigs;

    /** Archival settings specific to a single cloud provider. */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ProviderArchivalConfig {
        private String archivalStorageLocation; // e.g., bucket name, container name
        private String defaultRegion; // if applicable
        private Map<String, String> customProperties; // for provider-specific settings
    }

    // Could also include a list of rules directly, or link to policies in da-policyengine
    // For simplicity, starting with general config. Rules might be handled by Policy Engine.
}
src/main/java/com/dalab/autoarchival/dto/ArchivalTaskListResponse.java ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;
import org.springframework.data.domain.Page;

import java.util.List;

/**
 * DTO for a paginated list of archival tasks.
 *
 * <p>Mirrors Spring Data's page metadata so clients can render pagination
 * without depending on Spring Data types.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalTaskListResponse {
    private List<ArchivalTaskStatusDTO> tasks; // Using full status DTO for now
    private int pageNumber;
    private int pageSize;
    private long totalElements;
    private int totalPages;
    private boolean last;
    private boolean first;
    private int numberOfElements;

    /**
     * Maps a Spring Data page of task statuses onto this response shape.
     *
     * <p>Accepts the {@link Page} interface rather than the concrete
     * {@code PageImpl}, so any Page implementation (repository results,
     * {@code Page.map(...)} projections) can be converted. Source-compatible
     * with existing callers that pass a {@code PageImpl}.
     *
     * @param page the page to convert; must not be null.
     * @return a populated list response carrying content plus page metadata.
     */
    public static ArchivalTaskListResponse fromPage(Page<ArchivalTaskStatusDTO> page) {
        return ArchivalTaskListResponse.builder()
                .tasks(page.getContent())
                .pageNumber(page.getNumber())
                .pageSize(page.getSize())
                .totalElements(page.getTotalElements())
                .totalPages(page.getTotalPages())
                .last(page.isLast())
                .first(page.isFirst())
                .numberOfElements(page.getNumberOfElements())
                .build();
    }
}
src/main/java/com/dalab/autoarchival/dto/ArchivalTaskRequest.java ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;

import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Map;

/**
 * DTO for requesting an archival task.
 *
 * <p>{@code scope} is mandatory and cascade-validated ({@code @Valid});
 * everything else is optional metadata or per-task overrides.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalTaskRequest {

    private String taskName; // Optional user-defined name for the task

    @NotNull(message = "Archival scope cannot be null")
    @Valid
    private ArchivalScope scope;

    // Optional: Allow overriding some global/provider config for this specific task
    private ArchivalConfigOverride overrideConfig;

    private String triggeredBy; // User or system that triggered the task
    private String justification; // Optional justification for the archival

    /**
     * Defines which assets the task covers. Currently only a direct ID list is
     * supported; query- and tag-based selection are sketched below.
     */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ArchivalScope {
        // Define how assets are specified, e.g., by IDs, by query, by tags
        @NotEmpty(message = "At least one asset ID or a query must be provided.") // Example validation
        private List<String> assetIds; // Direct list of asset IDs from the catalog
        // private String assetQuery; // Or a query to select assets (e.g., SQL-like or from da-catalog search API)
        // private Map<String, String> assetTags; // Or assets matching specific tags
    }

    /** Per-task overrides of the global archival configuration. */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ArchivalConfigOverride {
        private String targetArchivalTier; // Override default tier
        private Long retentionDays; // Override default retention
        // Potentially other specific overrides
    }
}
src/main/java/com/dalab/autoarchival/dto/ArchivalTaskResponse.java ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;

import java.time.LocalDateTime;

/**
 * DTO for archival task submission response.
 *
 * <p>Returned by the submit endpoint; {@code status} carries values such as
 * "FAILED_VALIDATION" or "ERROR" that the controller branches on.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalTaskResponse {

    private String taskId;
    private String taskName;
    private String status; // e.g., FAILED_VALIDATION / ERROR are checked by the controller
    private String message; // human-readable detail accompanying the status
    private LocalDateTime submittedAt;
    private String submittedBy;
    private Integer estimatedProcessingTimeMinutes; // nullable when no estimate is available

}
src/main/java/com/dalab/autoarchival/dto/ArchivalTaskStatusDTO.java ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;

import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;

/**
 * DTO for representing the detailed status of an archival task.
 *
 * <p>Carries the original request scope/overrides, aggregate progress
 * counters, and per-asset outcomes.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalTaskStatusDTO {
    private String taskId;
    private String taskName;
    private String status; // e.g., SUBMITTED, PENDING_APPROVAL, IN_PROGRESS, COMPLETED, FAILED, PARTIALLY_COMPLETED
    private LocalDateTime submittedAt;
    private LocalDateTime startedAt;
    private LocalDateTime completedAt;
    private String triggeredBy;
    private String justification;
    private ArchivalTaskRequest.ArchivalScope scope; // The original scope
    private ArchivalTaskRequest.ArchivalConfigOverride overrideConfig; // Original overrides

    // Aggregate counters across all assets in scope
    private int totalAssetsInScope;
    private int assetsArchivedSuccessfully;
    private int assetsFailedToArchive;
    private int assetsPendingArchival;

    private List<AssetArchivalStatus> assetStatuses; // Status for each asset in the task
    private List<String> errorMessages; // Overall task error messages

    /** Per-asset archival outcome within a task. */
    @Data
    @Builder
    @NoArgsConstructor
    @AllArgsConstructor
    public static class AssetArchivalStatus {
        private String assetId;
        private String assetName; // Optional, might need to be fetched from catalog
        private String status; // PENDING, IN_PROGRESS, ARCHIVED, FAILED, RESTORED
        private String archivalLocation; // e.g., S3 URI, Azure Blob URI post-archival
        private String errorMessage; // If archival failed for this asset
        private LocalDateTime archivedAt;
        private LocalDateTime restoredAt;
    }
}
src/main/java/com/dalab/autoarchival/dto/RestoreRequestDTO.java ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import java.util.List;

import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * DTO for restore request from archived storage.
 *
 * <p>Carries the asset IDs to restore plus optional routing/retention hints
 * consumed by the restore service.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RestoreRequestDTO {

    // Assets to restore; validation requires a non-null, non-empty list.
    // NOTE(review): @NotEmpty already rejects null, making @NotNull redundant —
    // confirm no caller depends on the duplicate violation before removing it.
    @NotNull
    @NotEmpty
    private List<String> assetIds;

    private String requestedBy;   // user or system identity requesting the restore
    private String reason;        // free-text justification
    private String targetLocation; // optional destination for the restored data
    private Integer retentionDays; // How long to keep restored data accessible
    private Boolean priorityRestore; // Fast restore vs standard
    private String restoreTier; // Added field expected by service

}
src/main/java/com/dalab/autoarchival/dto/RestoreResponseDTO.java ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import java.time.LocalDateTime;
import java.util.List;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * DTO for restore operation response.
 *
 * <p>Returned after a restore is submitted; echoes back the requested assets
 * and reports tracking identifiers and timing estimates.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RestoreResponseDTO {

    private String restoreTaskId;   // identifier for tracking the restore operation
    private String archivalTaskId;  // the originating archival task, if applicable
    private String status; // SUBMITTED, IN_PROGRESS, COMPLETED, FAILED
    private String message;         // human-readable summary of the submission outcome
    private List<String> assetIds;  // assets covered by this restore
    private LocalDateTime requestedAt;
    private LocalDateTime estimatedCompletionAt; // provider estimate; may be null
    private String targetLocation;
    private Integer retentionDays;  // how long restored data stays accessible

}
src/main/java/com/dalab/autoarchival/dto/TaskApprovalRequest.java ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.dto;

import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;

import jakarta.validation.constraints.Size;

/**
 * DTO for providing comments during task approval or rejection.
 *
 * <p>The acting user is intentionally not carried here; it is expected to be
 * derived from the security context by the caller.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class TaskApprovalRequest {
    // Optional reviewer comments, bounded to keep persisted records small.
    @Size(max = 1000, message = "Comments cannot exceed 1000 characters")
    private String comments;
    // Add approver/rejector userID if needed, though it can be derived from security context
}
src/main/java/com/dalab/autoarchival/exception/ArchivalException.java ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.exception;
2
+
3
+ /**
4
+ * Custom exception for errors occurring during archival or restore operations.
5
+ */
6
+ public class ArchivalException extends Exception {
7
+
8
+ public ArchivalException(String message) {
9
+ super(message);
10
+ }
11
+
12
+ public ArchivalException(String message, Throwable cause) {
13
+ super(message, cause);
14
+ }
15
+ }
src/main/java/com/dalab/autoarchival/kafka/consumer/PolicyActionEventListener.java ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.kafka.consumer;
2
+
3
+ import java.util.Map;
4
+
5
+ import org.slf4j.Logger;
6
+ import org.slf4j.LoggerFactory;
7
+ import org.springframework.kafka.annotation.KafkaListener;
8
+ import org.springframework.kafka.support.KafkaHeaders;
9
+ import org.springframework.messaging.handler.annotation.Header;
10
+ import org.springframework.messaging.handler.annotation.Payload;
11
+ import org.springframework.stereotype.Component;
12
+
13
+ import com.dalab.autoarchival.service.IArchivalTaskService;
14
+
15
+ @Component
16
+ public class PolicyActionEventListener {
17
+
18
+ private static final Logger LOGGER = LoggerFactory.getLogger(PolicyActionEventListener.class);
19
+
20
+ private final IArchivalTaskService archivalTaskService;
21
+
22
+ public PolicyActionEventListener(IArchivalTaskService archivalTaskService) {
23
+ this.archivalTaskService = archivalTaskService;
24
+ }
25
+
26
+ @KafkaListener(
27
+ topics = "${dalab.kafka.topics.policy-actions}",
28
+ groupId = "${spring.kafka.consumer.group-id}"
29
+ )
30
+ public void handlePolicyActionEvent(
31
+ @Payload Map<String, Object> event,
32
+ @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
33
+ @Header(KafkaHeaders.RECEIVED_PARTITION) int partition,
34
+ @Header(KafkaHeaders.OFFSET) long offset
35
+ ) {
36
+ String eventId = (String) event.get("eventId");
37
+ String policyId = (String) event.get("policyId");
38
+ String actionType = (String) event.get("actionType");
39
+
40
+ LOGGER.info(
41
+ "Received PolicyActionEvent on topic: {}, partition: {}, offset: {}. Event ID: {}, Policy ID: {}, Action: {}",
42
+ topic, partition, offset, eventId, policyId, actionType
43
+ );
44
+
45
+ // Filter for archival-related actions
46
+ if ("ARCHIVE".equals(actionType) || "AUTO_ARCHIVE".equals(actionType)) {
47
+ try {
48
+ archivalTaskService.processArchivalAction(event);
49
+ LOGGER.debug("Successfully triggered archival processing for eventId: {}", eventId);
50
+ } catch (Exception e) {
51
+ LOGGER.error("Error processing PolicyActionEvent for archival (eventId: {}): {}", eventId, e.getMessage(), e);
52
+ // TODO: Implement proper error handling and dead-letter queue (DLQ) strategy
53
+ }
54
+ } else {
55
+ LOGGER.debug("Skipping PolicyActionEvent with action type: {} for eventId: {}", actionType, eventId);
56
+ }
57
+ }
58
+ }
src/main/java/com/dalab/autoarchival/mapper/ArchivalConfigMapper.java ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.mapper;
2
+
3
+ import com.dalab.autoarchival.dto.ArchivalConfigDTO;
4
+ import com.dalab.autoarchival.model.ArchivalConfigEntity;
5
+ import org.mapstruct.Mapper;
6
+ import org.mapstruct.MappingTarget;
7
+ import org.mapstruct.factory.Mappers;
8
+
9
+ /**
10
+ * Mapper for converting between ArchivalConfigEntity and ArchivalConfigDTO.
11
+ */
12
+ @Mapper
13
+ public interface ArchivalConfigMapper {
14
+
15
+ ArchivalConfigMapper INSTANCE = Mappers.getMapper(ArchivalConfigMapper.class);
16
+
17
+ // Methods expected by the service implementation
18
+ ArchivalConfigDTO toDto(ArchivalConfigEntity entity);
19
+ void updateEntityFromDto(ArchivalConfigDTO dto, @MappingTarget ArchivalConfigEntity entity);
20
+
21
+ // Original methods
22
+ ArchivalConfigDTO toDTO(ArchivalConfigEntity entity);
23
+ ArchivalConfigEntity toEntity(ArchivalConfigDTO dto);
24
+
25
+ }
src/main/java/com/dalab/autoarchival/mapper/ArchivalTaskMapper.java ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.mapper;
2
+
3
+ import com.dalab.autoarchival.dto.ArchivalTaskStatusDTO;
4
+ import com.dalab.autoarchival.dto.ArchivalTaskResponse;
5
+ import com.dalab.autoarchival.dto.ArchivalTaskRequest;
6
+ import com.dalab.autoarchival.dto.RestoreRequestDTO;
7
+ import com.dalab.autoarchival.model.ArchivalTaskEntity;
8
+ import org.mapstruct.Mapper;
9
+ import org.mapstruct.factory.Mappers;
10
+
11
+ /**
12
+ * Mapper for converting between ArchivalTaskEntity and DTOs.
13
+ */
14
+ @Mapper
15
+ public interface ArchivalTaskMapper {
16
+
17
+ ArchivalTaskMapper INSTANCE = Mappers.getMapper(ArchivalTaskMapper.class);
18
+
19
+ // Methods expected by the service implementation
20
+ ArchivalTaskEntity requestToEntity(ArchivalTaskRequest request);
21
+ ArchivalTaskResponse entityToTaskResponse(ArchivalTaskEntity entity);
22
+ ArchivalTaskStatusDTO entityToStatusDTO(ArchivalTaskEntity entity);
23
+ ArchivalTaskEntity.RestoreRequestData restoreRequestDtoToData(RestoreRequestDTO dto);
24
+
25
+ // Original methods
26
+ ArchivalTaskStatusDTO toStatusDTO(ArchivalTaskEntity entity);
27
+ ArchivalTaskResponse toResponseDTO(ArchivalTaskEntity entity);
28
+
29
+ }
src/main/java/com/dalab/autoarchival/model/ArchivalConfigEntity.java ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.model;

import jakarta.persistence.*;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;

import java.util.Map;

/**
 * Global archival configuration, persisted as a single row.
 *
 * <p>NOTE(review): Lombok {@code @Data} on a JPA entity generates
 * equals/hashCode over all fields, which can misbehave with lazy proxies and
 * mutable entities — confirm this is acceptable for a single-row config.</p>
 */
@Entity
@Table(name = "dalab_archival_config")
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalConfigEntity {

    @Id
    private Long id; // Use a fixed ID, e.g., 1L, as it's a global config

    private boolean enabled;                     // master switch for auto-archival
    private String defaultArchivalTier;          // tier used when a task has no override
    private Long defaultRetentionDays;           // retention applied when not overridden
    private boolean requireApprovalForArchival;  // gate archival behind approval workflow
    private boolean requireApprovalForRestore;   // gate restore behind approval workflow

    // Per-provider settings keyed by provider type (e.g. "AWS", "GCP"),
    // serialized into a single jsonb column.
    @JdbcTypeCode(SqlTypes.JSON)
    @Column(columnDefinition = "jsonb")
    private Map<String, ProviderConfigData> providerConfigs; // Store as JSON

    // Static inner class for provider config data to be serialized to JSON
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class ProviderConfigData {
        private String archivalStorageLocation;  // e.g. target bucket/container URI
        private String defaultRegion;
        private Map<String, String> customProperties; // provider-specific extras
    }
}
src/main/java/com/dalab/autoarchival/model/ArchivalResult.java ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.model;
2
+
3
+ /**
4
+ * DTO to hold the results of an archival operation.
5
+ */
6
+ public class ArchivalResult {
7
+
8
+ private String archiveId; // Unique identifier for the created archive (e.g., S3 Object Version ID, Glacier Archive ID).
9
+ private String storageLocation; // URI or path to the archived object (e.g., s3://bucket/key, gs://bucket/object).
10
+ private long sizeBytes; // Size of the archived data in bytes.
11
+ private String checksum; // Checksum (e.g., MD5, SHA256) of the archived data, if available.
12
+ private String storageClass; // Actual storage class used (e.g., GLACIER, DEEP_ARCHIVE).
13
+ private java.time.OffsetDateTime archivalTimestamp; // Timestamp when the archival was completed.
14
+
15
+ // Constructors
16
+ public ArchivalResult() {
17
+ }
18
+
19
+ public ArchivalResult(String archiveId, String storageLocation, long sizeBytes, String checksum, String storageClass, java.time.OffsetDateTime archivalTimestamp) {
20
+ this.archiveId = archiveId;
21
+ this.storageLocation = storageLocation;
22
+ this.sizeBytes = sizeBytes;
23
+ this.checksum = checksum;
24
+ this.storageClass = storageClass;
25
+ this.archivalTimestamp = archivalTimestamp;
26
+ }
27
+
28
+ // Getters and Setters
29
+ public String getArchiveId() {
30
+ return archiveId;
31
+ }
32
+
33
+ public void setArchiveId(String archiveId) {
34
+ this.archiveId = archiveId;
35
+ }
36
+
37
+ public String getStorageLocation() {
38
+ return storageLocation;
39
+ }
40
+
41
+ public void setStorageLocation(String storageLocation) {
42
+ this.storageLocation = storageLocation;
43
+ }
44
+
45
+ public long getSizeBytes() {
46
+ return sizeBytes;
47
+ }
48
+
49
+ public void setSizeBytes(long sizeBytes) {
50
+ this.sizeBytes = sizeBytes;
51
+ }
52
+
53
+ public String getChecksum() {
54
+ return checksum;
55
+ }
56
+
57
+ public void setChecksum(String checksum) {
58
+ this.checksum = checksum;
59
+ }
60
+
61
+ public String getStorageClass() {
62
+ return storageClass;
63
+ }
64
+
65
+ public void setStorageClass(String storageClass) {
66
+ this.storageClass = storageClass;
67
+ }
68
+
69
+ public java.time.OffsetDateTime getArchivalTimestamp() {
70
+ return archivalTimestamp;
71
+ }
72
+
73
+ public void setArchivalTimestamp(java.time.OffsetDateTime archivalTimestamp) {
74
+ this.archivalTimestamp = archivalTimestamp;
75
+ }
76
+
77
+ @Override
78
+ public String toString() {
79
+ return "ArchivalResult{" +
80
+ "archiveId='" + archiveId + '\'' +
81
+ ", storageLocation='" + storageLocation + '\'' +
82
+ ", sizeBytes=" + sizeBytes +
83
+ ", checksum='" + checksum + '\'' +
84
+ ", storageClass='" + storageClass + '\'' +
85
+ ", archivalTimestamp=" + archivalTimestamp +
86
+ '}';
87
+ }
88
+ }
src/main/java/com/dalab/autoarchival/model/ArchivalStatus.java ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.model;

/**
 * Lifecycle states of an archival task, covering the approval phase, archival
 * execution, and subsequent restoration.
 *
 * <p>NOTE(review): do not reorder constants — persisted data and any
 * ordinal-based logic would break.</p>
 */
public enum ArchivalStatus {
    PENDING_APPROVAL,
    APPROVED, // Approved, ready for processing or being processed
    REJECTED,
    SUBMITTED, // Submitted directly, no approval needed, or post-approval
    IN_PROGRESS,
    COMPLETED,
    PARTIALLY_COMPLETED, // some assets archived, others failed
    FAILED,
    CANCELLED,
    RESTORATION_REQUESTED,
    RESTORATION_IN_PROGRESS,
    RESTORED, // Assets are available
    RESTORATION_FAILED
}
src/main/java/com/dalab/autoarchival/model/ArchivalTask.java ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.model;
2
+
3
+ import java.time.OffsetDateTime;
4
+ import java.util.Map;
5
+
6
+ /**
7
+ * Represents an archival task.
8
+ * This is a placeholder and would typically be a JPA entity or a more detailed DTO.
9
+ */
10
+ public class ArchivalTask {
11
+
12
+ private String taskId;
13
+ private String assetId; // ID of the asset to be archived/restored
14
+ private String originalDataSourceProvider; // e.g., "AWS", "GCP", "AZURE"
15
+ private String originalDataSourceDetails; // e.g., S3 bucket/key, GCS object path
16
+ private ArchivalStatus status;
17
+
18
+ // Fields to store results from ICloudArchivalProvider
19
+ private String archiveId; // From ArchivalResult
20
+ private String archiveStorageLocation; // From ArchivalResult
21
+ private String restoreJobId; // From RestoreResult
22
+ private OffsetDateTime lastArchivedAt;
23
+ private OffsetDateTime lastRestoreInitiatedAt;
24
+
25
+ private Map<String, String> providerConfiguration; // To store provider specific config like target bucket/storage class for THIS task
26
+
27
+
28
+ public enum ArchivalStatus {
29
+ PENDING_ARCHIVAL,
30
+ ARCHIVAL_IN_PROGRESS,
31
+ ARCHIVED,
32
+ PENDING_RESTORATION,
33
+ RESTORATION_IN_PROGRESS,
34
+ RESTORED,
35
+ FAILED_ARCHIVAL,
36
+ FAILED_RESTORATION
37
+ }
38
+
39
+ // Constructors, Getters, Setters
40
+ public ArchivalTask(String taskId, String assetId) {
41
+ this.taskId = taskId;
42
+ this.assetId = assetId;
43
+ }
44
+
45
+ public String getTaskId() { return taskId; }
46
+ public void setTaskId(String taskId) { this.taskId = taskId; }
47
+ public String getAssetId() { return assetId; }
48
+ public void setAssetId(String assetId) { this.assetId = assetId; }
49
+ public String getOriginalDataSourceProvider() { return originalDataSourceProvider; }
50
+ public void setOriginalDataSourceProvider(String originalDataSourceProvider) { this.originalDataSourceProvider = originalDataSourceProvider; }
51
+ public String getOriginalDataSourceDetails() { return originalDataSourceDetails; }
52
+ public void setOriginalDataSourceDetails(String originalDataSourceDetails) { this.originalDataSourceDetails = originalDataSourceDetails; }
53
+ public ArchivalStatus getStatus() { return status; }
54
+ public void setStatus(ArchivalStatus status) { this.status = status; }
55
+ public String getArchiveId() { return archiveId; }
56
+ public void setArchiveId(String archiveId) { this.archiveId = archiveId; }
57
+ public String getArchiveStorageLocation() { return archiveStorageLocation; }
58
+ public void setArchiveStorageLocation(String archiveStorageLocation) { this.archiveStorageLocation = archiveStorageLocation; }
59
+ public String getRestoreJobId() { return restoreJobId; }
60
+ public void setRestoreJobId(String restoreJobId) { this.restoreJobId = restoreJobId; }
61
+ public OffsetDateTime getLastArchivedAt() { return lastArchivedAt; }
62
+ public void setLastArchivedAt(OffsetDateTime lastArchivedAt) { this.lastArchivedAt = lastArchivedAt; }
63
+ public OffsetDateTime getLastRestoreInitiatedAt() { return lastRestoreInitiatedAt; }
64
+ public void setLastRestoreInitiatedAt(OffsetDateTime lastRestoreInitiatedAt) { this.lastRestoreInitiatedAt = lastRestoreInitiatedAt; }
65
+ public Map<String, String> getProviderConfiguration() { return providerConfiguration; }
66
+ public void setProviderConfiguration(Map<String, String> providerConfiguration) { this.providerConfiguration = providerConfiguration; }
67
+ }
src/main/java/com/dalab/autoarchival/model/ArchivalTaskEntity.java ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.model;

import jakarta.persistence.*;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.AllArgsConstructor;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;

import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;

/**
 * Persistent record of an archival task: lifecycle timestamps, aggregate
 * counters, and the scope/override/restore payloads stored as jsonb.
 *
 * <p>NOTE(review): Lombok {@code @Data} on a JPA entity generates
 * equals/hashCode over all fields, which interacts poorly with lazy loading
 * and mutable state — confirm acceptable for this table.</p>
 */
@Entity
@Table(name = "dalab_archival_tasks")
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalTaskEntity {

    @Id
    private String taskId;

    private String taskName;

    // Stored as the enum constant name, so renaming constants is a migration.
    @Enumerated(EnumType.STRING)
    private ArchivalStatus status;

    private LocalDateTime submittedAt;
    private LocalDateTime startedAt;
    private LocalDateTime completedAt;

    private String triggeredBy;    // user or system that created the task
    private String justification;  // free-text reason supplied at submission

    @JdbcTypeCode(SqlTypes.JSON)
    @Column(columnDefinition = "jsonb")
    private ArchivalScopeData scope; // Store as JSON

    @JdbcTypeCode(SqlTypes.JSON)
    @Column(columnDefinition = "jsonb")
    private ArchivalConfigOverrideData overrideConfig; // Store as JSON

    // Aggregate counters over the assets in scope.
    private int totalAssetsInScope;
    private int assetsArchivedSuccessfully;
    private int assetsFailedToArchive;
    private int assetsPendingArchival;

    @JdbcTypeCode(SqlTypes.JSON)
    @Column(columnDefinition = "jsonb")
    private List<AssetArchivalStatusData> assetStatuses; // Store as JSON

    @JdbcTypeCode(SqlTypes.JSON)
    @Column(columnDefinition = "jsonb")
    private List<String> errorMessages; // Store as JSON

    private String approvalComments;
    private String rejectionComments;

    // Details of the most recent restore request, if any.
    @JdbcTypeCode(SqlTypes.JSON)
    @Column(columnDefinition = "jsonb")
    private RestoreRequestData restoreRequestDetails; // Store restore request as JSON
    private LocalDateTime lastRestoreRequestedAt;
    private String lastRestoreStatus;


    // Static inner classes for JSONB data
    /** Asset IDs covered by the task. */
    @Data @NoArgsConstructor @AllArgsConstructor
    public static class ArchivalScopeData {
        private List<String> assetIds;
    }

    /** Per-task overrides of the global archival configuration. */
    @Data @NoArgsConstructor @AllArgsConstructor
    public static class ArchivalConfigOverrideData {
        private String targetArchivalTier;
        private Long retentionDays;
    }

    /** Per-asset progress snapshot embedded in the task row. */
    @Data @NoArgsConstructor @AllArgsConstructor
    public static class AssetArchivalStatusData {
        private String assetId;
        private String assetName;
        private String status; // PENDING, IN_PROGRESS, ARCHIVED, FAILED, RESTORED
        private String archivalLocation;
        private String errorMessage;
        private LocalDateTime archivedAt;
        private LocalDateTime restoredAt;
    }

    /** Parameters of a restore request recorded against this task. */
    @Data @NoArgsConstructor @AllArgsConstructor
    public static class RestoreRequestData {
        private String restoreTier;      // e.g. Standard/Expedited/Bulk — provider dependent
        private int availabilityDays;    // how long restored data stays accessible
        private String triggeredBy;
        private String justification;
        private LocalDateTime requestedAt;
    }
}
src/main/java/com/dalab/autoarchival/model/RestoreResult.java ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.model;
2
+
3
+ /**
4
+ * DTO to hold the results of a restore initiation operation.
5
+ */
6
+ public class RestoreResult {
7
+
8
+ private String restoreJobId; // Identifier for the restore job (e.g., Glacier Job ID).
9
+ private String statusMessage; // Initial status or message from the provider (e.g., "Restore initiated").
10
+ private java.time.OffsetDateTime estimatedCompletionTime; // Estimated time when data will be available, if provided.
11
+ private String retrievalTier; // Tier used for retrieval (e.g., Standard, Expedited, Bulk for Glacier).
12
+
13
+ // Constructors
14
+ public RestoreResult() {
15
+ }
16
+
17
+ public RestoreResult(String restoreJobId, String statusMessage, java.time.OffsetDateTime estimatedCompletionTime, String retrievalTier) {
18
+ this.restoreJobId = restoreJobId;
19
+ this.statusMessage = statusMessage;
20
+ this.estimatedCompletionTime = estimatedCompletionTime;
21
+ this.retrievalTier = retrievalTier;
22
+ }
23
+
24
+ // Getters and Setters
25
+ public String getRestoreJobId() {
26
+ return restoreJobId;
27
+ }
28
+
29
+ public void setRestoreJobId(String restoreJobId) {
30
+ this.restoreJobId = restoreJobId;
31
+ }
32
+
33
+ public String getStatusMessage() {
34
+ return statusMessage;
35
+ }
36
+
37
+ public void setStatusMessage(String statusMessage) {
38
+ this.statusMessage = statusMessage;
39
+ }
40
+
41
+ public java.time.OffsetDateTime getEstimatedCompletionTime() {
42
+ return estimatedCompletionTime;
43
+ }
44
+
45
+ public void setEstimatedCompletionTime(java.time.OffsetDateTime estimatedCompletionTime) {
46
+ this.estimatedCompletionTime = estimatedCompletionTime;
47
+ }
48
+
49
+ public String getRetrievalTier() {
50
+ return retrievalTier;
51
+ }
52
+
53
+ public void setRetrievalTier(String retrievalTier) {
54
+ this.retrievalTier = retrievalTier;
55
+ }
56
+
57
+ @Override
58
+ public String toString() {
59
+ return "RestoreResult{" +
60
+ "restoreJobId='" + restoreJobId + '\'' +
61
+ ", statusMessage='" + statusMessage + '\'' +
62
+ ", estimatedCompletionTime=" + estimatedCompletionTime +
63
+ ", retrievalTier='" + retrievalTier + '\'' +
64
+ '}';
65
+ }
66
+ }
src/main/java/com/dalab/autoarchival/provider/AwsS3GlacierArchivalProvider.java ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.provider;
2
+
3
+ import java.io.InputStream;
4
+ import java.util.Map;
5
+
6
+ import org.springframework.stereotype.Component;
7
+
8
+ import com.dalab.autoarchival.exception.ArchivalException;
9
+ import com.dalab.autoarchival.model.ArchivalResult;
10
+ import com.dalab.autoarchival.model.ArchivalTask;
11
+ import com.dalab.autoarchival.model.RestoreResult;
12
+
13
+ @Component // Or configure as a bean in a config class
14
+ public class AwsS3GlacierArchivalProvider implements ICloudArchivalProvider {
15
+
16
+ private static final String PROVIDER_TYPE = "AWS";
17
+ private static final String NOT_IMPLEMENTED_MESSAGE = "AWS S3/Glacier archival feature is not yet implemented.";
18
+
19
+ @Override
20
+ public String getProviderType() {
21
+ return PROVIDER_TYPE;
22
+ }
23
+
24
+ @Override
25
+ public ArchivalResult archive(ArchivalTask task, InputStream dataToArchive, Map<String, String> providerSpecificConfig) throws ArchivalException {
26
+ // TODO: Implement AWS S3/Glacier archival logic
27
+ // 1. Configure AWS S3 Client (consider S3AsyncClient for non-blocking)
28
+ // 2. Determine target bucket, key, storage class (e.g., GLACIER, DEEP_ARCHIVE) from providerSpecificConfig or task
29
+ // 3. Upload InputStream to S3 with appropriate storage class.
30
+ // 4. Populate and return ArchivalResult.
31
+ throw new ArchivalException(NOT_IMPLEMENTED_MESSAGE);
32
+ }
33
+
34
+ @Override
35
+ public RestoreResult initiateRestore(ArchivalTask task, Map<String, String> providerSpecificConfig) throws ArchivalException {
36
+ // TODO: Implement AWS S3/Glacier restore initiation logic
37
+ // 1. Configure AWS S3 Client.
38
+ // 2. Use task.getArchiveId() or task.getArchiveStorageLocation() to identify the object.
39
+ // 3. Initiate restore request (e.g., s3Client.restoreObjectV2(...)).
40
+ // 4. Populate and return RestoreResult.
41
+ throw new ArchivalException(NOT_IMPLEMENTED_MESSAGE);
42
+ }
43
+ }
src/main/java/com/dalab/autoarchival/provider/GcsArchiveStorageProvider.java ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.provider;
2
+
3
+ import com.dalab.autoarchival.exception.ArchivalException;
4
+ import com.dalab.autoarchival.model.ArchivalResult;
5
+ import com.dalab.autoarchival.model.ArchivalTask;
6
+ import com.dalab.autoarchival.model.RestoreResult;
7
+ import org.springframework.stereotype.Component;
8
+
9
+ import java.io.InputStream;
10
+ import java.util.Map;
11
+
12
+ @Component // Or configure as a bean in a config class
13
+ public class GcsArchiveStorageProvider implements ICloudArchivalProvider {
14
+
15
+ private static final String PROVIDER_TYPE = "GCP";
16
+ private static final String NOT_IMPLEMENTED_MESSAGE = "GCP Cloud Storage archival feature is not yet implemented.";
17
+
18
+ @Override
19
+ public String getProviderType() {
20
+ return PROVIDER_TYPE;
21
+ }
22
+
23
+ @Override
24
+ public ArchivalResult archive(ArchivalTask task, InputStream dataToArchive, Map<String, String> providerSpecificConfig) throws ArchivalException {
25
+ // TODO: Implement GCS archival logic
26
+ // 1. Configure GCS Storage client.
27
+ // 2. Determine target bucket, object name, storage class (e.g., ARCHIVE) from providerSpecificConfig or task.
28
+ // 3. Upload InputStream to GCS.
29
+ // 4. Populate and return ArchivalResult.
30
+ throw new ArchivalException(NOT_IMPLEMENTED_MESSAGE);
31
+ }
32
+
33
+ @Override
34
+ public RestoreResult initiateRestore(ArchivalTask task, Map<String, String> providerSpecificConfig) throws ArchivalException {
35
+ // TODO: Implement GCS restore logic (GCS restore is typically just changing storage class or direct download if not in coldline/archive that requires restore step)
36
+ // For GCS "Archive" class, objects are accessible directly but with higher retrieval costs/times.
37
+ // A "restore" might mean copying to a standard storage class bucket or simply noting accessibility.
38
+ throw new ArchivalException(NOT_IMPLEMENTED_MESSAGE);
39
+ }
40
+ }
src/main/java/com/dalab/autoarchival/provider/ICloudArchivalProvider.java ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.provider;

import com.dalab.autoarchival.model.ArchivalTask;
import com.dalab.autoarchival.model.ArchivalResult;
import com.dalab.autoarchival.model.RestoreResult;
import com.dalab.autoarchival.exception.ArchivalException;

import java.io.InputStream;
import java.util.Map;

/**
 * Interface for cloud-specific archival storage providers.
 * Implementations will handle the actual interaction with services like AWS S3 Glacier, GCS Archive, etc.
 *
 * <p>Implementations are selected by {@link #getProviderType()}; callers
 * supply provider-specific configuration per call rather than at bean
 * construction time.</p>
 */
public interface ICloudArchivalProvider {

    /**
     * Gets the specific cloud provider type this provider supports (e.g., "AWS", "GCP").
     * This can be used by a factory to select the correct provider.
     * @return The cloud provider type identifier.
     */
    String getProviderType();

    /**
     * Archives the given data stream for the specified archival task.
     *
     * <p>NOTE(review): ownership of {@code dataToArchive} (who closes it) is
     * not specified here — confirm and document the convention with callers.</p>
     *
     * @param task The ArchivalTask entity containing details about the asset and archival job.
     * @param dataToArchive An InputStream containing the data to be archived.
     * @param providerSpecificConfig Configuration specific to this provider (e.g., bucket name, region, storage class).
     * @return An ArchivalResult containing details of the archival operation (e.g., archive ID, location).
     * @throws ArchivalException if any error occurs during the archival process.
     */
    ArchivalResult archive(ArchivalTask task, InputStream dataToArchive, Map<String, String> providerSpecificConfig) throws ArchivalException;

    /**
     * Initiates the restoration of an archived asset.
     * For some services (like Glacier), this might be an asynchronous operation.
     *
     * @param task The ArchivalTask entity for which data needs to be restored.
     *             It should contain necessary identifiers from a previous successful archival.
     * @param providerSpecificConfig Configuration specific to this provider.
     * @return A RestoreResult indicating the status of the restore initiation (e.g., job ID for tracking).
     * @throws ArchivalException if any error occurs during the restore initiation.
     */
    RestoreResult initiateRestore(ArchivalTask task, Map<String, String> providerSpecificConfig) throws ArchivalException;

    // Optional methods for more detailed restore management - can be added later
    // String getRestoreStatus(ArchivalTask task, String restoreJobId, Map<String, String> providerSpecificConfig) throws ArchivalException;
    // InputStream retrieveRestoredData(ArchivalTask task, String restoreJobId, Map<String, String> providerSpecificConfig) throws ArchivalException;
}
src/main/java/com/dalab/autoarchival/repository/ArchivalConfigRepository.java ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.repository;

import com.dalab.autoarchival.model.ArchivalConfigEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

/**
 * Spring Data repository for the single-row global archival configuration.
 */
@Repository
public interface ArchivalConfigRepository extends JpaRepository<ArchivalConfigEntity, Long> {
    // Since it's a global config with a fixed ID, specific find methods might not be needed
    // Can use findById(1L)
}
src/main/java/com/dalab/autoarchival/repository/ArchivalTaskRepository.java ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.repository;

import com.dalab.autoarchival.model.ArchivalTaskEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

/**
 * Spring Data repository for archival tasks, keyed by the String taskId.
 */
@Repository
public interface ArchivalTaskRepository extends JpaRepository<ArchivalTaskEntity, String> {
    // Add custom query methods if needed, e.g., findByStatus, findByTriggeredBy, etc.
}
src/main/java/com/dalab/autoarchival/service/IArchivalConfigService.java ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.service;

import com.dalab.autoarchival.dto.ArchivalConfigDTO;

/**
 * Service contract for reading and updating the global archival configuration.
 */
public interface IArchivalConfigService {

    /**
     * Retrieves the current archival configuration.
     * @return The current ArchivalConfigDTO, or a default/empty one if not configured.
     */
    ArchivalConfigDTO getArchivalConfig();

    /**
     * Updates the archival configuration.
     * @param archivalConfigDTO The new configuration to apply.
     */
    void updateArchivalConfig(ArchivalConfigDTO archivalConfigDTO);
}
src/main/java/com/dalab/autoarchival/service/IArchivalTaskService.java ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.service;

import com.dalab.autoarchival.dto.ArchivalTaskListResponse;
import com.dalab.autoarchival.dto.ArchivalTaskRequest;
import com.dalab.autoarchival.dto.ArchivalTaskResponse;
import com.dalab.autoarchival.dto.ArchivalTaskStatusDTO;
import com.dalab.autoarchival.dto.TaskApprovalRequest;
import com.dalab.autoarchival.dto.RestoreRequestDTO;
import com.dalab.autoarchival.dto.RestoreResponseDTO;
// Potentially add other DTOs like ArchivalTaskSummaryDTO if needed for other methods
import org.springframework.data.domain.Pageable;

/**
 * Service contract for the archival task lifecycle: submission, status lookup,
 * listing, approval/rejection, restore requests, and policy-engine-driven archival.
 */
public interface IArchivalTaskService {

    /**
     * Submits a new archival task.
     *
     * @param request The archival task request.
     * @return ArchivalTaskResponse indicating the outcome of the submission
     *         (including a validation-failure response when the scope has no asset IDs).
     */
    ArchivalTaskResponse submitArchivalTask(ArchivalTaskRequest request);

    /**
     * Retrieves the status of a specific archival task.
     *
     * @param taskId The ID of the task.
     * @return ArchivalTaskStatusDTO containing the detailed status, or null if not found.
     */
    ArchivalTaskStatusDTO getTaskStatus(String taskId);

    /**
     * Lists all archival tasks with pagination.
     *
     * @param pageable Pagination information.
     * @return ArchivalTaskListResponse containing the paginated list of tasks.
     */
    ArchivalTaskListResponse listTasks(Pageable pageable);

    /**
     * Approves a task that is pending approval.
     *
     * @param taskId The ID of the task to be approved.
     * @param approvalRequest DTO containing approval parameters.
     * @return ArchivalTaskStatusDTO indicating the outcome of the approval.
     */
    ArchivalTaskStatusDTO approveTask(String taskId, TaskApprovalRequest approvalRequest);

    /**
     * Rejects a task that is pending approval.
     *
     * @param taskId The ID of the task to be rejected.
     * @param rejectionRequest DTO containing rejection parameters.
     * @return ArchivalTaskStatusDTO indicating the outcome of the rejection.
     */
    ArchivalTaskStatusDTO rejectTask(String taskId, TaskApprovalRequest rejectionRequest);

    /**
     * Requests restoration of assets associated with an archival task.
     *
     * @param taskId The ID of the original archival task whose assets are to be restored.
     * @param restoreRequest DTO containing restore parameters.
     * @return RestoreResponseDTO indicating the outcome of the restore request.
     */
    RestoreResponseDTO requestRestore(String taskId, RestoreRequestDTO restoreRequest);

    /**
     * Processes archival action from policy engine events.
     * The event map is expected to carry at least "eventId", "policyId",
     * "actionType" and "assetId" String values — TODO confirm against the Kafka producer.
     *
     * @param event The policy action event containing archival instructions.
     */
    void processArchivalAction(java.util.Map<String, Object> event);

    // Future methods:
    // Page<ArchivalTaskSummaryDTO> listTasks(Pageable pageable);
    // void approveTask(String taskId, String approverComments);
    // void rejectTask(String taskId, String rejectorComments);
    // ArchivalTaskResponse requestRestore(String taskId, RestoreRequestDTO restoreRequest);
}
src/main/java/com/dalab/autoarchival/service/impl/ArchivalConfigServiceImpl.java ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.service.impl;
2
+
3
+ import com.dalab.autoarchival.dto.ArchivalConfigDTO;
4
+ import com.dalab.autoarchival.mapper.ArchivalConfigMapper;
5
+ import com.dalab.autoarchival.model.ArchivalConfigEntity;
6
+ import com.dalab.autoarchival.repository.ArchivalConfigRepository;
7
+ import com.dalab.autoarchival.service.IArchivalConfigService;
8
+ import jakarta.annotation.PostConstruct;
9
+ import lombok.RequiredArgsConstructor;
10
+ import lombok.extern.slf4j.Slf4j;
11
+ import org.springframework.stereotype.Service;
12
+ import org.springframework.transaction.annotation.Transactional;
13
+
14
+ import java.util.HashMap;
15
+ import java.util.Optional;
16
+
17
+ @Service("archivalConfigService") // Specify bean name to avoid conflict if InMemory still exists
18
+ @RequiredArgsConstructor
19
+ @Slf4j
20
+ public class ArchivalConfigServiceImpl implements IArchivalConfigService {
21
+
22
+ private final ArchivalConfigRepository configRepository;
23
+ private final ArchivalConfigMapper configMapper;
24
+ private static final Long GLOBAL_CONFIG_ID = 1L;
25
+
26
+ @PostConstruct
27
+ public void init() {
28
+ // Ensure a default config exists if the DB is empty
29
+ if (!configRepository.existsById(GLOBAL_CONFIG_ID)) {
30
+ log.info("No global archival configuration found. Initializing with default values.");
31
+ ArchivalConfigEntity defaultConfig = new ArchivalConfigEntity(
32
+ GLOBAL_CONFIG_ID,
33
+ false, // enabled
34
+ "STANDARD_ARCHIVE", // defaultArchivalTier
35
+ 3650L, // defaultRetentionDays (10 years)
36
+ true, // requireApprovalForArchival
37
+ true, // requireApprovalForRestore
38
+ new HashMap<>() // providerConfigs
39
+ );
40
+ configRepository.save(defaultConfig);
41
+ }
42
+ }
43
+
44
+ @Override
45
+ @Transactional(readOnly = true)
46
+ public ArchivalConfigDTO getArchivalConfig() {
47
+ log.debug("Fetching global archival configuration.");
48
+ Optional<ArchivalConfigEntity> entityOptional = configRepository.findById(GLOBAL_CONFIG_ID);
49
+ // Should always exist due to PostConstruct, but handle defensively
50
+ return entityOptional.map(configMapper::toDto)
51
+ .orElseGet(() -> {
52
+ log.warn("Global archival configuration (ID: {}) not found, returning empty DTO. This should not happen after initialization.", GLOBAL_CONFIG_ID);
53
+ return new ArchivalConfigDTO(); // Or throw exception
54
+ });
55
+ }
56
+
57
+ @Override
58
+ @Transactional
59
+ public void updateArchivalConfig(ArchivalConfigDTO archivalConfigDTO) {
60
+ if (archivalConfigDTO == null) {
61
+ throw new IllegalArgumentException("Archival configuration DTO cannot be null.");
62
+ }
63
+ log.info("Updating global archival configuration.");
64
+ ArchivalConfigEntity entity = configRepository.findById(GLOBAL_CONFIG_ID)
65
+ .orElseThrow(() -> {
66
+ log.error("Global archival config (ID: {}) not found for update. This is unexpected.", GLOBAL_CONFIG_ID);
67
+ return new IllegalStateException("Global archival configuration not found for update.");
68
+ });
69
+
70
+ configMapper.updateEntityFromDto(archivalConfigDTO, entity);
71
+ // Ensure ID is not changed by mapper if DTO carries it over
72
+ entity.setId(GLOBAL_CONFIG_ID);
73
+ configRepository.save(entity);
74
+ log.info("Global archival configuration updated successfully.");
75
+ }
76
+ }
src/main/java/com/dalab/autoarchival/service/impl/ArchivalTaskNotFoundException.java ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.service.impl;
2
+
3
+ import org.springframework.http.HttpStatus;
4
+ import org.springframework.web.bind.annotation.ResponseStatus;
5
+
6
+ @ResponseStatus(HttpStatus.NOT_FOUND)
7
+ public class ArchivalTaskNotFoundException extends RuntimeException {
8
+ public ArchivalTaskNotFoundException(String taskId, String operation) {
9
+ super(String.format("Archival task with ID '%s' not found for operation: %s", taskId, operation));
10
+ }
11
+ }
src/main/java/com/dalab/autoarchival/service/impl/ArchivalTaskServiceImpl.java ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.service.impl;
2
+
3
+ import java.time.LocalDateTime;
4
+ import java.util.Collections;
5
+ import java.util.List;
6
+ import java.util.UUID;
7
+ import java.util.stream.Collectors;
8
+
9
+ import org.springframework.data.domain.Page;
10
+ import org.springframework.data.domain.PageImpl;
11
+ import org.springframework.data.domain.Pageable;
12
+ import org.springframework.stereotype.Service;
13
+ import org.springframework.transaction.annotation.Transactional;
14
+
15
+ import com.dalab.autoarchival.dto.ArchivalConfigDTO;
16
+ import com.dalab.autoarchival.dto.ArchivalTaskListResponse;
17
+ import com.dalab.autoarchival.dto.ArchivalTaskRequest;
18
+ import com.dalab.autoarchival.dto.ArchivalTaskResponse;
19
+ import com.dalab.autoarchival.dto.ArchivalTaskStatusDTO;
20
+ import com.dalab.autoarchival.dto.RestoreRequestDTO;
21
+ import com.dalab.autoarchival.dto.RestoreResponseDTO;
22
+ import com.dalab.autoarchival.dto.TaskApprovalRequest;
23
+ import com.dalab.autoarchival.mapper.ArchivalTaskMapper;
24
+ import com.dalab.autoarchival.model.ArchivalStatus;
25
+ import com.dalab.autoarchival.model.ArchivalTaskEntity;
26
+ import com.dalab.autoarchival.repository.ArchivalTaskRepository;
27
+ import com.dalab.autoarchival.service.IArchivalConfigService;
28
+ import com.dalab.autoarchival.service.IArchivalTaskService;
29
+
30
+ import lombok.RequiredArgsConstructor;
31
+ import lombok.extern.slf4j.Slf4j;
32
+
33
+ @Service("archivalTaskService") // Specify bean name
34
+ @RequiredArgsConstructor
35
+ @Slf4j
36
+ public class ArchivalTaskServiceImpl implements IArchivalTaskService {
37
+
38
+ private final ArchivalTaskRepository taskRepository;
39
+ private final ArchivalTaskMapper taskMapper;
40
+ private final IArchivalConfigService archivalConfigService; // JPA version
41
+
42
+ @Override
43
+ @Transactional
44
+ public ArchivalTaskResponse submitArchivalTask(ArchivalTaskRequest request) {
45
+ log.info("Submitting new archival task: {}", request.getTaskName());
46
+ if (request.getScope() == null || request.getScope().getAssetIds() == null || request.getScope().getAssetIds().isEmpty()) {
47
+ log.warn("Archival task submission failed for task {}: Asset IDs are missing.", request.getTaskName());
48
+ // Not creating an entity for a failed validation that doesn't hit DB
49
+ return ArchivalTaskResponse.builder()
50
+ .taskId(null) // No ID generated
51
+ .taskName(request.getTaskName())
52
+ .status("FAILED_VALIDATION")
53
+ .submittedAt(LocalDateTime.now())
54
+ .message("Archival scope must contain at least one asset ID.")
55
+ .build();
56
+ }
57
+
58
+ ArchivalTaskEntity entity = taskMapper.requestToEntity(request);
59
+ entity.setTaskId(UUID.randomUUID().toString());
60
+ entity.setSubmittedAt(LocalDateTime.now());
61
+
62
+ ArchivalConfigDTO globalConfig = archivalConfigService.getArchivalConfig();
63
+ if (globalConfig.isRequireApprovalForArchival()) {
64
+ entity.setStatus(ArchivalStatus.PENDING_APPROVAL);
65
+ } else {
66
+ entity.setStatus(ArchivalStatus.SUBMITTED);
67
+ // TODO: If not pending approval, potentially trigger async processing immediately
68
+ }
69
+
70
+ // Initialize asset statuses if scope is present
71
+ if (request.getScope() != null && request.getScope().getAssetIds() != null) {
72
+ List<ArchivalTaskEntity.AssetArchivalStatusData> initialAssetStatuses = request.getScope().getAssetIds().stream()
73
+ .map(assetId -> new ArchivalTaskEntity.AssetArchivalStatusData(assetId, null, "PENDING", null, null, null, null))
74
+ .collect(Collectors.toList());
75
+ entity.setAssetStatuses(initialAssetStatuses);
76
+ entity.setAssetsPendingArchival(initialAssetStatuses.size());
77
+ }
78
+
79
+ ArchivalTaskEntity savedEntity = taskRepository.save(entity);
80
+ log.info("Archival task {} submitted with ID {} and status {}.", savedEntity.getTaskName(), savedEntity.getTaskId(), savedEntity.getStatus());
81
+ return taskMapper.entityToTaskResponse(savedEntity);
82
+ }
83
+
84
+ @Override
85
+ @Transactional(readOnly = true)
86
+ public ArchivalTaskStatusDTO getTaskStatus(String taskId) {
87
+ log.debug("Fetching status for archival task ID: {}", taskId);
88
+ return taskRepository.findById(taskId)
89
+ .map(taskMapper::entityToStatusDTO)
90
+ .orElse(null);
91
+ }
92
+
93
+ @Override
94
+ @Transactional(readOnly = true)
95
+ public ArchivalTaskListResponse listTasks(Pageable pageable) {
96
+ log.debug("Listing archival tasks with pagination: {}", pageable);
97
+ Page<ArchivalTaskEntity> taskPage = taskRepository.findAll(pageable);
98
+ List<ArchivalTaskStatusDTO> dtoList = taskPage.getContent().stream()
99
+ .map(taskMapper::entityToStatusDTO)
100
+ .collect(Collectors.toList());
101
+ PageImpl<ArchivalTaskStatusDTO> dtoPage = new PageImpl<>(dtoList, pageable, taskPage.getTotalElements());
102
+ return ArchivalTaskListResponse.fromPage(dtoPage);
103
+ }
104
+
105
+ @Override
106
+ @Transactional
107
+ public ArchivalTaskStatusDTO approveTask(String taskId, TaskApprovalRequest approvalRequest) {
108
+ log.info("Approving archival task ID: {}", taskId);
109
+ ArchivalTaskEntity task = taskRepository.findById(taskId)
110
+ .orElseThrow(() -> new ArchivalTaskNotFoundException(taskId, "approve"));
111
+
112
+ if (task.getStatus() != ArchivalStatus.PENDING_APPROVAL) {
113
+ log.warn("Cannot approve task {}: not in PENDING_APPROVAL state (current: {}).", taskId, task.getStatus());
114
+ // Return current status with an error message if desired, or throw specific exception
115
+ ArchivalTaskStatusDTO currentStatusDto = taskMapper.entityToStatusDTO(task);
116
+ currentStatusDto.setErrorMessages(Collections.singletonList("Task is not in a state that can be approved."));
117
+ return currentStatusDto;
118
+ }
119
+ task.setStatus(ArchivalStatus.APPROVED); // Or SUBMITTED to enter processing queue
120
+ if (approvalRequest != null) {
121
+ task.setApprovalComments(approvalRequest.getComments());
122
+ }
123
+ // TODO: Trigger async processing if approval is the gate
124
+ ArchivalTaskEntity savedTask = taskRepository.save(task);
125
+ return taskMapper.entityToStatusDTO(savedTask);
126
+ }
127
+
128
+ @Override
129
+ @Transactional
130
+ public ArchivalTaskStatusDTO rejectTask(String taskId, TaskApprovalRequest rejectionRequest) {
131
+ log.info("Rejecting archival task ID: {}", taskId);
132
+ ArchivalTaskEntity task = taskRepository.findById(taskId)
133
+ .orElseThrow(() -> new ArchivalTaskNotFoundException(taskId, "reject"));
134
+
135
+ if (task.getStatus() != ArchivalStatus.PENDING_APPROVAL) {
136
+ log.warn("Cannot reject task {}: not in PENDING_APPROVAL state (current: {}).", taskId, task.getStatus());
137
+ ArchivalTaskStatusDTO currentStatusDto = taskMapper.entityToStatusDTO(task);
138
+ currentStatusDto.setErrorMessages(Collections.singletonList("Task is not in a state that can be rejected."));
139
+ return currentStatusDto;
140
+ }
141
+ task.setStatus(ArchivalStatus.REJECTED);
142
+ if (rejectionRequest != null) {
143
+ task.setRejectionComments(rejectionRequest.getComments());
144
+ }
145
+ ArchivalTaskEntity savedTask = taskRepository.save(task);
146
+ return taskMapper.entityToStatusDTO(savedTask);
147
+ }
148
+
149
+ @Override
150
+ @Transactional
151
+ public RestoreResponseDTO requestRestore(String taskId, RestoreRequestDTO restoreRequestDto) {
152
+ log.info("Requesting restore for archival task ID: {}", taskId);
153
+ ArchivalTaskEntity task = taskRepository.findById(taskId)
154
+ .orElseThrow(() -> new ArchivalTaskNotFoundException(taskId, "restore request"));
155
+
156
+ // Typically, only COMPLETED or ARCHIVED tasks can be restored
157
+ if (task.getStatus() != ArchivalStatus.COMPLETED && task.getStatus() != ArchivalStatus.APPROVED /* if APPROVED implies archived */ ) {
158
+ log.warn("Cannot restore task {}: not in a restorable state (current: {}).", taskId, task.getStatus());
159
+ return RestoreResponseDTO.builder()
160
+ .archivalTaskId(taskId)
161
+ .status("NOT_RESTORABLE")
162
+ .requestedAt(LocalDateTime.now())
163
+ .message("Archival task is not in a state that allows restoration.")
164
+ .build();
165
+ }
166
+
167
+ task.setRestoreRequestDetails(taskMapper.restoreRequestDtoToData(restoreRequestDto));
168
+ task.getRestoreRequestDetails().setRequestedAt(LocalDateTime.now()); // Ensure request time is set
169
+ task.setLastRestoreRequestedAt(LocalDateTime.now());
170
+ task.setLastRestoreStatus(ArchivalStatus.RESTORATION_REQUESTED.name());
171
+ task.setStatus(ArchivalStatus.RESTORATION_REQUESTED); // Update main task status
172
+
173
+ // TODO: Trigger async restore process. This process should:
174
+ // 1. Iterate task.getAssetStatuses()
175
+ // 2. For each asset, call cloud provider API to initiate restore from task.assetStatuses[i].archivalLocation
176
+ // 3. Update task.assetStatuses[i].status to RESTORATION_IN_PROGRESS, then RESTORED/FAILED
177
+ // 4. Update task.assetsRestoredSuccessfully etc. counts
178
+ // 5. Update task.status (main) to RESTORATION_IN_PROGRESS, then RESTORED or PARTIALLY_COMPLETED / RESTORATION_FAILED
179
+
180
+ ArchivalTaskEntity savedTask = taskRepository.save(task);
181
+ log.info("Restore initiated for task {}. New status: {}. Restore Tier: {}",
182
+ savedTask.getTaskId(), savedTask.getStatus(), restoreRequestDto.getRestoreTier());
183
+
184
+ return RestoreResponseDTO.builder()
185
+ .restoreTaskId(savedTask.getTaskId()) // Using archival task ID as restore task ID for now
186
+ .archivalTaskId(savedTask.getTaskId())
187
+ .status(savedTask.getStatus().name())
188
+ .requestedAt(savedTask.getLastRestoreRequestedAt())
189
+ .message("Restore process initiated.")
190
+ .build();
191
+ }
192
+
193
+ @Override
194
+ public void processArchivalAction(java.util.Map<String, Object> event) {
195
+ // Extract relevant information from the event
196
+ String eventId = (String) event.get("eventId");
197
+ String policyId = (String) event.get("policyId");
198
+ String actionType = (String) event.get("actionType");
199
+ String assetId = (String) event.get("assetId");
200
+
201
+ log.info("Processing archival action for event: {}, policy: {}, asset: {}", eventId, policyId, assetId);
202
+
203
+ try {
204
+ // Create an archival scope with the asset ID
205
+ ArchivalTaskRequest.ArchivalScope scope = ArchivalTaskRequest.ArchivalScope.builder()
206
+ .assetIds(java.util.List.of(assetId))
207
+ .build();
208
+
209
+ // Create an archival task request based on the policy action
210
+ ArchivalTaskRequest request = ArchivalTaskRequest.builder()
211
+ .taskName("Auto-archival from policy " + policyId)
212
+ .scope(scope)
213
+ .justification("Automated archival triggered by policy engine")
214
+ .build();
215
+
216
+ // Submit the archival task
217
+ ArchivalTaskResponse response = submitArchivalTask(request);
218
+ log.info("Successfully created archival task {} for event {}", response.getTaskId(), eventId);
219
+
220
+ } catch (Exception e) {
221
+ log.error("Failed to process archival action for event {}: {}", eventId, e.getMessage(), e);
222
+ throw new RuntimeException("Failed to process archival action", e);
223
+ }
224
+ }
225
+ }
src/main/java/com/dalab/autoarchival/service/storage/ArchivalResult.java ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.service.storage;

import java.time.LocalDateTime;
import java.util.Map;
import java.util.UUID;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Result of an archival operation.
 *
 * <p>Mutable Lombok DTO produced by {@link ICloudStorageService#archiveData}.
 * On failure, {@code status} is {@link OperationStatus#FAILED} and
 * {@code errorMessage} is populated; success-only fields (completedAt,
 * dataSizeBytes, savings) may then be null.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ArchivalResult {

    private UUID assetId;                    // asset this operation applies to
    private String operationId;              // provider-agnostic operation identifier
    private String sourceLocation;           // original data location (e.g. s3://bucket/key)
    private String archivedLocation;         // location after archival (may equal sourceLocation when only the storage class changes)
    private String targetTier;               // requested archival tier (e.g. "GLACIER")
    private OperationStatus status;          // outcome of the operation
    private LocalDateTime initiatedAt;       // when the operation started
    private LocalDateTime completedAt;       // when it finished; null while pending/failed
    private Long dataSizeBytes;              // size of the archived object, if known
    private Map<String, String> metadata;    // metadata stored alongside the archived object
    private String errorMessage;             // failure detail; null on success
    private Double estimatedMonthlySavings;  // estimated monthly cost savings (USD) — approximate
}
src/main/java/com/dalab/autoarchival/service/storage/CostSavingsEstimate.java ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.service.storage;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Cost savings estimate for archival operations.
 *
 * <p>Mutable Lombok DTO returned by {@link ICloudStorageService#calculateCostSavings};
 * figures are estimates based on approximate per-GB pricing, not billing data.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class CostSavingsEstimate {

    private String sourceStorageClass;   // storage class the data currently resides in
    private String targetStorageClass;   // archival storage class being evaluated
    private Long dataSizeBytes;          // size of the data under consideration
    private Double currentMonthlyCost;   // estimated monthly cost at the source class
    private Double archivedMonthlyCost;  // estimated monthly cost at the target class
    private Double monthlySavings;       // currentMonthlyCost - archivedMonthlyCost
    private Double savingsPercentage;    // savings as a percentage of current cost
    private String currency;             // currency code for the cost figures
    private String region;               // cloud region the pricing applies to
}
src/main/java/com/dalab/autoarchival/service/storage/ICloudStorageService.java ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.service.storage;

import java.util.Map;
import java.util.UUID;

/**
 * Interface for cloud storage operations supporting archival and restoration.
 * Abstracts different cloud providers (AWS S3/Glacier, Azure Blob, GCP Cloud Storage).
 *
 * <p>Implementations are expected to report failures through the returned result
 * objects ({@link ArchivalResult}/{@link RestorationResult} with a FAILED status)
 * rather than by throwing — NOTE(review): confirm this contract against implementations.</p>
 */
public interface ICloudStorageService {

    /**
     * Archive data to cold storage tier.
     *
     * @param assetId The unique identifier of the asset being archived
     * @param sourceLocation The original location of the data (e.g., S3 key, blob name)
     * @param targetTier The target storage tier (e.g., "GLACIER", "ARCHIVE", "COLDLINE")
     * @param metadata Additional metadata to store with the archived object
     * @return ArchivalResult containing the archived location and operation details
     */
    ArchivalResult archiveData(UUID assetId, String sourceLocation, String targetTier, Map<String, String> metadata);

    /**
     * Restore data from cold storage to accessible tier.
     *
     * @param assetId The unique identifier of the asset being restored
     * @param archivedLocation The location of the archived data
     * @param targetTier The target tier for restoration (e.g., "STANDARD", "HOT")
     * @return RestorationResult containing the restoration job details
     */
    RestorationResult restoreData(UUID assetId, String archivedLocation, String targetTier);

    /**
     * Get the status of an archival operation.
     *
     * @param operationId The ID of the archival operation
     * @return OperationStatus indicating the current state
     */
    OperationStatus getArchivalStatus(String operationId);

    /**
     * Get the status of a restoration operation.
     *
     * @param operationId The ID of the restoration operation
     * @return OperationStatus indicating the current state
     */
    OperationStatus getRestorationStatus(String operationId);

    /**
     * Calculate the cost savings from archiving data.
     *
     * @param sourceLocation The original location
     * @param targetTier The archival tier
     * @param dataSizeBytes The size of data in bytes
     * @return CostSavingsEstimate with monthly savings information
     */
    CostSavingsEstimate calculateCostSavings(String sourceLocation, String targetTier, long dataSizeBytes);

    /**
     * Check if the storage service supports the specified cloud provider.
     *
     * @param cloudProvider The cloud provider (AWS, AZURE, GCP)
     * @return true if supported, false otherwise
     */
    boolean supportsCloudProvider(String cloudProvider);

    /**
     * Get available storage tiers for the cloud provider.
     *
     * @param cloudProvider The cloud provider
     * @return Array of available tier names
     */
    String[] getAvailableStorageTiers(String cloudProvider);
}
src/main/java/com/dalab/autoarchival/service/storage/OperationStatus.java ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.service.storage;

/**
 * Status of cloud storage operations (archival/restoration).
 */
public enum OperationStatus {
    INITIATED,        // request accepted, work not yet started
    IN_PROGRESS,      // operation is running (e.g. a pending Glacier restore)
    COMPLETED,        // operation finished successfully
    FAILED,           // operation failed; see the result's errorMessage
    CANCELLED,        // operation was cancelled before completion
    PENDING_APPROVAL  // operation awaits manual approval before it can run
}
src/main/java/com/dalab/autoarchival/service/storage/RestorationResult.java ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
package com.dalab.autoarchival.service.storage;

import java.time.LocalDateTime;
import java.util.UUID;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Result of a restoration operation.
 *
 * <p>Mutable Lombok DTO produced by {@link ICloudStorageService#restoreData}.
 * Cold-tier restores (e.g. Glacier) are asynchronous, so {@code status} may be
 * IN_PROGRESS with {@code completedAt} still null; on failure
 * {@code errorMessage} is populated.</p>
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class RestorationResult {

    private UUID assetId;               // asset this restoration applies to
    private String operationId;         // provider-agnostic operation identifier
    private String archivedLocation;    // location the data is restored from
    private String restoredLocation;    // location the data becomes accessible at
    private String targetTier;          // requested restoration tier (e.g. "STANDARD")
    private OperationStatus status;     // outcome / current state of the restore
    private LocalDateTime initiatedAt;  // when the restore was requested
    private LocalDateTime completedAt;  // when it finished; null while in progress
    private LocalDateTime expiresAt; // For temporary restoration
    private Long dataSizeBytes;         // size of the restored data, if known
    private String errorMessage;        // failure detail; null on success
    private Double estimatedCost;       // rough estimated cost of the restore — approximate
}
src/main/java/com/dalab/autoarchival/service/storage/impl/AwsS3StorageService.java ADDED
@@ -0,0 +1,272 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.service.storage.impl;
2
+
3
+ import java.time.LocalDateTime;
4
+ import java.util.Map;
5
+ import java.util.UUID;
6
+
7
+ import org.slf4j.Logger;
8
+ import org.slf4j.LoggerFactory;
9
+ import org.springframework.beans.factory.annotation.Value;
10
+ import org.springframework.stereotype.Service;
11
+
12
+ import com.dalab.autoarchival.service.storage.ArchivalResult;
13
+ import com.dalab.autoarchival.service.storage.CostSavingsEstimate;
14
+ import com.dalab.autoarchival.service.storage.ICloudStorageService;
15
+ import com.dalab.autoarchival.service.storage.OperationStatus;
16
+ import com.dalab.autoarchival.service.storage.RestorationResult;
17
+
18
+ import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
19
+ import software.amazon.awssdk.regions.Region;
20
+ import software.amazon.awssdk.services.s3.S3Client;
21
+ import software.amazon.awssdk.services.s3.model.CopyObjectRequest;
22
+ import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
23
+ import software.amazon.awssdk.services.s3.model.HeadObjectResponse;
24
+ import software.amazon.awssdk.services.s3.model.RestoreObjectRequest;
25
+ import software.amazon.awssdk.services.s3.model.RestoreRequest;
26
+ import software.amazon.awssdk.services.s3.model.StorageClass;
27
+ import software.amazon.awssdk.services.s3.model.Tier;
28
+
29
+ /**
30
+ * AWS S3 implementation of cloud storage service for archival operations.
31
+ * Supports S3 Standard, S3 IA, S3 Glacier, and S3 Glacier Deep Archive.
32
+ */
33
+ @Service
34
+ public class AwsS3StorageService implements ICloudStorageService {
35
+
36
+ private static final Logger log = LoggerFactory.getLogger(AwsS3StorageService.class);
37
+
38
+ private final S3Client s3Client;
39
+ private final String defaultRegion;
40
+
41
+ // Cost per GB per month (USD) - these are approximate values
42
+ private static final double STANDARD_COST_PER_GB = 0.023;
43
+ private static final double IA_COST_PER_GB = 0.0125;
44
+ private static final double GLACIER_COST_PER_GB = 0.004;
45
+ private static final double DEEP_ARCHIVE_COST_PER_GB = 0.00099;
46
+
47
+ public AwsS3StorageService(@Value("${aws.region:us-east-1}") String region) {
48
+ this.defaultRegion = region;
49
+ this.s3Client = S3Client.builder()
50
+ .region(Region.of(region))
51
+ .credentialsProvider(DefaultCredentialsProvider.create())
52
+ .build();
53
+ }
54
+
55
+ @Override
56
+ public ArchivalResult archiveData(UUID assetId, String sourceLocation, String targetTier, Map<String, String> metadata) {
57
+ log.info("Archiving asset {} from {} to tier {}", assetId, sourceLocation, targetTier);
58
+
59
+ try {
60
+ // Parse S3 location (format: s3://bucket/key)
61
+ String[] parts = parseS3Location(sourceLocation);
62
+ String bucket = parts[0];
63
+ String key = parts[1];
64
+
65
+ // Get object metadata
66
+ HeadObjectResponse headResponse = s3Client.headObject(HeadObjectRequest.builder()
67
+ .bucket(bucket)
68
+ .key(key)
69
+ .build());
70
+
71
+ // Create copy request with new storage class
72
+ StorageClass storageClass = mapToS3StorageClass(targetTier);
73
+ String operationId = UUID.randomUUID().toString();
74
+
75
+ CopyObjectRequest copyRequest = CopyObjectRequest.builder()
76
+ .sourceBucket(bucket)
77
+ .sourceKey(key)
78
+ .destinationBucket(bucket)
79
+ .destinationKey(key)
80
+ .storageClass(storageClass)
81
+ .metadata(metadata)
82
+ .build();
83
+
84
+ s3Client.copyObject(copyRequest);
85
+
86
+ return ArchivalResult.builder()
87
+ .assetId(assetId)
88
+ .operationId(operationId)
89
+ .sourceLocation(sourceLocation)
90
+ .archivedLocation(sourceLocation) // Same location, different storage class
91
+ .targetTier(targetTier)
92
+ .status(OperationStatus.COMPLETED)
93
+ .initiatedAt(LocalDateTime.now())
94
+ .completedAt(LocalDateTime.now())
95
+ .dataSizeBytes(headResponse.contentLength())
96
+ .metadata(metadata)
97
+ .estimatedMonthlySavings(calculateMonthlySavings("STANDARD", targetTier, headResponse.contentLength()))
98
+ .build();
99
+
100
+ } catch (Exception e) {
101
+ log.error("Failed to archive asset {}: {}", assetId, e.getMessage(), e);
102
+ return ArchivalResult.builder()
103
+ .assetId(assetId)
104
+ .operationId(UUID.randomUUID().toString())
105
+ .sourceLocation(sourceLocation)
106
+ .targetTier(targetTier)
107
+ .status(OperationStatus.FAILED)
108
+ .initiatedAt(LocalDateTime.now())
109
+ .errorMessage(e.getMessage())
110
+ .build();
111
+ }
112
+ }
113
+
114
+ @Override
115
+ public RestorationResult restoreData(UUID assetId, String archivedLocation, String targetTier) {
116
+ log.info("Restoring asset {} from {} to tier {}", assetId, archivedLocation, targetTier);
117
+
118
+ try {
119
+ String[] parts = parseS3Location(archivedLocation);
120
+ String bucket = parts[0];
121
+ String key = parts[1];
122
+
123
+ String operationId = UUID.randomUUID().toString();
124
+
125
+ // For Glacier/Deep Archive, initiate restore request
126
+ if (targetTier.equals("STANDARD") || targetTier.equals("EXPEDITED")) {
127
+ RestoreRequest restoreRequest = RestoreRequest.builder()
128
+ .days(7) // Restore for 7 days
129
+ .glacierJobParameters(builder -> builder.tier(Tier.STANDARD))
130
+ .build();
131
+
132
+ RestoreObjectRequest request = RestoreObjectRequest.builder()
133
+ .bucket(bucket)
134
+ .key(key)
135
+ .restoreRequest(restoreRequest)
136
+ .build();
137
+
138
+ s3Client.restoreObject(request);
139
+
140
+ return RestorationResult.builder()
141
+ .assetId(assetId)
142
+ .operationId(operationId)
143
+ .archivedLocation(archivedLocation)
144
+ .restoredLocation(archivedLocation)
145
+ .targetTier(targetTier)
146
+ .status(OperationStatus.IN_PROGRESS)
147
+ .initiatedAt(LocalDateTime.now())
148
+ .expiresAt(LocalDateTime.now().plusDays(7))
149
+ .estimatedCost(0.01) // Approximate restore cost
150
+ .build();
151
+ }
152
+
153
+ // For other tiers, use copy operation
154
+ return RestorationResult.builder()
155
+ .assetId(assetId)
156
+ .operationId(operationId)
157
+ .archivedLocation(archivedLocation)
158
+ .restoredLocation(archivedLocation)
159
+ .targetTier(targetTier)
160
+ .status(OperationStatus.COMPLETED)
161
+ .initiatedAt(LocalDateTime.now())
162
+ .completedAt(LocalDateTime.now())
163
+ .build();
164
+
165
+ } catch (Exception e) {
166
+ log.error("Failed to restore asset {}: {}", assetId, e.getMessage(), e);
167
+ return RestorationResult.builder()
168
+ .assetId(assetId)
169
+ .operationId(UUID.randomUUID().toString())
170
+ .archivedLocation(archivedLocation)
171
+ .targetTier(targetTier)
172
+ .status(OperationStatus.FAILED)
173
+ .initiatedAt(LocalDateTime.now())
174
+ .errorMessage(e.getMessage())
175
+ .build();
176
+ }
177
+ }
178
+
179
    /**
     * Returns the status of a previously initiated archival operation.
     *
     * <p>NOTE(review): this is a stub — it ignores {@code operationId} and always
     * reports COMPLETED. Status should be tracked per operation (e.g. in the
     * database) before this service is relied on for asynchronous archival.
     *
     * @param operationId the operation identifier returned when archival was initiated (currently unused)
     * @return always {@link OperationStatus#COMPLETED}
     */
    @Override
    public OperationStatus getArchivalStatus(String operationId) {
        // In a real implementation, you would track operation status in a database
        // For now, return COMPLETED as archival operations are typically synchronous
        return OperationStatus.COMPLETED;
    }
185
+
186
    /**
     * Returns the status of a previously initiated restoration operation.
     *
     * <p>NOTE(review): this is a stub — it ignores {@code operationId} and always
     * reports IN_PROGRESS. Glacier restores are asynchronous, so the real status
     * should come from a HEAD request on the object (Restore header) or a tracked record.
     *
     * @param operationId the operation identifier returned when the restore was requested (currently unused)
     * @return always {@link OperationStatus#IN_PROGRESS}
     */
    @Override
    public OperationStatus getRestorationStatus(String operationId) {
        // In a real implementation, you would check the actual restore status
        // For now, return IN_PROGRESS for demonstration
        return OperationStatus.IN_PROGRESS;
    }
192
+
193
+ @Override
194
+ public CostSavingsEstimate calculateCostSavings(String sourceLocation, String targetTier, long dataSizeBytes) {
195
+ double dataSizeGB = dataSizeBytes / (1024.0 * 1024.0 * 1024.0);
196
+
197
+ double currentCost = STANDARD_COST_PER_GB * dataSizeGB;
198
+ double targetCost = getCostPerGB(targetTier) * dataSizeGB;
199
+ double savings = currentCost - targetCost;
200
+ double savingsPercentage = (savings / currentCost) * 100;
201
+
202
+ return CostSavingsEstimate.builder()
203
+ .sourceStorageClass("STANDARD")
204
+ .targetStorageClass(targetTier)
205
+ .dataSizeBytes(dataSizeBytes)
206
+ .currentMonthlyCost(currentCost)
207
+ .archivedMonthlyCost(targetCost)
208
+ .monthlySavings(savings)
209
+ .savingsPercentage(savingsPercentage)
210
+ .currency("USD")
211
+ .region(defaultRegion)
212
+ .build();
213
+ }
214
+
215
+ @Override
216
+ public boolean supportsCloudProvider(String cloudProvider) {
217
+ return "AWS".equalsIgnoreCase(cloudProvider);
218
+ }
219
+
220
+ @Override
221
+ public String[] getAvailableStorageTiers(String cloudProvider) {
222
+ if (supportsCloudProvider(cloudProvider)) {
223
+ return new String[]{"STANDARD", "STANDARD_IA", "GLACIER", "DEEP_ARCHIVE"};
224
+ }
225
+ return new String[0];
226
+ }
227
+
228
+ private String[] parseS3Location(String s3Location) {
229
+ // Parse s3://bucket/key format
230
+ if (!s3Location.startsWith("s3://")) {
231
+ throw new IllegalArgumentException("Invalid S3 location format: " + s3Location);
232
+ }
233
+
234
+ String withoutProtocol = s3Location.substring(5);
235
+ int firstSlash = withoutProtocol.indexOf('/');
236
+ if (firstSlash == -1) {
237
+ throw new IllegalArgumentException("Invalid S3 location format: " + s3Location);
238
+ }
239
+
240
+ String bucket = withoutProtocol.substring(0, firstSlash);
241
+ String key = withoutProtocol.substring(firstSlash + 1);
242
+
243
+ return new String[]{bucket, key};
244
+ }
245
+
246
+ private StorageClass mapToS3StorageClass(String targetTier) {
247
+ return switch (targetTier.toUpperCase()) {
248
+ case "STANDARD" -> StorageClass.STANDARD;
249
+ case "STANDARD_IA" -> StorageClass.STANDARD_IA;
250
+ case "GLACIER" -> StorageClass.GLACIER;
251
+ case "DEEP_ARCHIVE" -> StorageClass.DEEP_ARCHIVE;
252
+ default -> throw new IllegalArgumentException("Unsupported storage tier: " + targetTier);
253
+ };
254
+ }
255
+
256
+ private double getCostPerGB(String storageClass) {
257
+ return switch (storageClass.toUpperCase()) {
258
+ case "STANDARD" -> STANDARD_COST_PER_GB;
259
+ case "STANDARD_IA" -> IA_COST_PER_GB;
260
+ case "GLACIER" -> GLACIER_COST_PER_GB;
261
+ case "DEEP_ARCHIVE" -> DEEP_ARCHIVE_COST_PER_GB;
262
+ default -> STANDARD_COST_PER_GB;
263
+ };
264
+ }
265
+
266
+ private double calculateMonthlySavings(String sourceClass, String targetClass, long dataSizeBytes) {
267
+ double dataSizeGB = dataSizeBytes / (1024.0 * 1024.0 * 1024.0);
268
+ double sourceCost = getCostPerGB(sourceClass) * dataSizeGB;
269
+ double targetCost = getCostPerGB(targetClass) * dataSizeGB;
270
+ return Math.max(0, sourceCost - targetCost);
271
+ }
272
+ }
src/main/resources/application.properties ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # DALab AutoArchival Service Configuration
2
+ spring.application.name=da-autoarchival
3
+ server.port=8080
4
+
5
+ # Database Configuration - da_autoarchival database
6
+ spring.datasource.url=jdbc:postgresql://localhost:5432/da_autoarchival
7
+ spring.datasource.username=da_autoarchival_user
8
+ spring.datasource.password=da_autoarchival_pass
9
+ spring.datasource.driver-class-name=org.postgresql.Driver
10
+
11
+ # JPA Configuration
12
+ spring.jpa.hibernate.ddl-auto=update
13
+ spring.jpa.show-sql=false
14
+ spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect
15
+ spring.jpa.properties.hibernate.format_sql=true
16
+
17
+ # Common entities database configuration (for da-protos entities)
18
+ dalab.common.datasource.url=jdbc:postgresql://localhost:5432/dalab_common
19
+ dalab.common.datasource.username=dalab_common_user
20
+ dalab.common.datasource.password=dalab_common_pass
21
+
22
+ # Kafka Configuration
23
+ spring.kafka.bootstrap-servers=localhost:9092
24
+ spring.kafka.consumer.group-id=da-autoarchival-group
25
+ spring.kafka.consumer.auto-offset-reset=earliest
26
+ spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
27
+ spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer
28
+ # NOTE(review): trusting all packages for JSON deserialization is unsafe for production;
+ # restrict to the event packages actually exchanged (e.g. com.dalab.*).
+ spring.kafka.consumer.properties.spring.json.trusted.packages=*
29
+ spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
30
+ spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer
31
+
32
+ # Kafka Topics
33
+ dalab.kafka.topics.policy-actions=dalab.policies.actions
34
+ dalab.kafka.topics.archival-events=dalab.archival.events
35
+
36
+ # Security Configuration (Keycloak JWT)
37
+ spring.security.oauth2.resourceserver.jwt.issuer-uri=http://localhost:8180/realms/dalab
38
+ spring.security.oauth2.resourceserver.jwt.jwk-set-uri=http://localhost:8180/realms/dalab/protocol/openid-connect/certs
39
+
40
+ # Cloud Storage Configuration
41
+ # AWS S3/Glacier Configuration
42
+ aws.s3.region=us-east-1
43
+ aws.s3.bucket.archival=dalab-archival-bucket
44
+ aws.glacier.vault.name=dalab-glacier-vault
45
+
46
+ # Azure Blob Storage Configuration
47
+ azure.storage.account.name=dalabarchival
48
+ azure.storage.container.archival=archival-container
49
+
50
+ # Google Cloud Storage Configuration
51
+ gcp.storage.project.id=dalab-project
52
+ gcp.storage.bucket.archival=dalab-gcp-archival
53
+
54
+ # Archival Configuration
55
+ # 7 years
+ archival.default.retention.days=2555
56
+ # 30% savings threshold
+ archival.cost.optimization.threshold=0.3
57
+ archival.batch.size=100
58
+
59
+ # Actuator Configuration
60
+ management.endpoints.web.exposure.include=health,info,metrics,prometheus
61
+ management.endpoint.health.show-details=when-authorized
62
+ management.metrics.export.prometheus.enabled=true
63
+
64
+ # Logging Configuration
65
+ logging.level.com.dalab.autoarchival=INFO
66
+ logging.level.org.springframework.kafka=WARN
67
+ logging.level.org.springframework.security=WARN
src/test/java/com/dalab/autoarchival/controller/ArchivalConfigControllerTest.java ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.controller;
2
+
3
+ import com.dalab.autoarchival.dto.ArchivalConfigDTO;
4
+ import com.dalab.autoarchival.service.IArchivalConfigService;
5
+ import com.fasterxml.jackson.databind.ObjectMapper;
6
+ import org.junit.jupiter.api.BeforeEach;
7
+ import org.junit.jupiter.api.Test;
8
+ import org.springframework.beans.factory.annotation.Autowired;
9
+ import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
10
+ import org.springframework.boot.test.mock.mockito.MockBean;
11
+ import org.springframework.http.MediaType;
12
+ import org.springframework.security.test.context.support.WithMockUser;
13
+ import org.springframework.test.web.servlet.MockMvc;
14
+
15
+ import java.util.HashMap;
16
+
17
+ import static org.mockito.ArgumentMatchers.any;
18
+ import static org.mockito.Mockito.*;
19
+ import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
20
+ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
21
+ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
22
+
23
+ @WebMvcTest(ArchivalConfigController.class)
24
+ class ArchivalConfigControllerTest {
25
+
26
+ @Autowired
27
+ private MockMvc mockMvc;
28
+
29
+ @MockBean
30
+ private IArchivalConfigService archivalConfigService;
31
+
32
+ @Autowired
33
+ private ObjectMapper objectMapper;
34
+
35
+ private ArchivalConfigDTO sampleConfigDTO;
36
+
37
+ @BeforeEach
38
+ void setUp() {
39
+ sampleConfigDTO = ArchivalConfigDTO.builder()
40
+ .enabled(true)
41
+ .defaultArchivalTier("S3_GLACIER")
42
+ .defaultRetentionDays(365L)
43
+ .requireApprovalForArchival(true)
44
+ .requireApprovalForRestore(false)
45
+ .providerConfigs(new HashMap<>())
46
+ .build();
47
+ }
48
+
49
+ @Test
50
+ @WithMockUser(authorities = "ROLE_ADMIN")
51
+ void getArchivalConfiguration_AsAdmin_ShouldReturnConfig() throws Exception {
52
+ when(archivalConfigService.getArchivalConfig()).thenReturn(sampleConfigDTO);
53
+
54
+ mockMvc.perform(get("/api/v1/archival/config"))
55
+ .andExpect(status().isOk())
56
+ .andExpect(jsonPath("$.enabled").value(true))
57
+ .andExpect(jsonPath("$.defaultArchivalTier").value("S3_GLACIER"));
58
+ }
59
+
60
+ @Test
61
+ @WithMockUser(authorities = "ROLE_DATA_STEWARD")
62
+ void getArchivalConfiguration_AsDataSteward_ShouldReturnConfig() throws Exception {
63
+ when(archivalConfigService.getArchivalConfig()).thenReturn(sampleConfigDTO);
64
+
65
+ mockMvc.perform(get("/api/v1/archival/config"))
66
+ .andExpect(status().isOk());
67
+ }
68
+
69
+ @Test
70
+ @WithMockUser(authorities = "ROLE_USER")
71
+ void getArchivalConfiguration_AsUser_ShouldBeForbidden() throws Exception {
72
+ mockMvc.perform(get("/api/v1/archival/config"))
73
+ .andExpect(status().isForbidden());
74
+ }
75
+
76
+ @Test
77
+ @WithMockUser(authorities = "ROLE_ADMIN")
78
+ void updateArchivalConfiguration_AsAdmin_ShouldReturnOk() throws Exception {
79
+ doNothing().when(archivalConfigService).updateArchivalConfig(any(ArchivalConfigDTO.class));
80
+
81
+ mockMvc.perform(put("/api/v1/archival/config")
82
+ .with(csrf()) // Add CSRF token for PUT if Spring Security CSRF is enabled
83
+ .contentType(MediaType.APPLICATION_JSON)
84
+ .content(objectMapper.writeValueAsString(sampleConfigDTO)))
85
+ .andExpect(status().isOk());
86
+
87
+ verify(archivalConfigService, times(1)).updateArchivalConfig(any(ArchivalConfigDTO.class));
88
+ }
89
+
90
+ @Test
91
+ @WithMockUser(authorities = "ROLE_DATA_STEWARD")
92
+ void updateArchivalConfiguration_AsDataSteward_ShouldBeForbidden() throws Exception {
93
+ mockMvc.perform(put("/api/v1/archival/config")
94
+ .with(csrf())
95
+ .contentType(MediaType.APPLICATION_JSON)
96
+ .content(objectMapper.writeValueAsString(sampleConfigDTO)))
97
+ .andExpect(status().isForbidden());
98
+ }
99
+ }
src/test/java/com/dalab/autoarchival/controller/ArchivalTaskControllerTest.java ADDED
@@ -0,0 +1,192 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.controller;
2
+
3
+ import static org.mockito.ArgumentMatchers.*;
4
+ import static org.mockito.Mockito.*;
5
+ import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.*;
6
+ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
7
+ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
8
+
9
+ import java.time.LocalDateTime;
10
+ import java.util.Collections;
11
+ import java.util.List;
12
+ import java.util.UUID;
13
+
14
+ import org.junit.jupiter.api.BeforeEach;
15
+ import org.junit.jupiter.api.Test;
16
+ import org.springframework.beans.factory.annotation.Autowired;
17
+ import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
18
+ import org.springframework.boot.test.mock.mockito.MockBean;
19
+ import org.springframework.data.domain.Pageable;
20
+ import org.springframework.http.MediaType;
21
+ import org.springframework.security.test.context.support.WithMockUser;
22
+ import org.springframework.test.web.servlet.MockMvc;
23
+
24
+ import com.dalab.autoarchival.dto.ArchivalTaskListResponse;
25
+ import com.dalab.autoarchival.dto.ArchivalTaskRequest;
26
+ import com.dalab.autoarchival.dto.ArchivalTaskResponse;
27
+ import com.dalab.autoarchival.dto.ArchivalTaskStatusDTO;
28
+ import com.dalab.autoarchival.dto.RestoreRequestDTO;
29
+ import com.dalab.autoarchival.dto.RestoreResponseDTO;
30
+ import com.dalab.autoarchival.service.IArchivalTaskService;
31
+ import com.fasterxml.jackson.databind.ObjectMapper;
32
+
33
/**
 * Web-layer tests for {@link ArchivalTaskController}: exercises task submission,
 * status lookup, listing, approve/reject, and restore endpoints with the task
 * service mocked, asserting HTTP status codes and JSON payload mapping.
 */
@WebMvcTest(ArchivalTaskController.class)
class ArchivalTaskControllerTest {

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    private IArchivalTaskService taskService;

    @Autowired
    private ObjectMapper objectMapper;

    // Shared fixtures built once per test in setUp().
    private ArchivalTaskRequest sampleTaskRequest;
    private ArchivalTaskResponse sampleTaskResponse;
    private ArchivalTaskStatusDTO sampleTaskStatusDTO;
    private String sampleTaskId;

    @BeforeEach
    void setUp() {
        sampleTaskId = UUID.randomUUID().toString();

        // A scope of two assets; the same scope object is reused in the status DTO below.
        ArchivalTaskRequest.ArchivalScope scope = ArchivalTaskRequest.ArchivalScope.builder()
                .assetIds(List.of("asset1", "asset2"))
                .build();
        sampleTaskRequest = ArchivalTaskRequest.builder()
                .taskName("Test Archival Task")
                .scope(scope)
                .build();

        sampleTaskResponse = ArchivalTaskResponse.builder()
                .taskId(sampleTaskId)
                .taskName("Test Archival Task")
                .status("SUBMITTED")
                .submittedAt(LocalDateTime.now())
                .build();

        sampleTaskStatusDTO = ArchivalTaskStatusDTO.builder()
                .taskId(sampleTaskId)
                .taskName("Test Archival Task")
                .status("SUBMITTED")
                .submittedAt(LocalDateTime.now())
                .scope(scope)
                .totalAssetsInScope(2)
                .build();
    }

    @Test
    @WithMockUser(authorities = "ROLE_ADMIN")
    void submitArchivalTask_AsAdmin_ShouldReturnAccepted() throws Exception {
        when(taskService.submitArchivalTask(any(ArchivalTaskRequest.class))).thenReturn(sampleTaskResponse);

        // Submission is asynchronous, so the controller is expected to answer 202 Accepted.
        mockMvc.perform(post("/api/v1/archival/tasks")
                        .with(csrf())
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(objectMapper.writeValueAsString(sampleTaskRequest)))
                .andExpect(status().isAccepted())
                .andExpect(jsonPath("$.taskId").value(sampleTaskId))
                .andExpect(jsonPath("$.status").value("SUBMITTED"));
    }

    @Test
    @WithMockUser(authorities = "ROLE_ADMIN")
    void submitArchivalTask_WithInvalidScope_ShouldReturnBadRequest() throws Exception {
        ArchivalTaskRequest invalidRequest = ArchivalTaskRequest.builder().taskName("Invalid Task").scope(null).build();
        // A FAILED_VALIDATION service response should be translated into HTTP 400.
        ArchivalTaskResponse validationResponse = ArchivalTaskResponse.builder()
                .status("FAILED_VALIDATION").message("Scope is null").build();
        when(taskService.submitArchivalTask(any(ArchivalTaskRequest.class))).thenReturn(validationResponse);

        mockMvc.perform(post("/api/v1/archival/tasks")
                        .with(csrf())
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(objectMapper.writeValueAsString(invalidRequest)))
                .andExpect(status().isBadRequest());
    }

    @Test
    @WithMockUser(authorities = "ROLE_USER") // Assuming USER can get status
    void getArchivalTaskStatus_WhenTaskExists_ShouldReturnStatus() throws Exception {
        when(taskService.getTaskStatus(sampleTaskId)).thenReturn(sampleTaskStatusDTO);

        mockMvc.perform(get("/api/v1/archival/tasks/{taskId}", sampleTaskId))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.taskId").value(sampleTaskId))
                .andExpect(jsonPath("$.status").value("SUBMITTED"));
    }

    @Test
    @WithMockUser(authorities = "ROLE_USER")
    void getArchivalTaskStatus_WhenTaskNotExists_ShouldReturnNotFound() throws Exception {
        // Service returns null for unknown ids; controller should map that to 404.
        when(taskService.getTaskStatus(sampleTaskId)).thenReturn(null);

        mockMvc.perform(get("/api/v1/archival/tasks/{taskId}", sampleTaskId))
                .andExpect(status().isNotFound());
    }

    @Test
    @WithMockUser(authorities = "ROLE_USER")
    void listArchivalTasks_ShouldReturnPageOfTasks() throws Exception {
        // Single-page response containing the one sample task.
        ArchivalTaskListResponse listResponse = ArchivalTaskListResponse.builder()
                .tasks(Collections.singletonList(sampleTaskStatusDTO))
                .pageNumber(0).pageSize(20).totalElements(1).totalPages(1).last(true).first(true).numberOfElements(1)
                .build();
        when(taskService.listTasks(any(Pageable.class))).thenReturn(listResponse);

        mockMvc.perform(get("/api/v1/archival/tasks").param("page", "0").param("size", "20"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.tasks[0].taskId").value(sampleTaskId));
    }

    @Test
    @WithMockUser(authorities = "ROLE_ADMIN")
    void approveArchivalTask_AsAdmin_ShouldReturnOk() throws Exception {
        ArchivalTaskStatusDTO approvedStatus = ArchivalTaskStatusDTO.builder().taskId(sampleTaskId).status("APPROVED").build();
        when(taskService.approveTask(eq(sampleTaskId), any())).thenReturn(approvedStatus);

        mockMvc.perform(post("/api/v1/archival/tasks/{taskId}/approve", sampleTaskId)
                        .with(csrf()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("APPROVED"));
    }

    @Test
    @WithMockUser(authorities = "ROLE_ADMIN")
    void approveArchivalTask_NotApprovable_ShouldReturnConflict() throws Exception {
        // Approving a task that is already COMPLETED should yield HTTP 409 Conflict.
        ArchivalTaskStatusDTO conflictStatus = ArchivalTaskStatusDTO.builder().taskId(sampleTaskId).status("COMPLETED").build();
        when(taskService.approveTask(eq(sampleTaskId), any())).thenReturn(conflictStatus);

        mockMvc.perform(post("/api/v1/archival/tasks/{taskId}/approve", sampleTaskId)
                        .with(csrf()))
                .andExpect(status().isConflict());
    }


    @Test
    @WithMockUser(authorities = "ROLE_ADMIN")
    void rejectArchivalTask_AsAdmin_ShouldReturnOk() throws Exception {
        ArchivalTaskStatusDTO rejectedStatus = ArchivalTaskStatusDTO.builder().taskId(sampleTaskId).status("REJECTED").build();
        when(taskService.rejectTask(eq(sampleTaskId), any())).thenReturn(rejectedStatus);

        mockMvc.perform(post("/api/v1/archival/tasks/{taskId}/reject", sampleTaskId)
                        .with(csrf()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("REJECTED"));
    }

    @Test
    @WithMockUser(authorities = "ROLE_ADMIN")
    void requestRestoreArchivalTask_AsAdmin_ShouldReturnAccepted() throws Exception {
        RestoreRequestDTO restoreRequest = RestoreRequestDTO.builder().restoreTier("STANDARD").retentionDays(7).build();
        RestoreResponseDTO restoreResponse = RestoreResponseDTO.builder().archivalTaskId(sampleTaskId).status("RESTORATION_REQUESTED").build();
        when(taskService.requestRestore(eq(sampleTaskId), any(RestoreRequestDTO.class))).thenReturn(restoreResponse);

        // Restores are asynchronous as well, hence 202 Accepted.
        mockMvc.perform(post("/api/v1/archival/tasks/{taskId}/restore", sampleTaskId)
                        .with(csrf())
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(objectMapper.writeValueAsString(restoreRequest)))
                .andExpect(status().isAccepted())
                .andExpect(jsonPath("$.status").value("RESTORATION_REQUESTED"));
    }
}
src/test/java/com/dalab/autoarchival/service/impl/ArchivalConfigServiceImplTest.java ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.service.impl;
2
+
3
+ import com.dalab.autoarchival.dto.ArchivalConfigDTO;
4
+ import com.dalab.autoarchival.mapper.ArchivalConfigMapper;
5
+ import com.dalab.autoarchival.model.ArchivalConfigEntity;
6
+ import com.dalab.autoarchival.repository.ArchivalConfigRepository;
7
+ import org.junit.jupiter.api.BeforeEach;
8
+ import org.junit.jupiter.api.Test;
9
+ import org.junit.jupiter.api.extension.ExtendWith;
10
+ import org.mockito.InjectMocks;
11
+ import org.mockito.Mock;
12
+ import org.mockito.junit.jupiter.MockitoExtension;
13
+
14
+ import java.util.Optional;
15
+ import java.util.HashMap;
16
+
17
+ import static org.junit.jupiter.api.Assertions.*;
18
+ import static org.mockito.Mockito.*;
19
+
20
+ @ExtendWith(MockitoExtension.class)
21
+ class ArchivalConfigServiceImplTest {
22
+
23
+ @Mock
24
+ private ArchivalConfigRepository configRepository;
25
+
26
+ @Mock
27
+ private ArchivalConfigMapper configMapper;
28
+
29
+ @InjectMocks
30
+ private ArchivalConfigServiceImpl configService;
31
+
32
+ private ArchivalConfigEntity sampleEntity;
33
+ private ArchivalConfigDTO sampleDTO;
34
+ private static final Long GLOBAL_CONFIG_ID = 1L;
35
+
36
+ @BeforeEach
37
+ void setUp() {
38
+ sampleEntity = new ArchivalConfigEntity(
39
+ GLOBAL_CONFIG_ID, true, "S3_STANDARD", 30L, true, true, new HashMap<>()
40
+ );
41
+ sampleDTO = ArchivalConfigDTO.builder()
42
+ .enabled(true).defaultArchivalTier("S3_STANDARD").defaultRetentionDays(30L)
43
+ .requireApprovalForArchival(true).requireApprovalForRestore(true).providerConfigs(new HashMap<>()).build();
44
+ }
45
+
46
+ @Test
47
+ void init_WhenConfigExists_ShouldNotSave() {
48
+ when(configRepository.existsById(GLOBAL_CONFIG_ID)).thenReturn(true);
49
+ configService.init();
50
+ verify(configRepository, never()).save(any(ArchivalConfigEntity.class));
51
+ }
52
+
53
+ @Test
54
+ void init_WhenConfigNotExists_ShouldSaveDefault() {
55
+ when(configRepository.existsById(GLOBAL_CONFIG_ID)).thenReturn(false);
56
+ when(configRepository.save(any(ArchivalConfigEntity.class))).thenReturn(sampleEntity); // Mock save
57
+ configService.init();
58
+ verify(configRepository, times(1)).save(any(ArchivalConfigEntity.class));
59
+ }
60
+
61
+ @Test
62
+ void getArchivalConfig_ShouldReturnMappedDTO() {
63
+ when(configRepository.findById(GLOBAL_CONFIG_ID)).thenReturn(Optional.of(sampleEntity));
64
+ when(configMapper.toDto(sampleEntity)).thenReturn(sampleDTO);
65
+
66
+ ArchivalConfigDTO result = configService.getArchivalConfig();
67
+
68
+ assertNotNull(result);
69
+ assertEquals(sampleDTO.getDefaultArchivalTier(), result.getDefaultArchivalTier());
70
+ verify(configRepository, times(1)).findById(GLOBAL_CONFIG_ID);
71
+ verify(configMapper, times(1)).toDto(sampleEntity);
72
+ }
73
+
74
+ @Test
75
+ void updateArchivalConfig_ShouldUpdateAndSaveFolder() {
76
+ when(configRepository.findById(GLOBAL_CONFIG_ID)).thenReturn(Optional.of(sampleEntity));
77
+ // configMapper.updateEntityFromDto is void, so mock it if necessary (e.g. with doNothing() or verify interaction)
78
+ doNothing().when(configMapper).updateEntityFromDto(eq(sampleDTO), eq(sampleEntity));
79
+ when(configRepository.save(any(ArchivalConfigEntity.class))).thenReturn(sampleEntity);
80
+
81
+ configService.updateArchivalConfig(sampleDTO);
82
+
83
+ verify(configRepository, times(1)).findById(GLOBAL_CONFIG_ID);
84
+ verify(configMapper, times(1)).updateEntityFromDto(sampleDTO, sampleEntity);
85
+ verify(configRepository, times(1)).save(sampleEntity);
86
+ assertEquals(GLOBAL_CONFIG_ID, sampleEntity.getId()); // Ensure ID is preserved
87
+ }
88
+
89
+ @Test
90
+ void updateArchivalConfig_WhenDtoIsNull_ShouldThrowException() {
91
+ assertThrows(IllegalArgumentException.class, () -> {
92
+ configService.updateArchivalConfig(null);
93
+ });
94
+ }
95
+
96
+ @Test
97
+ void updateArchivalConfig_WhenEntityNotFound_ShouldThrowException() {
98
+ when(configRepository.findById(GLOBAL_CONFIG_ID)).thenReturn(Optional.empty());
99
+ assertThrows(IllegalStateException.class, () -> {
100
+ configService.updateArchivalConfig(sampleDTO);
101
+ });
102
+ }
103
+ }
src/test/java/com/dalab/autoarchival/service/impl/ArchivalTaskServiceImplTest.java ADDED
@@ -0,0 +1,246 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package com.dalab.autoarchival.service.impl;
2
+
3
+ import static org.junit.jupiter.api.Assertions.*;
4
+ import static org.mockito.ArgumentMatchers.*;
5
+ import static org.mockito.Mockito.*;
6
+
7
+ import java.time.LocalDateTime;
8
+ import java.util.Collections;
9
+ import java.util.List;
10
+ import java.util.Optional;
11
+ import java.util.UUID;
12
+
13
+ import org.junit.jupiter.api.BeforeEach;
14
+ import org.junit.jupiter.api.Test;
15
+ import org.junit.jupiter.api.extension.ExtendWith;
16
+ import org.mockito.ArgumentCaptor;
17
+ import org.mockito.InjectMocks;
18
+ import org.mockito.Mock;
19
+ import org.mockito.junit.jupiter.MockitoExtension;
20
+ import org.springframework.data.domain.Page;
21
+ import org.springframework.data.domain.PageImpl;
22
+ import org.springframework.data.domain.PageRequest;
23
+ import org.springframework.data.domain.Pageable;
24
+
25
+ import com.dalab.autoarchival.dto.ArchivalConfigDTO;
26
+ import com.dalab.autoarchival.dto.ArchivalTaskListResponse;
27
+ import com.dalab.autoarchival.dto.ArchivalTaskRequest;
28
+ import com.dalab.autoarchival.dto.ArchivalTaskResponse;
29
+ import com.dalab.autoarchival.dto.ArchivalTaskStatusDTO;
30
+ import com.dalab.autoarchival.dto.RestoreRequestDTO;
31
+ import com.dalab.autoarchival.dto.RestoreResponseDTO;
32
+ import com.dalab.autoarchival.dto.TaskApprovalRequest;
33
+ import com.dalab.autoarchival.mapper.ArchivalTaskMapper;
34
+ import com.dalab.autoarchival.model.ArchivalStatus;
35
+ import com.dalab.autoarchival.model.ArchivalTaskEntity;
36
+ import com.dalab.autoarchival.repository.ArchivalTaskRepository;
37
+ import com.dalab.autoarchival.service.IArchivalConfigService;
38
+
39
+ @ExtendWith(MockitoExtension.class)
40
+ class ArchivalTaskServiceImplTest {
41
+
42
+ @Mock
43
+ private ArchivalTaskRepository taskRepository;
44
+
45
+ @Mock
46
+ private ArchivalTaskMapper taskMapper;
47
+
48
+ @Mock
49
+ private IArchivalConfigService archivalConfigService; // JPA version
50
+
51
+ @InjectMocks
52
+ private ArchivalTaskServiceImpl taskService;
53
+
54
+ private ArchivalTaskRequest sampleTaskRequest;
55
+ private ArchivalTaskEntity sampleTaskEntity;
56
+ private ArchivalTaskStatusDTO sampleTaskStatusDTO;
57
+ private ArchivalTaskResponse sampleTaskResponse;
58
+ private String sampleTaskId;
59
+ private ArchivalConfigDTO globalConfigNoApproval;
60
+ private ArchivalConfigDTO globalConfigWithApproval;
61
+
62
+ @BeforeEach
63
+ void setUp() {
64
+ sampleTaskId = UUID.randomUUID().toString();
65
+ ArchivalTaskRequest.ArchivalScope scope = ArchivalTaskRequest.ArchivalScope.builder()
66
+ .assetIds(List.of("asset1")).build();
67
+
68
+ sampleTaskRequest = ArchivalTaskRequest.builder()
69
+ .taskName("Test Task")
70
+ .scope(scope)
71
+ .triggeredBy("user@test.com")
72
+ .build();
73
+
74
+ sampleTaskEntity = new ArchivalTaskEntity();
75
+ sampleTaskEntity.setTaskId(sampleTaskId);
76
+ sampleTaskEntity.setTaskName("Test Task");
77
+ sampleTaskEntity.setStatus(ArchivalStatus.SUBMITTED);
78
+ sampleTaskEntity.setSubmittedAt(LocalDateTime.now());
79
+ sampleTaskEntity.setScope(new ArchivalTaskEntity.ArchivalScopeData(List.of("asset1")));
80
+
81
+ sampleTaskStatusDTO = ArchivalTaskStatusDTO.builder().taskId(sampleTaskId).status("SUBMITTED").build();
82
+ sampleTaskResponse = ArchivalTaskResponse.builder().taskId(sampleTaskId).status("SUBMITTED").build();
83
+
84
+ globalConfigNoApproval = ArchivalConfigDTO.builder().requireApprovalForArchival(false).build();
85
+ globalConfigWithApproval = ArchivalConfigDTO.builder().requireApprovalForArchival(true).build();
86
+ }
87
+
88
+ @Test
89
+ void submitArchivalTask_ValidRequest_NoApprovalNeeded_ShouldSaveAndReturnResponse() {
90
+ when(archivalConfigService.getArchivalConfig()).thenReturn(globalConfigNoApproval);
91
+ when(taskMapper.requestToEntity(sampleTaskRequest)).thenReturn(sampleTaskEntity);
92
+ when(taskRepository.save(any(ArchivalTaskEntity.class))).thenReturn(sampleTaskEntity);
93
+ when(taskMapper.entityToTaskResponse(sampleTaskEntity)).thenReturn(sampleTaskResponse);
94
+
95
+ ArchivalTaskResponse response = taskService.submitArchivalTask(sampleTaskRequest);
96
+
97
+ assertNotNull(response);
98
+ assertEquals("SUBMITTED", response.getStatus());
99
+ ArgumentCaptor<ArchivalTaskEntity> entityCaptor = ArgumentCaptor.forClass(ArchivalTaskEntity.class);
100
+ verify(taskRepository).save(entityCaptor.capture());
101
+ assertEquals(ArchivalStatus.SUBMITTED, entityCaptor.getValue().getStatus());
102
+ assertNotNull(entityCaptor.getValue().getAssetStatuses());
103
+ assertEquals(1, entityCaptor.getValue().getAssetsPendingArchival());
104
+ }
105
+
106
+ @Test
107
+ void submitArchivalTask_ValidRequest_ApprovalNeeded_ShouldSetPendingApproval() {
108
+ when(archivalConfigService.getArchivalConfig()).thenReturn(globalConfigWithApproval);
109
+ when(taskMapper.requestToEntity(sampleTaskRequest)).thenReturn(sampleTaskEntity);
110
+ when(taskRepository.save(any(ArchivalTaskEntity.class))).thenReturn(sampleTaskEntity);
111
+ when(taskMapper.entityToTaskResponse(sampleTaskEntity)).thenReturn(sampleTaskResponse);
112
+
113
+ taskService.submitArchivalTask(sampleTaskRequest);
114
+
115
+ ArgumentCaptor<ArchivalTaskEntity> entityCaptor = ArgumentCaptor.forClass(ArchivalTaskEntity.class);
116
+ verify(taskRepository).save(entityCaptor.capture());
117
+ assertEquals(ArchivalStatus.PENDING_APPROVAL, entityCaptor.getValue().getStatus());
118
+ }
119
+
120
+ @Test
121
+ void submitArchivalTask_InvalidScope_ShouldReturnFailedValidation() {
122
+ ArchivalTaskRequest invalidRequest = ArchivalTaskRequest.builder().scope(null).build();
123
+ ArchivalTaskResponse response = taskService.submitArchivalTask(invalidRequest);
124
+ assertEquals("FAILED_VALIDATION", response.getStatus());
125
+ verify(taskRepository, never()).save(any());
126
+ }
127
+
128
+ @Test
129
+ void getTaskStatus_TaskExists_ShouldReturnDTO() {
130
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.of(sampleTaskEntity));
131
+ when(taskMapper.entityToStatusDTO(sampleTaskEntity)).thenReturn(sampleTaskStatusDTO);
132
+
133
+ ArchivalTaskStatusDTO result = taskService.getTaskStatus(sampleTaskId);
134
+
135
+ assertNotNull(result);
136
+ assertEquals(sampleTaskId, result.getTaskId());
137
+ }
138
+
139
+ @Test
140
+ void getTaskStatus_TaskNotExists_ShouldReturnNull() {
141
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.empty());
142
+ ArchivalTaskStatusDTO result = taskService.getTaskStatus(sampleTaskId);
143
+ assertNull(result);
144
+ }
145
+
146
+ @Test
147
+ void listTasks_ShouldReturnPaginatedDTOs() {
148
+ Pageable pageable = PageRequest.of(0, 10);
149
+ Page<ArchivalTaskEntity> page = new PageImpl<>(Collections.singletonList(sampleTaskEntity), pageable, 1);
150
+ when(taskRepository.findAll(pageable)).thenReturn(page);
151
+ when(taskMapper.entityToStatusDTO(sampleTaskEntity)).thenReturn(sampleTaskStatusDTO);
152
+
153
+ ArchivalTaskListResponse response = taskService.listTasks(pageable);
154
+
155
+ assertNotNull(response);
156
+ assertEquals(1, response.getTasks().size());
157
+ assertEquals(sampleTaskId, response.getTasks().get(0).getTaskId());
158
+ }
159
+
160
+ @Test
161
+ void approveTask_TaskExistsAndPending_ShouldApprove() {
162
+ sampleTaskEntity.setStatus(ArchivalStatus.PENDING_APPROVAL);
163
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.of(sampleTaskEntity));
164
+ when(taskRepository.save(any(ArchivalTaskEntity.class))).thenReturn(sampleTaskEntity);
165
+ when(taskMapper.entityToStatusDTO(sampleTaskEntity)).thenAnswer(inv -> {
166
+ ArchivalTaskEntity e = inv.getArgument(0);
167
+ return ArchivalTaskStatusDTO.builder().taskId(e.getTaskId()).status(e.getStatus().name()).build();
168
+ });
169
+
170
+ TaskApprovalRequest approvalReq = TaskApprovalRequest.builder().comments("Approved").build();
171
+ ArchivalTaskStatusDTO result = taskService.approveTask(sampleTaskId, approvalReq);
172
+
173
+ assertEquals("APPROVED", result.getStatus());
174
+ assertEquals("Approved", sampleTaskEntity.getApprovalComments());
175
+ verify(taskRepository).save(sampleTaskEntity);
176
+ }
177
+
178
+ @Test
179
+ void approveTask_TaskNotPending_ShouldReturnCurrentStatusWithErrors() {
180
+ sampleTaskEntity.setStatus(ArchivalStatus.COMPLETED);
181
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.of(sampleTaskEntity));
182
+ when(taskMapper.entityToStatusDTO(sampleTaskEntity)).thenReturn(ArchivalTaskStatusDTO.builder().status("COMPLETED").build());
183
+
184
+ ArchivalTaskStatusDTO result = taskService.approveTask(sampleTaskId, new TaskApprovalRequest());
185
+
186
+ assertEquals("COMPLETED", result.getStatus());
187
+ assertNotNull(result.getErrorMessages());
188
+ assertFalse(result.getErrorMessages().isEmpty());
189
+ verify(taskRepository, never()).save(any());
190
+ }
191
+
192
+ @Test
193
+ void approveTask_TaskNotFound_ShouldThrowException() {
194
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.empty());
195
+ assertThrows(ArchivalTaskNotFoundException.class, () -> {
196
+ taskService.approveTask(sampleTaskId, new TaskApprovalRequest());
197
+ });
198
+ }
199
+
200
+ // Similar tests for rejectTask and requestRestore
201
+ @Test
202
+ void rejectTask_TaskExistsAndPending_ShouldReject() {
203
+ sampleTaskEntity.setStatus(ArchivalStatus.PENDING_APPROVAL);
204
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.of(sampleTaskEntity));
205
+ when(taskRepository.save(any(ArchivalTaskEntity.class))).thenReturn(sampleTaskEntity);
206
+ when(taskMapper.entityToStatusDTO(sampleTaskEntity)).thenAnswer(inv -> {
207
+ ArchivalTaskEntity e = inv.getArgument(0);
208
+ return ArchivalTaskStatusDTO.builder().taskId(e.getTaskId()).status(e.getStatus().name()).build();
209
+ });
210
+
211
+ TaskApprovalRequest rejectionReq = TaskApprovalRequest.builder().comments("Rejected").build();
212
+ ArchivalTaskStatusDTO result = taskService.rejectTask(sampleTaskId, rejectionReq);
213
+
214
+ assertEquals("REJECTED", result.getStatus());
215
+ assertEquals("Rejected", sampleTaskEntity.getRejectionComments());
216
+ verify(taskRepository).save(sampleTaskEntity);
217
+ }
218
+
219
+ @Test
220
+ void requestRestore_TaskCompleted_ShouldInitiateRestore() {
221
+ sampleTaskEntity.setStatus(ArchivalStatus.COMPLETED);
222
+ RestoreRequestDTO restoreDto = RestoreRequestDTO.builder().restoreTier("STANDARD").retentionDays(1).build();
223
+ ArchivalTaskEntity.RestoreRequestData restoreData = new ArchivalTaskEntity.RestoreRequestData("STANDARD", 1, null, null, null);
224
+
225
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.of(sampleTaskEntity));
226
+ when(taskMapper.restoreRequestDtoToData(restoreDto)).thenReturn(restoreData);
227
+ when(taskRepository.save(any(ArchivalTaskEntity.class))).thenReturn(sampleTaskEntity);
228
+
229
+ RestoreResponseDTO response = taskService.requestRestore(sampleTaskId, restoreDto);
230
+
231
+ assertEquals(ArchivalStatus.RESTORATION_REQUESTED.name(), response.getStatus());
232
+ assertNotNull(sampleTaskEntity.getRestoreRequestDetails());
233
+ verify(taskRepository).save(sampleTaskEntity);
234
+ }
235
+
236
+ @Test
237
+ void requestRestore_TaskNotRestorable_ShouldReturnNotRestorable() {
238
+ sampleTaskEntity.setStatus(ArchivalStatus.IN_PROGRESS);
239
+ RestoreRequestDTO restoreDto = RestoreRequestDTO.builder().build();
240
+ when(taskRepository.findById(sampleTaskId)).thenReturn(Optional.of(sampleTaskEntity));
241
+
242
+ RestoreResponseDTO response = taskService.requestRestore(sampleTaskId, restoreDto);
243
+ assertEquals("NOT_RESTORABLE", response.getStatus());
244
+ verify(taskRepository, never()).save(any());
245
+ }
246
+ }