Merge pull request 'refactor-architecture' (#38) from refactor-architecture into main

Reviewed-on: #38
This commit is contained in:
rov 2026-03-28 14:40:34 -03:00
commit 28ed23b0c3
7 changed files with 120 additions and 7 deletions

View File

@ -7,6 +7,7 @@ import com.magamochi.content.service.ContentDownloadService;
import com.magamochi.content.service.ContentImportService;
import com.magamochi.content.service.ContentService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
@ -15,6 +16,9 @@ import jakarta.validation.constraints.NotNull;
import java.io.IOException;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springdoc.core.annotations.ParameterObject;
import org.springframework.data.domain.Pageable;
import org.springframework.data.web.PageableDefault;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
@ -105,12 +109,15 @@ public class ContentController {
}
@Operation(
summary = "Get a list of manga import jobs",
description = "Returns a list of manga import jobs.",
summary = "Get a paginated list of manga import jobs",
description =
"Returns a paginated list of manga import jobs with optional filters and global status counts.",
tags = {"Content"},
operationId = "getMangaImportJobs")
@GetMapping(value = "/import/jobs")
public DefaultResponseDTO<List<MangaImportJobDTO>> requestPresignedImport() {
return DefaultResponseDTO.ok(contentImportService.getImportJobs());
public DefaultResponseDTO<MangaImportJobPageResponseDTO> getMangaImportJobs(
@ParameterObject MangaImportJobFilterDTO filter,
@Parameter(hidden = true) @ParameterObject @PageableDefault Pageable pageable) {
return DefaultResponseDTO.ok(contentImportService.getImportJobs(filter, pageable));
}
}

View File

@ -0,0 +1,5 @@
package com.magamochi.content.model.dto;

import com.magamochi.content.model.enumeration.ImportJobStatus;

/**
 * Optional filter criteria for listing manga import jobs.
 *
 * @param searchQuery free-text term; matched case-insensitively against the job's original
 *     filename, and — when numeric — against the job id, MAL id, and AniList id (see
 *     MangaImportJobSpecification). Null/blank disables text filtering.
 * @param status restricts results to a single job status; null returns all statuses
 */
public record MangaImportJobFilterDTO(String searchQuery, ImportJobStatus status) {}

View File

@ -0,0 +1,11 @@
package com.magamochi.content.model.dto;

import org.springframework.data.domain.Page;

/**
 * One page of manga import jobs plus global (unfiltered) per-status counts.
 *
 * @param page the requested page of jobs after filtering and pagination
 * @param totalJobs total number of jobs across all statuses, ignoring filters
 * @param pendingJobs global count of jobs in PENDING status
 * @param processingJobs global count of jobs in PROCESSING status
 * @param completedJobs global count of jobs in SUCCESS status
 * @param failedJobs global count of jobs in FAILED status
 */
public record MangaImportJobPageResponseDTO(
    Page<MangaImportJobDTO> page,
    long totalJobs,
    long pendingJobs,
    long processingJobs,
    long completedJobs,
    long failedJobs) {}

View File

@ -4,9 +4,13 @@ import com.magamochi.content.model.entity.MangaImportJob;
import com.magamochi.content.model.enumeration.ImportJobStatus;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.stereotype.Repository;
@Repository
public interface MangaImportJobRepository extends JpaRepository<MangaImportJob, Long> {
public interface MangaImportJobRepository
extends JpaRepository<MangaImportJob, Long>, JpaSpecificationExecutor<MangaImportJob> {
List<MangaImportJob> findByStatusOrderByIdAsc(ImportJobStatus importJobStatus);
long countByStatus(ImportJobStatus status);
}

View File

@ -21,6 +21,15 @@ public class FileImportConsumer {
@RabbitListener(queues = "${queues.file-import}")
public void receiveFileImportCommand(FileImportCommand command) {
log.info("Received file import command: {}", command);
if (nonNull(command.mangaImportJobId())) {
var job = contentImportService.findImportJob(command.mangaImportJobId());
if (job.isPresent() && job.get().getStatus().equals(ImportJobStatus.SUCCESS)) {
log.info("Job {} already completed, skipping", command.mangaImportJobId());
return;
}
}
try {
contentImportService.importFile(
command.mangaContentProviderId(), command.filename(), command.mangaImportJobId());

View File

@ -9,6 +9,8 @@ import com.magamochi.catalog.service.MangaResolutionService;
import com.magamochi.common.exception.UnprocessableException;
import com.magamochi.common.model.enumeration.ContentType;
import com.magamochi.content.model.dto.MangaImportJobDTO;
import com.magamochi.content.model.dto.MangaImportJobFilterDTO;
import com.magamochi.content.model.dto.MangaImportJobPageResponseDTO;
import com.magamochi.content.model.dto.PresignedImportRequestDTO;
import com.magamochi.content.model.dto.PresignedImportResponseDTO;
import com.magamochi.content.model.entity.MangaContent;
@ -24,6 +26,7 @@ import com.magamochi.image.service.ImageFetchService;
import com.magamochi.image.service.ImageService;
import com.magamochi.image.service.S3Service;
import com.magamochi.ingestion.service.ContentProviderService;
import com.magamochi.model.specification.MangaImportJobSpecification;
import jakarta.validation.constraints.NotNull;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
@ -31,6 +34,9 @@ import java.util.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.apache.tika.io.TikaInputStream;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@ -207,7 +213,28 @@ public class ContentImportService {
}
}
public List<MangaImportJobDTO> getImportJobs() {
return mangaImportJobRepository.findAll().stream().map(MangaImportJobDTO::from).toList();
/**
 * Looks up a manga import job by primary key.
 *
 * @param id the job's database id
 * @return the job, or empty when no row with that id exists
 */
public Optional<MangaImportJob> findImportJob(Long id) {
  var maybeJob = mangaImportJobRepository.findById(id);
  return maybeJob;
}
/**
 * Returns one page of import jobs matching {@code filter}, most recently
 * updated first, together with global (unfiltered) per-status counts.
 *
 * <p>NOTE(review): any sort carried on the incoming {@code pageable} is
 * discarded and replaced with {@code updatedAt DESC} — confirm this override
 * is intentional. The page query and the five count queries run separately,
 * so counts may not be a perfectly consistent snapshot of the page.
 *
 * @param filter optional search/status criteria
 * @param pageable requested page number and size
 * @return the mapped DTO page plus global status counts
 */
public MangaImportJobPageResponseDTO getImportJobs(
    MangaImportJobFilterDTO filter, Pageable pageable) {
  var specification = MangaImportJobSpecification.build(filter);

  // Force a stable server-side ordering regardless of client-requested sort.
  var newestFirst = Sort.by(Sort.Direction.DESC, "updatedAt");
  var effectivePageable =
      PageRequest.of(pageable.getPageNumber(), pageable.getPageSize(), newestFirst);

  var jobPage =
      mangaImportJobRepository.findAll(specification, effectivePageable)
          .map(MangaImportJobDTO::from);

  return new MangaImportJobPageResponseDTO(
      jobPage,
      mangaImportJobRepository.count(),
      mangaImportJobRepository.countByStatus(ImportJobStatus.PENDING),
      mangaImportJobRepository.countByStatus(ImportJobStatus.PROCESSING),
      mangaImportJobRepository.countByStatus(ImportJobStatus.SUCCESS),
      mangaImportJobRepository.countByStatus(ImportJobStatus.FAILED));
}
}

View File

@ -0,0 +1,50 @@
package com.magamochi.model.specification;

import static java.util.Objects.nonNull;

import com.magamochi.content.model.dto.MangaImportJobFilterDTO;
import com.magamochi.content.model.entity.MangaImportJob;
import jakarta.persistence.criteria.Predicate;
import java.util.ArrayList;
import java.util.List;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.jpa.domain.Specification;

/** Builds dynamic JPA {@link Specification}s for filtering manga import job listings. */
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class MangaImportJobSpecification {

  /**
   * Creates a specification from the given filter.
   *
   * <p>A blank search query and/or null status contribute no predicates; a null
   * {@code filter} matches everything. A numeric search term additionally
   * matches the job id, MAL id, and AniList id, OR-ed with the filename match.
   *
   * @param filter optional search/status criteria (may be null)
   * @return a specification selecting the matching jobs
   */
  public static Specification<MangaImportJob> build(MangaImportJobFilterDTO filter) {
    return (root, query, cb) -> {
      // Defensive: treat a missing filter as "no filtering" instead of NPE-ing below.
      if (filter == null) {
        return cb.conjunction();
      }
      List<Predicate> predicates = new ArrayList<>();
      if (StringUtils.isNotBlank(filter.searchQuery())) {
        var searchTerm = filter.searchQuery().trim();
        var searchPattern = "%" + searchTerm.toLowerCase() + "%";
        // Case-insensitive substring match on the uploaded file's name.
        var filenamePredicate = cb.like(cb.lower(root.get("originalFilename")), searchPattern);
        List<Predicate> orPredicates = new ArrayList<>();
        orPredicates.add(filenamePredicate);
        try {
          // Numeric terms also match by internal and external identifiers.
          var numericValue = Long.parseLong(searchTerm);
          orPredicates.add(cb.equal(root.get("id"), numericValue));
          orPredicates.add(cb.equal(root.get("malId"), numericValue));
          orPredicates.add(cb.equal(root.get("aniListId"), numericValue));
        } catch (NumberFormatException ignored) {
          // not a numeric value, skip ID predicates
        }
        predicates.add(cb.or(orPredicates.toArray(Predicate[]::new)));
      }
      if (nonNull(filter.status())) {
        predicates.add(cb.equal(root.get("status"), filter.status()));
      }
      // cb.and() of an empty array is "true", so an empty filter matches all rows.
      return cb.and(predicates.toArray(Predicate[]::new));
    };
  }
}