캐시 handler 생성

로그 조회 개수 캐싱 처리
로그 객체 생성
로그 엑셀 객체 생성
엑셀 진행률 관리 tracker 생성
This commit is contained in:
2025-06-05 12:09:50 +09:00
parent fa67ae5e7c
commit 23850acf0f
13 changed files with 1452 additions and 49 deletions

View File

@@ -4,6 +4,7 @@ import com.caliverse.admin.domain.request.LogGenericRequest;
import com.caliverse.admin.domain.response.LogResponse;
import com.caliverse.admin.domain.service.LogService;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletResponse;
import lombok.RequiredArgsConstructor;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
@@ -22,4 +23,15 @@ public class LogController {
@RequestBody LogGenericRequest logGenericRequest){
return ResponseEntity.ok().body( logService.genericLogList(logGenericRequest));
}
// Streams an Excel export of generic business logs directly into the HTTP response.
// Delegates entirely to LogService.excelExport; export progress is reported out-of-band
// via GET /progress/{taskId}, keyed by the taskId carried inside the request body.
@PostMapping("/generic/excel-export")
public void excelExport(HttpServletResponse response,
@RequestBody LogGenericRequest logGenericRequest){
logService.excelExport(response, logGenericRequest);
}
// Polls the server-side progress of a running Excel-export task.
// Returns a map containing at least "exists"; see LogService.getProgress for the shape.
// NOTE(review): `Map` is used here but no java.util.Map import is visible in this hunk —
// confirm the full file imports it.
@GetMapping("/progress/{taskId}")
public ResponseEntity<Map<String, Object>> getProgress(@PathVariable String taskId) {
return ResponseEntity.ok().body(logService.getProgress(taskId));
}
}

View File

@@ -0,0 +1,24 @@
package com.caliverse.admin.domain.cache;
import com.caliverse.admin.domain.datacomponent.MetaDataFileLoader;
import com.caliverse.admin.domain.request.LogGenericRequest;
import com.caliverse.admin.logs.logservice.businesslogservice.BusinessLogGenericService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.stereotype.Component;
@Slf4j
@Component
// NOTE(review): @EnableCaching normally belongs on a @Configuration class (one central
// cache configuration), not on an individual component — consider relocating it.
@EnableCaching(proxyTargetClass = true)
public class CommonCacheHandler {

    private final BusinessLogGenericService businessLogGenericService;

    // Constructor injection instead of field injection: the dependency is explicit,
    // final, and the class is testable without a Spring context.
    @Autowired
    public CommonCacheHandler(BusinessLogGenericService businessLogGenericService) {
        this.businessLogGenericService = businessLogGenericService;
    }

    /**
     * Returns the total raw-log count matching the given search conditions.
     * Results are cached in the "businessLogCount" cache; the key is produced by the
     * {@code logCountKeyGenerator} bean so identical search conditions hit the cache
     * (the log line below is only emitted on a cache miss).
     */
    @Cacheable(value = "businessLogCount", keyGenerator = "logCountKeyGenerator")
    public Integer getLogCount(LogGenericRequest logGenericRequest) {
        log.info("Cache MISS - Executing actual DB query for count");
        return businessLogGenericService.getRawLogCount(logGenericRequest);
    }
}

View File

@@ -0,0 +1,40 @@
package com.caliverse.admin.domain.cache;
import com.caliverse.admin.domain.request.LogGenericRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.cache.interceptor.KeyGenerator;
import java.lang.reflect.Method;
@Component("logCountKeyGenerator")
@Slf4j
public class LogCountKeyGenerator implements KeyGenerator {

    /**
     * Builds the cache key for log-count lookups from the search conditions in the
     * first method argument (a LogGenericRequest). Optional fields that are absent
     * are encoded as "ALL"; a non-empty filter list contributes its hashCode.
     */
    @Override
    public Object generate(Object target, Method method, Object... params) {
        LogGenericRequest request = (LogGenericRequest) params[0];

        String action = (request.getLogAction() == null) ? "ALL" : request.getLogAction().name();
        String domain = (request.getLogDomain() == null) ? "ALL" : request.getLogDomain().name();
        String search = (request.getSearchData() == null) ? "ALL" : request.getSearchData();
        String tranId = (request.getTranId() == null) ? "ALL" : request.getTranId();

        // Date range (date part only) plus every discriminating search field, "_"-joined.
        String finalKey = String.join("_",
                String.valueOf(request.getStartDt().toLocalDate()),
                String.valueOf(request.getEndDt().toLocalDate()),
                action, domain, search, tranId);

        // Append a filter hash when filters are present so filtered counts get their own key.
        if (request.getFilters() != null && !request.getFilters().isEmpty()) {
            finalKey = finalKey + "_" + request.getFilters().hashCode();
        }

        log.info("Generated cache key: '{}'", finalKey);
        return finalKey;
    }
}

View File

@@ -0,0 +1,20 @@
package com.caliverse.admin.domain.entity.excel;
import lombok.Builder;
import lombok.Data;
import java.util.Map;
// Flat row model used when exporting business logs to Excel.
// Built in LogService.excelExport from GenericLog entries; field set mirrors GenericLog.
@Data
@Builder
public class ExcelBusinessLog {
// Log timestamp in its upstream string form.
private String logTime;
// Identifiers of the user the log entry belongs to.
private String userGuid;
private String userNickname;
private String accountId;
// Log classification fields.
private String action;
private String domain;
// Transaction id correlating related log entries.
private String tranId;
// "Header" / "Body" sections of the original log message, as generic maps.
private Map<String, Object> header;
private Map<String, Object> body;
}

View File

@@ -0,0 +1,21 @@
package com.caliverse.admin.domain.entity.log;
import lombok.Builder;
import lombok.Data;
import java.util.Map;
// Domain model for a generic business log entry as loaded from MongoDB
// (replaces GenericMongoLog in LogResponse and LogService in this commit).
// NOTE(review): LogService.excelExport calls getMessage() on this type, but no
// `message` field is declared here — confirm against the committed version.
@Data
@Builder
public class GenericLog{
// Log timestamp in its stored string form.
private String logTime;
// Identifiers of the user the log entry belongs to.
private String userGuid;
private String userNickname;
private String accountId;
// Log classification fields.
private String action;
private String domain;
// Transaction id correlating related log entries.
private String tranId;
// "Header" / "Body" sections of the log payload, as generic maps.
private Map<String, Object> header;
private Map<String, Object> body;
}

View File

@@ -40,6 +40,7 @@ public class LogGenericRequest {
private Integer pageSize;
@JsonProperty("order_by")
private String orderBy;
private String taskId;
@Data
public static class LogFilter{

View File

@@ -1,5 +1,6 @@
package com.caliverse.admin.domain.response;
import com.caliverse.admin.domain.entity.log.GenericLog;
import com.caliverse.admin.logs.Indicatordomain.GenericMongoLog;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -32,7 +33,7 @@ public class LogResponse {
@JsonProperty("generic_list")
// private List<Map<String, Object>> genericList;
private List<GenericMongoLog> genericList;
private List<GenericLog> genericList;
private int total;
@JsonProperty("total_all")

File diff suppressed because it is too large Load Diff

View File

@@ -1,18 +1,29 @@
package com.caliverse.admin.domain.service;
import com.caliverse.admin.domain.cache.CommonCacheHandler;
import com.caliverse.admin.domain.entity.excel.ExcelBusinessLog;
import com.caliverse.admin.domain.entity.log.GenericLog;
import com.caliverse.admin.domain.request.LogGenericRequest;
import com.caliverse.admin.domain.response.LogResponse;
import com.caliverse.admin.global.common.code.CommonCode;
import com.caliverse.admin.global.common.code.ErrorCode;
import com.caliverse.admin.global.common.exception.RestApiException;
import com.caliverse.admin.global.component.tracker.ExcelProgressTracker;
import com.caliverse.admin.logs.Indicatordomain.GenericMongoLog;
import com.caliverse.admin.logs.logservice.businesslogservice.BusinessLogGenericService;
import jakarta.servlet.http.HttpServletResponse;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
@Service
@@ -21,75 +32,263 @@ import java.util.stream.Collectors;
public class LogService {
private final BusinessLogGenericService businessLogGenericService;
private final ExcelService excelService;
private final CommonCacheHandler commonCacheHandler;
private final ExcelProgressTracker progressTracker;
/**
 * Returns one page of generic business logs together with total counts.
 *
 * The page query and the (cached) total-count query run concurrently via
 * CompletableFuture; known MongoDB failure modes are translated into specific
 * error codes in the response instead of propagating to the caller.
 */
public LogResponse genericLogList(LogGenericRequest logGenericRequest){
    // Keep the boxed value: getPageNo() may be null, and unboxing it up front
    // (as the previous version did) would NPE before the null check below.
    Integer pageNo = logGenericRequest.getPageNo();

    // Shift the KST input range onto the stored timestamps (+9h on both ends)
    // and make the end date inclusive by adding one day.
    LocalDateTime startDt = logGenericRequest.getStartDt().plusHours(9);
    LocalDateTime endDt = logGenericRequest.getEndDt().plusHours(9).plusDays(1);
    logGenericRequest.setStartDt(startDt);
    logGenericRequest.setEndDt(endDt);

    // Page data. MongoDB failures are wrapped in marker RuntimeExceptions so the
    // ExecutionException handler below can map them to error codes.
    // NOTE(review): supplyAsync runs blocking DB work on the common ForkJoinPool;
    // consider passing a dedicated executor.
    CompletableFuture<List<GenericLog>> logListFuture = CompletableFuture.supplyAsync(() -> {
        try {
            return businessLogGenericService.loadBusinessLogData(logGenericRequest, GenericLog.class);
        } catch (UncategorizedMongoDbException e) {
            if (e.getMessage().contains("Sort exceeded memory limit")) {
                log.error("MongoDB Query memory limit error: {}", e.getMessage());
                throw new RuntimeException("MEMORY_LIMIT_ERROR", e);
            } else if (e.getMessage().contains("time limit")) {
                log.error("MongoDB Query operation exceeded time limit: {}", e.getMessage());
                throw new RuntimeException("TIME_LIMIT_ERROR", e);
            } else {
                log.error("MongoDB Query error", e);
                throw new RuntimeException("MONGODB_QUERY_ERROR", e);
            }
        } catch (Exception e) {
            log.error("businessLog error", e);
            throw new RuntimeException("BUSINESS_LOG_ERROR", e);
        }
    });

    // Total count runs in parallel; the value is cached by CommonCacheHandler.
    CompletableFuture<Integer> totalCountFuture =
            CompletableFuture.supplyAsync(() -> commonCacheHandler.getLogCount(logGenericRequest));

    try {
        // Wait for both queries to complete.
        List<GenericLog> logList = logListFuture.get();
        int totalAll = totalCountFuture.get();
        int totalItems = logList.size();
        return LogResponse.builder()
                .resultData(LogResponse.ResultData.builder()
                        .genericList(logList)
                        .total(totalItems)
                        .totalAll(totalAll)
                        .pageNo(pageNo != null ? pageNo : 1)
                        .build())
                .status(CommonCode.SUCCESS.getHttpStatus())
                .result(CommonCode.SUCCESS.getResult())
                .build();
    } catch (ExecutionException e) {
        Throwable cause = e.getCause();
        if (cause instanceof RuntimeException) {
            String message = cause.getMessage();
            if ("MEMORY_LIMIT_ERROR".equals(message)) {
                return errorResponse(ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString());
            } else if ("TIME_LIMIT_ERROR".equals(message)) {
                // NOTE(review): reuses the memory-limit error code — if a dedicated
                // time-limit code exists, use it here instead.
                return errorResponse(ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString());
            } else if ("MONGODB_QUERY_ERROR".equals(message)) {
                return errorResponse(ErrorCode.ERROR_MONGODB_QUERY.toString());
            }
        }
        log.error("Parallel execution error", e);
        return errorResponse(CommonCode.ERROR.getResult());
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers up the stack can observe it.
        Thread.currentThread().interrupt();
        log.error("Thread interrupted", e);
        return errorResponse(CommonCode.ERROR.getResult());
    }
}

// Builds a LogResponse carrying only an error status and the given result code.
private LogResponse errorResponse(String result) {
    return LogResponse.builder()
            .status(CommonCode.ERROR.getHttpStatus())
            .result(result)
            .build();
}
// public LogResponse genericLogList(LogGenericRequest logGenericRequest){
// int page = logGenericRequest.getPageNo();
// int size = logGenericRequest.getPageSize();
//
//// LocalDateTime startDt = logGenericRequest.getStartDt().plusHours(9);
//// LocalDateTime endDt = logGenericRequest.getEndDt().plusHours(9).plusDays(1);
//// logGenericRequest.setStartDt(startDt);
//// logGenericRequest.setEndDt(endDt);
//
//// List<Map<String, Object>> logList = businessLogGenericService.loadBusinessLogData(logGenericRequest);
// LocalDateTime startDt = logGenericRequest.getStartDt().plusHours(9);
// LocalDateTime endDt = logGenericRequest.getEndDt().plusHours(9).plusDays(1);
// logGenericRequest.setStartDt(startDt);
// logGenericRequest.setEndDt(endDt);
//
// List<GenericLog> logList = new ArrayList<>();
// try{
// logList = businessLogGenericService.loadBusinessLogData(logGenericRequest, GenericLog.class);
// }catch(UncategorizedMongoDbException e){
// if (e.getMessage().contains("Sort exceeded memory limit")) {
// log.error("MongoDB Query memory limit error: {}", e.getMessage());
// return LogResponse.builder()
// .status(CommonCode.ERROR.getHttpStatus())
// .result(ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString())
// .build();
// } else if (e.getMessage().contains("time limit")) {
// log.error("MongoDB Query operation exceeded time limit: {}", e.getMessage());
// return LogResponse.builder()
// .status(CommonCode.ERROR.getHttpStatus())
// .result(ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString())
// .build();
// }else {
// log.error("MongoDB Query error", e);
// return LogResponse.builder()
// .status(CommonCode.ERROR.getHttpStatus())
// .result(ErrorCode.ERROR_MONGODB_QUERY.toString())
// .build();
// }
// }catch (Exception e){
// log.error("businessLog error", e);
// }
//
//// logList = logList.stream().map(logData -> {
//// try {
//// var header = logData.getMessage().get("Header");
//// var body = logData.getMessage().get("Body");
//// logData.setHeader((Map<String, Object>) header);
//// logData.setBody((Map<String, Object>) body);
//// logData.setMessage("");
//// return logData;
//// } catch (Exception e) {
//// log.error("Error parsing JSON from message field", e);
//// logData.setMessage(null);
//// return logData;
//// }
//// }).collect(Collectors.toList());
//
// int totalItems = logList.size();
// int totalAll = commonCacheHandler.getLogCount(logGenericRequest);
// return LogResponse.builder()
// .resultData(LogResponse.ResultData.builder()
// .genericList(logList)
// .total(totalItems)
// .totalAll(totalAll)
// .pageNo(logGenericRequest.getPageNo() != null ?
// page : 1)
// .build())
// .status(CommonCode.SUCCESS.getHttpStatus())
// .result(CommonCode.SUCCESS.getResult())
// .build();
//
// }
// Exports generic business logs as an Excel file written straight to the HTTP response,
// reporting progress through ExcelProgressTracker under the request-supplied taskId.
//
// NOTE(review): this hunk appears to contain stale lines from a previous revision
// (duplicate load calls, `return LogResponse...` statements inside a void method, and a
// reference to an undeclared `page` variable). The `throw new RestApiException` lines
// look like the intended replacements — reconcile against the committed file.
public void excelExport(HttpServletResponse response, LogGenericRequest logGenericRequest){
String taskId = logGenericRequest.getTaskId();
// Shift the KST input range onto stored timestamps (+9h) and make the end date inclusive.
LocalDateTime startDt = logGenericRequest.getStartDt().plusHours(9);
LocalDateTime endDt = logGenericRequest.getEndDt().plusHours(9).plusDays(1);
logGenericRequest.setStartDt(startDt);
logGenericRequest.setEndDt(endDt);
// The export is unpaged: clear paging so the full result set is loaded.
logGenericRequest.setPageNo(null);
logGenericRequest.setPageSize(null);
progressTracker.updateProgress(taskId, 5, 100, "엑셀 생성 준비 중...");
List<GenericLog> logList = new ArrayList<>();
try{
// NOTE(review): two back-to-back load calls — the GenericMongoLog.class line is
// presumably the removed (old) side of the diff.
logList = businessLogGenericService.loadBusinessLogData(logGenericRequest, GenericMongoLog.class);
logList = businessLogGenericService.loadBusinessLogData(logGenericRequest, GenericLog.class);
progressTracker.updateProgress(taskId, 20, 100, "데이터 생성완료");
}catch(UncategorizedMongoDbException e){
if (e.getMessage().contains("Sort exceeded memory limit")) {
log.error("MongoDB Query memory limit error: {}", e.getMessage());
// NOTE(review): stale return — a void method cannot return LogResponse; the throw
// below appears to be the live path.
return LogResponse.builder()
.status(CommonCode.ERROR.getHttpStatus())
.result(ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString())
.build();
throw new RestApiException(CommonCode.ERROR.getHttpStatus(), ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString());
} else if (e.getMessage().contains("time limit")) {
log.error("MongoDB Query operation exceeded time limit: {}", e.getMessage());
// NOTE(review): stale return (see above); also reuses the memory-limit error code
// for a time-limit failure — confirm intended.
return LogResponse.builder()
.status(CommonCode.ERROR.getHttpStatus())
.result(ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString())
.build();
throw new RestApiException(CommonCode.ERROR.getHttpStatus(), ErrorCode.ERROR_LOG_MEMORY_LIMIT.toString());
}else {
log.error("MongoDB Query error", e);
// NOTE(review): stale return (see above).
return LogResponse.builder()
.status(CommonCode.ERROR.getHttpStatus())
.result(ErrorCode.ERROR_MONGODB_QUERY.toString())
.build();
throw new RestApiException(CommonCode.ERROR.getHttpStatus(), ErrorCode.ERROR_MONGODB_QUERY.toString());
}
}catch (Exception e){
// NOTE(review): duplicated log statements — likely old/new sides of the diff.
log.error("businessLog error", e);
log.error("ExcelExport Data Search Error", e);
}
// Split the raw message payload into Header/Body maps for the export rows.
// NOTE(review): GenericLog as declared in this commit has no `message` field or
// getMessage() accessor — confirm this mapping compiles against the final model.
logList = logList.stream().map(logData -> {
try {
var header = logData.getMessage().get("Header");
var body = logData.getMessage().get("Body");
logData.setHeader((Map<String, Object>) header);
logData.setBody((Map<String, Object>) body);
return logData;
} catch (Exception e) {
log.error("Error parsing JSON from message field", e);
return logData;
}
}).collect(Collectors.toList());
progressTracker.updateProgress(taskId, 25, 100, "데이터 파싱 준비중...");
// Convert each log entry into the flat Excel row model; rows that fail to map are dropped.
List<ExcelBusinessLog> excelList = logList.stream()
.map(logData -> {
try {
return ExcelBusinessLog.builder()
.logTime(logData.getLogTime())
.accountId(logData.getAccountId())
.userGuid(logData.getUserGuid())
.userNickname(logData.getUserNickname())
.tranId(logData.getTranId())
.action(logData.getAction())
.domain(logData.getDomain())
.header(logData.getHeader())
.body(logData.getBody())
.build();
} catch (Exception e) {
log.error("Error parsing JSON from message field", e);
return null;
}
})
.filter(Objects::nonNull) // drop rows that failed to map
.toList();
progressTracker.updateProgress(taskId, 30, 100, "데이터 파싱 완료...");
int totalItems = logList.size();
try{
// Writes the workbook to the response; the tracker carries progress 30 -> 100.
excelService.generateExcelToResponse(
response,
excelList,
"비즈니스 로그 데이터",
"sheet1",
taskId
);
// NOTE(review): stale return block — references an undeclared `page` and returns a
// value from a void method; presumably removed in the committed version.
return LogResponse.builder()
.resultData(LogResponse.ResultData.builder()
.genericList(logList)
.total(totalItems)
.totalAll(totalItems)
.pageNo(logGenericRequest.getPageNo() != null ?
page : 1)
.build())
.status(CommonCode.SUCCESS.getHttpStatus())
.result(CommonCode.SUCCESS.getResult())
.build();
}catch (Exception e){
log.error("Excel Export Create Error", e);
throw new RestApiException(CommonCode.ERROR.getHttpStatus(), ErrorCode.ERROR_EXCEL_DOWN.toString());
}
}
/**
 * Returns the current Excel-export progress for the given task as a map.
 * The map always contains "exists"; when true it also carries
 * percentage / currentStep / totalSteps / message.
 */
public Map<String, Object> getProgress(String taskId){
    try {
        Map<String, Object> result = new HashMap<>();
        ExcelProgressTracker.ProgressInfo info = progressTracker.getProgress(taskId);
        if (info == null) {
            // Unknown or already-expired task: report absence only.
            result.put("exists", false);
            return result;
        }
        result.put("exists", true);
        result.put("percentage", info.getPercentage());
        result.put("currentStep", info.getCurrentStep());
        result.put("totalSteps", info.getTotalSteps());
        result.put("message", info.getMessage());
        return result;
    }catch (Exception e){
        log.error(e.getMessage());
        throw new RestApiException(CommonCode.ERROR.getHttpStatus(), ErrorCode.ERROR_EXCEL_DOWN.toString());
    }
}
}

View File

@@ -58,7 +58,6 @@ public class ExcelUtils {
return value;
}
switch (cell.getCellType()) {
case STRING: // getRichStringCellValue() 메소드를 사용하여 컨텐츠를 읽음
value = cell.getRichStringCellValue().getString();

View File

@@ -0,0 +1,49 @@
package com.caliverse.admin.global.component.tracker;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@Component
@Slf4j
public class ExcelProgressTracker {

    // Retention window for a task's progress entry AND the sweep interval: 5 minutes.
    // Extracted so the @Scheduled rate and the eviction cutoff cannot drift apart.
    private static final long EXPIRY_MS = 300_000L;

    // taskId -> latest progress. ConcurrentHashMap because the export thread writes
    // while the polling endpoint reads concurrently.
    private final Map<String, ProgressInfo> progressMap = new ConcurrentHashMap<>();

    /** Snapshot of an export task's progress at one point in time. */
    @Data
    @AllArgsConstructor
    public static class ProgressInfo {
        private int currentStep;
        private int totalSteps;
        private String message;
        // Last-update time (epoch millis); consulted by the cleanup sweep.
        private long timestamp;

        /** Completion percentage; 0 while totalSteps is not yet known. */
        public int getPercentage() {
            return totalSteps > 0 ? (currentStep * 100) / totalSteps : 0;
        }
    }

    /** Records the latest progress for a task, stamped with the current time. */
    public void updateProgress(String taskId, int currentStep, int totalSteps, String message) {
        progressMap.put(taskId, new ProgressInfo(currentStep, totalSteps, message, System.currentTimeMillis()));
        log.info("Progress [{}]: {}/{} - {}", taskId, currentStep, totalSteps, message);
    }

    /** Returns the latest progress for a task, or null if unknown/expired. */
    public ProgressInfo getProgress(String taskId) {
        return progressMap.get(taskId);
    }

    /** Removes a task's progress entry (call once the export has completed). */
    public void removeProgress(String taskId) {
        progressMap.remove(taskId);
    }

    // Evicts entries not updated within EXPIRY_MS so abandoned tasks do not leak.
    // NOTE(review): @Scheduled requires @EnableScheduling somewhere in the application
    // configuration — confirm it is enabled.
    @Scheduled(fixedRate = EXPIRY_MS)
    public void cleanupOldProgress() {
        long cutoff = System.currentTimeMillis() - EXPIRY_MS;
        progressMap.entrySet().removeIf(entry -> entry.getValue().getTimestamp() < cutoff);
    }
}

View File

@@ -48,7 +48,11 @@ public class MongoIndicatorConfig {
String auth = username.isEmpty() ? "" : String.format("%s:%s@",username, encodePassword);
String connection;
if(activeProfile.equals("local") || activeProfile.equals("dev")) {
connection = String.format("mongodb://%s%s/?authSource=%s", auth, businessLogHost, db);
if(businessLogHost.contains("metaverse")){
connection = String.format("mongodb+srv://%s%s/%s?retryWrites=true&w=majority", auth, businessLogHost, db);
}else{
connection = String.format("mongodb://%s%s/?authSource=%s", auth, businessLogHost, db);
}
}else{
connection = String.format("mongodb+srv://%s%s/%s?retryWrites=true&w=majority&appName=backoffice-%s", auth, businessLogHost, db, activeProfile);
}

View File

@@ -38,6 +38,9 @@ public class RedisConfig {
@Value("${redis.ssl}")
private boolean ssl;
@Value("${redis.prefix}")
private String prefix;
@Value("${redis.abort-connect}")
private boolean abortConnect;
@@ -124,7 +127,8 @@ public class RedisConfig {
public RedisCacheManager cacheManager(JedisConnectionFactory jedisConnectionFactory) {
RedisCacheConfiguration cacheConfig = RedisCacheConfiguration.defaultCacheConfig()
.entryTtl(Duration.ofMinutes(60)) // 캐시 TTL 설정
.disableCachingNullValues(); // null 값 캐싱 금지
.disableCachingNullValues() // null 값 캐싱 금지
.computePrefixWith(cacheName -> prefix + ":" + cacheName + ":");
return RedisCacheManager.builder(jedisConnectionFactory)
.cacheDefaults(cacheConfig)