diff --git a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java index c2218bd0..cce15a25 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java +++ b/apps/user-service/src/main/java/site/icebang/domain/schedule/model/Schedule.java @@ -1,6 +1,6 @@ package site.icebang.domain.schedule.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -22,10 +22,10 @@ public class Schedule { private String parameters; // JSON format private boolean isActive; private String lastRunStatus; - private LocalDateTime lastRunAt; - private LocalDateTime createdAt; + private Instant lastRunAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; private String scheduleText; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java index 6dd40c5d..035d6d17 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/JobDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -10,9 +10,9 @@ public class JobDto { private String name; private String description; private Boolean isEnabled; - private LocalDateTime createdAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; private Integer executionOrder; diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java index 397285cb..752bd619 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/ScheduleDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -10,7 +10,7 @@ public class ScheduleDto { private String cronExpression; private Boolean isActive; private String lastRunStatus; - private LocalDateTime lastRunAt; + private Instant lastRunAt; private String scheduleText; - private LocalDateTime createdAt; + private Instant createdAt; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java index fa83fe7d..1047d141 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/TaskDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import com.fasterxml.jackson.databind.JsonNode; @@ -14,6 +14,6 @@ public class TaskDto { private Integer executionOrder; private JsonNode settings; private JsonNode parameters; - private LocalDateTime createdAt; - private LocalDateTime updatedAt; + private Instant createdAt; + private Instant updatedAt; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java 
b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java index a39ce0c3..4d074930 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowCardDto.java @@ -1,7 +1,7 @@ package site.icebang.domain.workflow.dto; import java.math.BigInteger; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -12,5 +12,5 @@ public class WorkflowCardDto { private String description; private boolean isEnabled; private String createdBy; - private LocalDateTime createdAt; + private Instant createdAt; } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java index a2ef46b8..175db6ac 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowDetailCardDto.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.dto; -import java.time.LocalDateTime; +import java.time.Instant; import java.util.List; import java.util.Map; @@ -9,7 +9,7 @@ @Data public class WorkflowDetailCardDto extends WorkflowCardDto { private String defaultConfig; - private LocalDateTime updatedAt; + private Instant updatedAt; private String updatedBy; private List schedules; private List> jobs; diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java index 18a25b7e..9f5a9b8d 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/dto/WorkflowHistoryDTO.java @@ -1,7 +1,7 @@ package site.icebang.domain.workflow.dto; import java.math.BigInteger; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Data; @@ -11,8 +11,8 @@ public class WorkflowHistoryDTO { private BigInteger id; private BigInteger workflowId; private String traceId; - private LocalDateTime startedAt; - private LocalDateTime finishedAt; + private Instant startedAt; + private Instant finishedAt; private BigInteger createdBy; private String triggerType; private String runNumber; diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java index f0d36d8b..c363f8de 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Job.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -17,9 +17,9 @@ public class Job { private String name; private String description; private boolean isEnabled; - private LocalDateTime createdAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; public Job(JobDto dto) { diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java index 038890dc..eeaffd28 100644 --- 
a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/JobRun.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Getter; import lombok.NoArgsConstructor; @@ -13,15 +13,15 @@ public class JobRun { private Long workflowRunId; private Long jobId; private String status; // PENDING, RUNNING, SUCCESS, FAILED - private LocalDateTime startedAt; - private LocalDateTime finishedAt; - private LocalDateTime createdAt; + private Instant startedAt; + private Instant finishedAt; + private Instant createdAt; private JobRun(Long workflowRunId, Long jobId) { this.workflowRunId = workflowRunId; this.jobId = jobId; this.status = "RUNNING"; - this.startedAt = LocalDateTime.now(); + this.startedAt = Instant.now(); this.createdAt = this.startedAt; } @@ -33,6 +33,6 @@ public static JobRun start(Long workflowRunId, Long jobId) { /** Job 실행 완료 처리 */ public void finish(String status) { this.status = status; - this.finishedAt = LocalDateTime.now(); + this.finishedAt = Instant.now(); } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java index 2c917100..04d577c1 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Task.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import com.fasterxml.jackson.databind.JsonNode; @@ -26,9 +26,9 @@ public class Task { private JsonNode settings; - private LocalDateTime createdAt; + private Instant createdAt; - private LocalDateTime updatedAt; + private Instant updatedAt; public Task(TaskDto taskDto) { this.id = taskDto.getId(); diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java index d49542f0..6d89a150 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/TaskRun.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.Getter; import lombok.NoArgsConstructor; @@ -15,16 +15,16 @@ public class TaskRun { private Integer executionOrder; private String status; // PENDING, RUNNING, SUCCESS, FAILED private String resultMessage; // 실행 결과 메시지 - private LocalDateTime startedAt; - private LocalDateTime finishedAt; - private LocalDateTime createdAt; + private Instant startedAt; + private Instant finishedAt; + private Instant createdAt; // 생성자나 정적 팩토리 메서드를 통해 객체 생성 로직을 관리 private TaskRun(Long jobRunId, Long taskId) { this.jobRunId = jobRunId; this.taskId = taskId; this.status = "PENDING"; - this.createdAt = LocalDateTime.now(); + this.createdAt = Instant.now(); } /** Task 실행 시작을 위한 정적 팩토리 메서드 */ @@ -32,7 +32,7 @@ public static TaskRun start(Long jobRunId, Long taskId, Integer executionOrder) TaskRun taskRun = new TaskRun(jobRunId, taskId); taskRun.executionOrder = executionOrder; taskRun.status = "RUNNING"; - taskRun.startedAt = LocalDateTime.now(); + taskRun.startedAt = Instant.now(); return taskRun; } @@ -40,6 +40,6 @@ public static TaskRun start(Long jobRunId, Long taskId, Integer executionOrder) 
public void finish(String status, String resultMessage) { this.status = status; this.resultMessage = resultMessage; - this.finishedAt = LocalDateTime.now(); + this.finishedAt = Instant.now(); } } diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java index 8b536003..695364aa 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/Workflow.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -16,9 +16,9 @@ public class Workflow { private String name; private String description; private boolean isEnabled; - private LocalDateTime createdAt; + private Instant createdAt; private Long createdBy; - private LocalDateTime updatedAt; + private Instant updatedAt; private Long updatedBy; /** 워크플로우별 기본 설정값 (JSON) */ diff --git a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java index 011f7ee5..5741e77b 100644 --- a/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java +++ b/apps/user-service/src/main/java/site/icebang/domain/workflow/model/WorkflowRun.java @@ -1,6 +1,6 @@ package site.icebang.domain.workflow.model; -import java.time.LocalDateTime; +import java.time.Instant; import java.util.UUID; import lombok.Getter; @@ -14,15 +14,15 @@ public class WorkflowRun { private Long workflowId; private String traceId; // 분산 추적을 위한 ID private String status; // PENDING, RUNNING, SUCCESS, FAILED - private LocalDateTime startedAt; - private LocalDateTime finishedAt; - private LocalDateTime createdAt; + private Instant startedAt; + private Instant finishedAt; + private Instant createdAt; private WorkflowRun(Long workflowId) { this.workflowId = workflowId; this.traceId = UUID.randomUUID().toString(); // 고유 추적 ID 생성 this.status = "RUNNING"; - this.startedAt = LocalDateTime.now(); + this.startedAt = Instant.now(); this.createdAt = this.startedAt; } @@ -34,6 +34,6 @@ public static WorkflowRun start(Long workflowId) { /** 워크플로우 실행 완료 처리 */ public void finish(String status) { this.status = status; - this.finishedAt = LocalDateTime.now(); + this.finishedAt = Instant.now(); } } diff --git a/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/InstantTypeHandler.java b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/InstantTypeHandler.java new file mode 100644 index 00000000..4146c4af --- /dev/null +++ b/apps/user-service/src/main/java/site/icebang/global/config/mybatis/typehandler/InstantTypeHandler.java @@ -0,0 +1,94 @@ +package site.icebang.global.config.mybatis.typehandler; + +import java.sql.CallableStatement; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.Instant; + +import org.apache.ibatis.type.BaseTypeHandler; +import org.apache.ibatis.type.JdbcType; +import org.apache.ibatis.type.MappedTypes; + +/** + * MyBatis에서 Java 8의 {@code Instant} 타입을 데이터베이스의 TIMESTAMP 타입과 매핑하기 위한 커스텀 타입 핸들러입니다. + * + *

+ * <p>이 핸들러를 통해 애플리케이션에서는 UTC 기준의 시간을 {@code Instant} 객체로 다루고, 데이터베이스에는 해당 객체를 TIMESTAMP 형태로 저장하거나
+ * 읽어올 수 있습니다.
+ *
+ * <p>MyBatis XML 매퍼에서의 사용 예제 (프로퍼티·컬럼 이름은 설명을 위한 예시):
+ *
+ * <pre>{@code
+ * <result property="createdAt" column="created_at"
+ *         typeHandler="site.icebang.global.config.mybatis.typehandler.InstantTypeHandler"/>
+ * }</pre>
+ * + * @author jihu0210@naver.com + * @since v0.1.0 + */ +@MappedTypes(Instant.class) +public class InstantTypeHandler extends BaseTypeHandler { + + /** + * {@code Instant} 파라미터를 DB에 저장하기 위해 Timestamp로 변환하여 PreparedStatement에 설정합니다. + * + * @param ps PreparedStatement 객체 + * @param i 파라미터 인덱스 + * @param parameter 변환할 Instant 객체 + * @param jdbcType JDBC 타입 + * @throws SQLException 변환 실패 시 + */ + @Override + public void setNonNullParameter(PreparedStatement ps, int i, Instant parameter, JdbcType jdbcType) + throws SQLException { + ps.setTimestamp(i, Timestamp.from(parameter)); + } + + /** + * ResultSet에서 컬럼 이름으로 Timestamp를 가져와 {@code Instant} 객체로 변환합니다. + * + * @param rs ResultSet 객체 + * @param columnName 컬럼 이름 + * @return 변환된 Instant 객체, 원본이 null이면 null + * @throws SQLException 변환 실패 시 + */ + @Override + public Instant getNullableResult(ResultSet rs, String columnName) throws SQLException { + Timestamp timestamp = rs.getTimestamp(columnName); + return timestamp != null ? timestamp.toInstant() : null; + } + + /** + * ResultSet에서 컬럼 인덱스로 Timestamp를 가져와 {@code Instant} 객체로 변환합니다. + * + * @param rs ResultSet 객체 + * @param columnIndex 컬럼 인덱스 + * @return 변환된 Instant 객체, 원본이 null이면 null + * @throws SQLException 변환 실패 시 + */ + @Override + public Instant getNullableResult(ResultSet rs, int columnIndex) throws SQLException { + Timestamp timestamp = rs.getTimestamp(columnIndex); + return timestamp != null ? timestamp.toInstant() : null; + } + + /** + * CallableStatement에서 컬럼 인덱스로 Timestamp를 가져와 {@code Instant} 객체로 변환합니다. + * + * @param cs CallableStatement 객체 + * @param columnIndex 컬럼 인덱스 + * @return 변환된 Instant 객체, 원본이 null이면 null + * @throws SQLException 변환 실패 시 + */ + @Override + public Instant getNullableResult(CallableStatement cs, int columnIndex) throws SQLException { + Timestamp timestamp = cs.getTimestamp(columnIndex); + return timestamp != null ? 
timestamp.toInstant() : null; + } +} diff --git a/apps/user-service/src/main/resources/application-develop.yml b/apps/user-service/src/main/resources/application-develop.yml index 9de00956..64e1a0be 100644 --- a/apps/user-service/src/main/resources/application-develop.yml +++ b/apps/user-service/src/main/resources/application-develop.yml @@ -28,7 +28,7 @@ spring: auto-startup: true # 📌 Quartz 전용 DataSource 설정을 여기에 추가 datasource: - url: jdbc:mariadb://localhost:3306/pre_process + url: jdbc:mariadb://localhost:3306/pre_process?serverTimezone=UTC username: mariadb password: qwer1234 driver-class-name: org.mariadb.jdbc.Driver @@ -44,14 +44,16 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-maria.sql - - classpath:sql/01-schema.sql - - classpath:sql/02-quartz-schema.sql + - classpath:sql/schema/00-drop-maria.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-mariadb-timezone.sql data-locations: - - classpath:sql/00-truncate.sql - - classpath:sql/01-insert-internal-users.sql - - classpath:sql/02-insert-external-users.sql - - classpath:sql/03-insert-workflow.sql + - classpath:sql/data/00-truncate.sql + - classpath:sql/data/01-insert-internal-users.sql + - classpath:sql/data/02-insert-external-users.sql + - classpath:sql/data/03-insert-workflow.sql + - classpath:sql/data/05-fix-timezone-data.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application-production.yml b/apps/user-service/src/main/resources/application-production.yml index 406fed87..c53e00bb 100644 --- a/apps/user-service/src/main/resources/application-production.yml +++ b/apps/user-service/src/main/resources/application-production.yml @@ -4,7 +4,7 @@ spring: on-profile: production datasource: - url: jdbc:mariadb://${DB_HOST}:${DB_PORT}/${DB_NAME} + url: jdbc:mariadb://${DB_HOST}:${DB_PORT}/${DB_NAME}?serverTimezone=UTC username: ${DB_USER} password: ${DB_PASS} driver-class-name: org.mariadb.jdbc.Driver diff --git a/apps/user-service/src/main/resources/application-test-e2e.yml b/apps/user-service/src/main/resources/application-test-e2e.yml index 3a777909..14c572b1 100644 --- a/apps/user-service/src/main/resources/application-test-e2e.yml +++ b/apps/user-service/src/main/resources/application-test-e2e.yml @@ -7,8 +7,16 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-maria.sql - - classpath:sql/01-schema.sql + - classpath:sql/schema/00-drop-maria.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-mariadb-timezone.sql + data-locations: + - classpath:sql/data/00-truncate.sql + - classpath:sql/data/01-insert-internal-users.sql + - classpath:sql/data/02-insert-external-users.sql + - classpath:sql/data/03-insert-workflow.sql + - classpath:sql/data/05-fix-timezone-data.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application-test-integration.yml b/apps/user-service/src/main/resources/application-test-integration.yml index 6eccdace..0bc7cbcc 100644 --- a/apps/user-service/src/main/resources/application-test-integration.yml +++ b/apps/user-service/src/main/resources/application-test-integration.yml @@ -10,7 +10,7 @@ spring: password: driver-class-name: org.h2.Driver hikari: - connection-init-sql: "SET MODE MariaDB; SET NON_KEYWORDS USER;" + connection-init-sql: "SET MODE MariaDB; SET NON_KEYWORDS USER; " connection-timeout: 30000 idle-timeout: 600000 max-lifetime: 1800000 @@ -28,8 
+28,10 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-h2.sql - - classpath:sql/01-schema.sql + - classpath:sql/schema/00-drop-h2.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-h2-timezone.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application-test-unit.yml b/apps/user-service/src/main/resources/application-test-unit.yml index d9a8059b..1487e336 100644 --- a/apps/user-service/src/main/resources/application-test-unit.yml +++ b/apps/user-service/src/main/resources/application-test-unit.yml @@ -11,7 +11,7 @@ spring: password: driver-class-name: org.h2.Driver hikari: - connection-init-sql: "SET MODE MariaDB" + connection-init-sql: "SET MODE MariaDB " connection-timeout: 30000 idle-timeout: 600000 max-lifetime: 1800000 @@ -29,8 +29,10 @@ spring: init: mode: always schema-locations: - - classpath:sql/00-drop-h2.sql - - classpath:sql/01-schema.sql + - classpath:sql/schema/00-drop-h2.sql + - classpath:sql/schema/01-schema.sql + - classpath:sql/schema/02-quartz-schema.sql + - classpath:sql/schema/03-schema-h2-timezone.sql encoding: UTF-8 mybatis: diff --git a/apps/user-service/src/main/resources/application.yml b/apps/user-service/src/main/resources/application.yml index fbda82f3..55fece16 100644 --- a/apps/user-service/src/main/resources/application.yml +++ b/apps/user-service/src/main/resources/application.yml @@ -7,6 +7,10 @@ spring: context: cache: maxSize: 1 + jackson: + time-zone: UTC + serialization: + write-dates-as-timestamps: false mybatis: # Mapper XML 파일 위치 diff --git a/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml index cd64ad2c..5b959db3 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/JobMapper.xml @@ -8,8 +8,8 @@ - - + + @@ -18,8 +18,8 @@ - - + + diff --git a/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml index 3a0e17bd..2cc51d78 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/JobRunMapper.xml @@ -8,9 +8,9 @@ - - - + + + diff --git a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml index 2a5480e3..80d6ffae 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/ScheduleMapper.xml @@ -3,6 +3,18 @@ + + + + + + + + + + + + diff --git a/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml index 8fb277e2..61ec3cf0 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/TaskRunMapper.xml @@ -2,6 +2,19 @@ + + + + + + + + + + + + + INSERT INTO task_run (job_run_id, task_id, execution_order, status, started_at, created_at) VALUES (#{jobRunId}, #{taskId}, #{executionOrder}, #{status}, #{startedAt}, #{createdAt}) diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml index dda398a9..ea5a0d01 100644 --- 
a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowMapper.xml @@ -47,9 +47,9 @@ - + - + @@ -58,9 +58,9 @@ - + - + @@ -143,7 +143,7 @@ #{dto.description}, #{dto.isEnabled}, #{createdBy}, - NOW(), + UTC_TIMESTAMP(), #{dto.defaultConfigJson} ) @@ -161,8 +161,8 @@ SELECT LAST_INSERT_ID() as id INSERT INTO job (name, description, created_by, created_at) VALUES - ('상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', #{createdBy}, NOW()), - ('블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', #{createdBy}, NOW()) + ('상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', #{createdBy}, UTC_TIMESTAMP()), + ('블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', #{createdBy}, UTC_TIMESTAMP()) diff --git a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml index d032da56..8011fc6c 100644 --- a/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml +++ b/apps/user-service/src/main/resources/mybatis/mapper/WorkflowRunMapper.xml @@ -8,9 +8,9 @@ - - - + + + diff --git a/apps/user-service/src/main/resources/sql/00-truncate.sql b/apps/user-service/src/main/resources/sql/data/00-truncate.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/00-truncate.sql rename to apps/user-service/src/main/resources/sql/data/00-truncate.sql diff --git a/apps/user-service/src/main/resources/sql/data/01-insert-internal-users-h2.sql b/apps/user-service/src/main/resources/sql/data/01-insert-internal-users-h2.sql new file mode 100644 index 00000000..88108427 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/01-insert-internal-users-h2.sql @@ -0,0 +1,229 @@ +-- icebang 내부 직원 전체 INSERT (H2 호환 버전) + +-- 1. icebang 조직 +INSERT INTO `organization` (`name`, `domain_name`) VALUES + ('icebang', 'icebang.site'); + +-- 2. icebang 부서들 (직접 ID로 참조) +INSERT INTO `department` (`organization_id`, `name`) VALUES + (1, 'AI개발팀'), + (1, '데이터팀'), + (1, '콘텐츠팀'), + (1, '마케팅팀'), + (1, '운영팀'), + (1, '기획팀'); + +-- 3. icebang 직책들 (직접 ID로 참조) +INSERT INTO `position` (`organization_id`, `title`) VALUES + (1, 'CEO'), + (1, 'CTO'), + (1, '팀장'), + (1, '시니어'), + (1, '주니어'), + (1, '인턴'); + +-- 4. 
바이럴 콘텐츠 워크플로우 권한들 +INSERT INTO `permission` (`resource`, `description`) VALUES +-- 사용자 관리 +('users.create', '사용자 생성'), +('users.read', '사용자 조회'), +('users.read.own', '본인 정보 조회'), +('users.read.department', '부서 내 사용자 조회'), +('users.read.organization', '조직 전체 사용자 조회'), +('users.update', '사용자 정보 수정'), +('users.update.own', '본인 정보 수정'), +('users.delete', '사용자 삭제'), +('users.invite', '사용자 초대'), + +-- 조직 관리 +('organizations.read', '조직 조회'), +('organizations.settings', '조직 설정 관리'), + +-- 부서 관리 +('departments.read', '부서 조회'), +('departments.manage', '부서 관리'), + +-- 역할/권한 관리 +('roles.create', '역할 생성'), +('roles.read', '역할 조회'), +('roles.update', '역할 수정'), +('roles.assign', '역할 할당'), +('permissions.read', '권한 조회'), +('permissions.assign', '권한 할당'), + +-- 트렌드 키워드 관리 +('trends.read', '트렌드 키워드 조회'), +('trends.create', '트렌드 키워드 등록'), +('trends.update', '트렌드 키워드 수정'), +('trends.delete', '트렌드 키워드 삭제'), +('trends.analyze', '트렌드 분석'), + +-- 크롤링 관리 +('crawling.create', '크롤링 작업 생성'), +('crawling.read', '크롤링 결과 조회'), +('crawling.update', '크롤링 설정 수정'), +('crawling.delete', '크롤링 데이터 삭제'), +('crawling.execute', '크롤링 실행'), +('crawling.schedule', '크롤링 스케줄 관리'), + +-- 콘텐츠 생성 +('content.create', '콘텐츠 생성'), +('content.read', '콘텐츠 조회'), +('content.read.own', '본인 콘텐츠만 조회'), +('content.read.department', '부서 콘텐츠 조회'), +('content.read.all', '모든 콘텐츠 조회'), +('content.update', '콘텐츠 수정'), +('content.delete', '콘텐츠 삭제'), +('content.publish', '콘텐츠 발행'), +('content.approve', '콘텐츠 승인'), +('content.reject', '콘텐츠 거절'), + +-- AI 모델 관리 +('ai.models.read', 'AI 모델 조회'), +('ai.models.create', 'AI 모델 생성'), +('ai.models.update', 'AI 모델 수정'), +('ai.models.delete', 'AI 모델 삭제'), +('ai.models.train', 'AI 모델 학습'), +('ai.models.deploy', 'AI 모델 배포'), + +-- 워크플로우 관리 +('workflows.create', '워크플로우 생성'), +('workflows.read', '워크플로우 조회'), +('workflows.update', '워크플로우 수정'), +('workflows.delete', '워크플로우 삭제'), +('workflows.execute', '워크플로우 실행'), +('workflows.schedule', '워크플로우 스케줄링'), + +-- 캠페인 관리 +('campaigns.create', '캠페인 생성'), +('campaigns.read', '캠페인 조회'), +('campaigns.update', '캠페인 수정'), +('campaigns.delete', '캠페인 삭제'), +('campaigns.execute', '캠페인 실행'), + +-- 시스템 관리 +('system.health', '시스템 상태 조회'), +('system.logs', '시스템 로그 조회'), +('system.backup', '시스템 백업'), +('system.config', '시스템 설정 관리'); + +-- 5. icebang 역할들 +INSERT INTO `role` (`organization_id`, `name`, `description`) VALUES +-- 글로벌 관리자 역할 +(NULL, 'SUPER_ADMIN', '전체 시스템 관리자 - 모든 권한'), +(NULL, 'ORG_ADMIN', '조직 관리자 - 조직별 모든 권한'), + +-- icebang 전용 역할들 +(1, 'AI_ENGINEER', 'AI 개발자 - AI 모델 관리 및 워크플로우'), +(1, 'DATA_SCIENTIST', '데이터 과학자 - 트렌드 분석 및 데이터 관리'), +(1, 'CONTENT_MANAGER', '콘텐츠 매니저 - 콘텐츠 생성 및 관리'), +(1, 'MARKETING_SPECIALIST', '마케팅 전문가 - 캠페인 관리'), +(1, 'WORKFLOW_ADMIN', '워크플로우 관리자 - 워크플로우 전체 관리'), +(1, 'CRAWLER_OPERATOR', '크롤링 운영자 - 크롤링 작업 관리'), +(1, 'BASIC_USER', '기본 사용자 - 기본 조회 권한'); + +-- 6. 
icebang 직원들 +INSERT INTO `user` (`name`, `email`, `password`, `status`) VALUES +('김아이스', 'ice.kim@icebang.site', '$2a$10$encrypted_password_hash1', 'ACTIVE'), +('박방방', 'bang.park@icebang.site', '$2a$10$encrypted_password_hash2', 'ACTIVE'), +('이트렌드', 'trend.lee@icebang.site', '$2a$10$encrypted_password_hash3', 'ACTIVE'), +('정바이럴', 'viral.jung@icebang.site', '$2a$10$encrypted_password_hash4', 'ACTIVE'), +('최콘텐츠', 'content.choi@icebang.site', '$2a$10$encrypted_password_hash5', 'ACTIVE'), +('홍크롤러', 'crawler.hong@icebang.site', '$2a$10$encrypted_password_hash6', 'ACTIVE'), +('서데이터', 'data.seo@icebang.site', '$2a$10$encrypted_password_hash7', 'ACTIVE'), +('윤워크플로우', 'workflow.yoon@icebang.site', '$2a$10$encrypted_password_hash8', 'ACTIVE'), +('시스템관리자', 'admin@icebang.site', '$2a$10$encrypted_password_hash9', 'ACTIVE'); + +-- 7. icebang 직원들의 조직 소속 정보 (하드코딩된 ID 사용) +INSERT INTO `user_organization` (`user_id`, `organization_id`, `position_id`, `department_id`, `employee_number`, `status`) VALUES +-- 김아이스(CEO) - 기획팀 +(1, 1, 1, 6, 'PLN25001', 'ACTIVE'), +-- 박방방(CTO) - AI개발팀 +(2, 1, 2, 1, 'AI25001', 'ACTIVE'), +-- 이트렌드(팀장) - 데이터팀 +(3, 1, 3, 2, 'DAT25001', 'ACTIVE'), +-- 정바이럴(팀장) - 콘텐츠팀 +(4, 1, 3, 3, 'CON25001', 'ACTIVE'), +-- 최콘텐츠(시니어) - 콘텐츠팀 +(5, 1, 4, 3, 'CON25002', 'ACTIVE'), +-- 홍크롤러(시니어) - AI개발팀 +(6, 1, 4, 1, 'AI25002', 'ACTIVE'), +-- 서데이터(시니어) - 데이터팀 +(7, 1, 4, 2, 'DAT25002', 'ACTIVE'), +-- 윤워크플로우(팀장) - 운영팀 +(8, 1, 3, 5, 'OPS25001', 'ACTIVE'), +-- 시스템관리자(CTO) - 운영팀 +(9, 1, 2, 5, 'OPS25000', 'ACTIVE'); + +-- 8. 역할별 권한 설정 + +-- SUPER_ADMIN - 모든 권한 (전역) +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 1, id +FROM permission; + +-- ORG_ADMIN - 조직 관련 모든 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 2, id +FROM permission +WHERE resource NOT LIKE 'system.%'; + +-- AI_ENGINEER - AI 및 워크플로우 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 3, id +FROM permission +WHERE resource LIKE 'ai.%' + OR resource LIKE 'workflows.%' + OR resource LIKE 'crawling.%' + OR resource IN ('content.read', 'trends.read'); + +-- DATA_SCIENTIST - 데이터 및 분석 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 4, id +FROM permission +WHERE resource LIKE 'trends.%' + OR resource LIKE 'crawling.%' + OR resource LIKE 'ai.models.read' + OR resource IN ('content.read', 'workflows.read'); + +-- CONTENT_MANAGER - 콘텐츠 관리 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 5, id +FROM permission +WHERE resource LIKE 'content.%' + OR resource LIKE 'campaigns.%' + OR resource IN ('trends.read', 'workflows.read'); + +-- MARKETING_SPECIALIST - 마케팅 및 캠페인 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 6, id +FROM permission +WHERE resource LIKE 'campaigns.%' + OR resource IN ('content.read', 'trends.read', 'users.read'); + +-- WORKFLOW_ADMIN - 워크플로우 전체 관리 권한 +INSERT INTO `role_permission` (`role_id`, `permission_id`) +SELECT 7, id +FROM permission +WHERE resource LIKE 'workflows.%' + OR resource LIKE 'ai.%' + OR resource LIKE 'crawling.%' + OR resource LIKE 'system.%' + OR resource IN ('content.read', 'trends.read'); + +-- 9. 
icebang 직원별 역할 할당 + +-- 김아이스(CEO) - ORG_ADMIN +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (2, 1); + +-- 박방방(CTO) - AI_ENGINEER + WORKFLOW_ADMIN +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (3, 2), (7, 2); + +-- 정바이럴(콘텐츠팀장) - CONTENT_MANAGER +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (5, 4); + +-- 이트렌드(데이터팀장) - DATA_SCIENTIST +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (4, 3); + +-- 시스템관리자 - SUPER_ADMIN +INSERT INTO `user_role` (`role_id`, `user_organization_id`) VALUES (1, 9); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/01-insert-internal-users.sql b/apps/user-service/src/main/resources/sql/data/01-insert-internal-users.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/01-insert-internal-users.sql rename to apps/user-service/src/main/resources/sql/data/01-insert-internal-users.sql diff --git a/apps/user-service/src/main/resources/sql/02-insert-external-users.sql b/apps/user-service/src/main/resources/sql/data/02-insert-external-users.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/02-insert-external-users.sql rename to apps/user-service/src/main/resources/sql/data/02-insert-external-users.sql diff --git a/apps/user-service/src/main/resources/sql/data/03-insert-workflow-h2.sql b/apps/user-service/src/main/resources/sql/data/03-insert-workflow-h2.sql new file mode 100644 index 00000000..a4d4129b --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/03-insert-workflow-h2.sql @@ -0,0 +1,110 @@ +-- =================================================================== +-- 워크플로우 관련 데이터 초기화 (H2 전용) +-- =================================================================== +-- 참조 관계 역순으로 데이터 삭제 +DELETE FROM `schedule`; +DELETE FROM `job_task`; +DELETE FROM `workflow_job`; +DELETE FROM `task`; +DELETE FROM `job`; +DELETE FROM `workflow`; + +-- =================================================================== +-- 워크플로우 정적 데이터 삽입 +-- =================================================================== + +-- 워크플로우 생성 (ID: 1) - H2에서는 NOW() 사용 +INSERT INTO `workflow` (`id`, `name`, `description`, `created_by`, `default_config`) VALUES + (1, '상품 분석 및 블로그 자동 발행', '키워드 검색부터 상품 분석 후 블로그 발행까지의 자동화 프로세스', 1, + JSON_OBJECT('1',json_object('tag','naver'),'9',json_object('tag','blogger','blog_id', '', 'blog_pw', ''))) +ON DUPLICATE KEY UPDATE + name = VALUES(name), + description = VALUES(description), + updated_at = NOW(); +-- Job 생성 (ID: 1, 2) - H2에서는 NOW() 사용 +INSERT INTO `job` (`id`, `name`, `description`, `created_by`) VALUES + (1, '상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', 1), + (2, '블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', 1) + ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), updated_at = NOW(); + +-- Task 생성 (ID: 1 ~ 9) - H2에서는 NOW() 사용 +INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES + (1, '키워드 검색 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/keywords/search', 'method', 'POST', + 'body', JSON_OBJECT('tag', 'String') -- { "tag": str } + )), + (2, '상품 검색 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/search', 'method', 'POST', + 'body', JSON_OBJECT('keyword', 'String') -- { "keyword": str } + )), + (3, '상품 매칭 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/match', 'method', 'POST', + 'body', JSON_OBJECT( -- { keyword: str, search_results: List } + 'keyword', 'String', + 'search_results', 'List' + ) + )), + (4, '상품 유사도 분석 태스크', 'FastAPI', 
JSON_OBJECT( + 'endpoint', '/products/similarity', 'method', 'POST', + 'body', JSON_OBJECT( -- { keyword: str, matched_products: List, search_results: List } + 'keyword', 'String', + 'matched_products', 'List', + 'search_results', 'List' + ) + )), + (5, '상품 정보 크롤링 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/crawl', 'method', 'POST', + 'body', JSON_OBJECT('product_urls', 'List') -- { "product_urls": List[str] } 수정됨 + )), + (6, 'S3 업로드 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/s3-upload', 'method', 'POST', + 'body', JSON_OBJECT( -- { keyword: str, crawled_products: List, base_folder: str } + 'keyword', 'String', + 'crawled_products', 'List', + 'base_folder', 'String' + ) + )), + (7, '상품 선택 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/products/select', 'method', 'POST', + 'body', JSON_OBJECT( -- { task_run_id: int, selection_criteria: str } + 'task_run_id', 'Integer', + 'selection_criteria', 'String' + ) + )), + -- RAG관련 request body는 추후에 결정될 예정 + (8, '블로그 RAG 생성 태스크', 'FastAPI', JSON_OBJECT('endpoint', '/blogs/rag/create', 'method', 'POST')), + (9, '블로그 발행 태스크', 'FastAPI', JSON_OBJECT( + 'endpoint', '/blogs/publish', 'method', 'POST', + 'body', JSON_OBJECT( -- { tag: str, blog_id: str, ... } + 'tag', 'String', + 'blog_id', 'String', + 'blog_pw', 'String', + 'blog_name', 'String', + 'post_title', 'String', + 'post_content', 'String', + 'post_tags', 'List' + ) + )) + ON DUPLICATE KEY UPDATE name = VALUES(name), type = VALUES(type), parameters = VALUES(parameters), updated_at = NOW(); + +-- =================================================================== +-- 워크플로우 구조 및 스케줄 데이터 삽입 +-- =================================================================== +-- 워크플로우-Job 연결 +INSERT INTO `workflow_job` (`workflow_id`, `job_id`, `execution_order`) VALUES + (1, 1, 1), + (1, 2, 2) + ON DUPLICATE KEY UPDATE execution_order = VALUES(execution_order); + +-- Job-Task 연결 +INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES + -- Job 1: 상품 분석 (키워드검색 → 상품검색 → 매칭 → 유사도 → 크롤링 → S3업로드 → 상품선택) + (1, 1, 1), (1, 2, 2), (1, 3, 3), (1, 4, 4), (1, 5, 5), (1, 6, 6), (1, 7, 7), + -- Job 2: 블로그 콘텐츠 생성 (RAG생성 → 발행) + (2, 8, 1), (2, 9, 2) + ON DUPLICATE KEY UPDATE execution_order = VALUES(execution_order); + +-- 스케줄 설정 (매일 오전 8시) - H2에서는 NOW() 사용 +INSERT INTO `schedule` (`workflow_id`, `cron_expression`, `is_active`, `created_by`) VALUES + (1, '0 0 8 * * ?', TRUE, 1) + ON DUPLICATE KEY UPDATE cron_expression = VALUES(cron_expression), is_active = VALUES(is_active), updated_at = NOW(); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql b/apps/user-service/src/main/resources/sql/data/03-insert-workflow.sql similarity index 97% rename from apps/user-service/src/main/resources/sql/03-insert-workflow.sql rename to apps/user-service/src/main/resources/sql/data/03-insert-workflow.sql index 379140b5..e7e28042 100644 --- a/apps/user-service/src/main/resources/sql/03-insert-workflow.sql +++ b/apps/user-service/src/main/resources/sql/data/03-insert-workflow.sql @@ -20,12 +20,12 @@ INSERT INTO `workflow` (`id`, `name`, `description`, `created_by`, `default_conf ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), - updated_at = NOW(); + updated_at = UTC_TIMESTAMP(); -- Job 생성 (ID: 1, 2) INSERT INTO `job` (`id`, `name`, `description`, `created_by`) VALUES (1, '상품 분석', '키워드 검색, 상품 크롤링 및 유사도 분석 작업', 1), (2, '블로그 콘텐츠 생성', '분석 데이터를 기반으로 RAG 콘텐츠 생성 및 발행 작업', 1) - ON DUPLICATE KEY UPDATE name = VALUES(name), description 
= VALUES(description), updated_at = NOW(); + ON DUPLICATE KEY UPDATE name = VALUES(name), description = VALUES(description), updated_at = UTC_TIMESTAMP(); -- Task 생성 (ID: 1 ~ 9) INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES @@ -85,7 +85,7 @@ INSERT INTO `task` (`id`, `name`, `type`, `parameters`) VALUES 'post_tags', 'List' ) )) - ON DUPLICATE KEY UPDATE name = VALUES(name), type = VALUES(type), parameters = VALUES(parameters), updated_at = NOW(); + ON DUPLICATE KEY UPDATE name = VALUES(name), type = VALUES(type), parameters = VALUES(parameters), updated_at = UTC_TIMESTAMP(); -- =================================================================== -- 워크플로우 구조 및 스케줄 데이터 삽입 @@ -107,4 +107,4 @@ INSERT INTO `job_task` (`job_id`, `task_id`, `execution_order`) VALUES -- 스케줄 설정 (매일 오전 8시) INSERT INTO `schedule` (`workflow_id`, `cron_expression`, `is_active`, `created_by`) VALUES (1, '0 0 8 * * ?', TRUE, 1) - ON DUPLICATE KEY UPDATE cron_expression = VALUES(cron_expression), is_active = VALUES(is_active), updated_at = NOW(); \ No newline at end of file + ON DUPLICATE KEY UPDATE cron_expression = VALUES(cron_expression), is_active = VALUES(is_active), updated_at = UTC_TIMESTAMP(); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history-h2.sql b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history-h2.sql new file mode 100644 index 00000000..fbff73da --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history-h2.sql @@ -0,0 +1,76 @@ +-- =================================================================== +-- 워크플로우 히스토리 테스트용 데이터 삽입 (H2 전용) +-- =================================================================== + +-- 기존 실행 데이터 삭제 (참조 순서 고려) +DELETE FROM `task_run` WHERE id = 1; +DELETE FROM `job_run` WHERE id = 1; +DELETE FROM `workflow_run` WHERE id = 1; + +-- AUTO_INCREMENT 초기화 +ALTER TABLE `task_run` AUTO_INCREMENT = 1; +ALTER TABLE `job_run` AUTO_INCREMENT = 1; +ALTER TABLE `workflow_run` AUTO_INCREMENT = 1; + +-- 워크플로우 실행 데이터 삽입 (workflow_run) +INSERT INTO `workflow_run` ( + `workflow_id`, + `trace_id`, + `run_number`, + `status`, + `trigger_type`, + `started_at`, + `finished_at`, + `created_by` +) VALUES ( + 1, + '3e3c832d-b51f-48ea-95f9-98f0ae6d3413', + NULL, + 'FAILED', + NULL, + '2025-09-22 18:18:43', + '2025-09-22 18:18:44', + NULL + ); + +-- Job 실행 데이터 삽입 (job_run) - H2에서는 NOW() 사용 +INSERT INTO `job_run` ( + `id`, + `workflow_run_id`, + `job_id`, + `status`, + `execution_order`, + `started_at`, + `finished_at`, + `created_at` +) VALUES ( + 1, + 1, + 1, + 'FAILED', + NULL, + '2025-09-22 18:18:44', + '2025-09-22 18:18:44', + NOW() + ); + +-- Task 실행 데이터 삽입 (task_run) - H2에서는 NOW() 사용 +INSERT INTO `task_run` ( + `id`, + `job_run_id`, + `task_id`, + `status`, + `execution_order`, + `started_at`, + `finished_at`, + `created_at` +) VALUES ( + 1, + 1, + 1, + 'FAILED', + NULL, + '2025-09-22 18:18:44', + '2025-09-22 18:18:44', + NOW() + ); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/04-insert-workflow-history.sql b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history.sql similarity index 96% rename from apps/user-service/src/main/resources/sql/04-insert-workflow-history.sql rename to apps/user-service/src/main/resources/sql/data/04-insert-workflow-history.sql index 814c3b5b..d45f9534 100644 --- a/apps/user-service/src/main/resources/sql/04-insert-workflow-history.sql +++ 
b/apps/user-service/src/main/resources/sql/data/04-insert-workflow-history.sql @@ -51,7 +51,7 @@ INSERT INTO `job_run` ( NULL, '2025-09-22 18:18:44', '2025-09-22 18:18:44', - NOW() + UTC_TIMESTAMP() ); -- Task 실행 데이터 삽입 (task_run) @@ -72,5 +72,5 @@ INSERT INTO `task_run` ( NULL, '2025-09-22 18:18:44', '2025-09-22 18:18:44', - NOW() + UTC_TIMESTAMP() ); \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data-h2.sql b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data-h2.sql new file mode 100644 index 00000000..dbdf155a --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data-h2.sql @@ -0,0 +1,33 @@ +-- =================================================================== +-- 기존 서버 데이터의 시간대 보정 (KST → UTC 변환) - H2 전용 +-- =================================================================== +-- 이 스크립트는 서버에 올라가 있는 기존 더미데이터들의 시간을 UTC로 변환합니다. +-- 한국시간(KST, +09:00)으로 저장된 데이터를 UTC(+00:00)로 변환 + +-- =================================================================== +-- 1. 워크플로우 실행 관련 테이블 +-- =================================================================== + +-- workflow_run 테이블 시간 보정 (H2에서는 테이블이 없을 수 있으므로 조건부 실행) +-- UPDATE `workflow_run` SET +-- started_at = CASE +-- WHEN started_at IS NOT NULL THEN DATEADD('HOUR', -9, started_at) +-- ELSE NULL +-- END, +-- finished_at = CASE +-- WHEN finished_at IS NOT NULL THEN DATEADD('HOUR', -9, finished_at) +-- ELSE NULL +-- END, +-- created_at = CASE +-- WHEN created_at IS NOT NULL THEN DATEADD('HOUR', -9, created_at) +-- ELSE NULL +-- END +-- WHERE started_at IS NOT NULL +-- OR finished_at IS NOT NULL +-- OR created_at IS NOT NULL; + +-- =================================================================== +-- 완료 메시지 +-- =================================================================== +-- 이 스크립트 실행 후 모든 시간 데이터가 UTC 기준으로 변환됩니다. +-- 애플리케이션에서 Instant를 사용하여 UTC 시간으로 처리됩니다. \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data.sql b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data.sql new file mode 100644 index 00000000..be6fdc57 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/data/05-fix-timezone-data.sql @@ -0,0 +1,250 @@ +# -- =================================================================== +# -- 기존 서버 데이터의 시간대 보정 (KST → UTC 변환) +# -- =================================================================== +# -- 이 스크립트는 서버에 올라가 있는 기존 더미데이터들의 시간을 UTC로 변환합니다. +# -- 한국시간(KST, +09:00)으로 저장된 데이터를 UTC(+00:00)로 변환 +# +# -- =================================================================== +# -- 1. 
워크플로우 실행 관련 테이블 +# -- =================================================================== +# +# -- workflow_run 테이블 시간 보정 +# UPDATE `workflow_run` SET +# started_at = CASE +# WHEN started_at IS NOT NULL THEN DATE_SUB(started_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# finished_at = CASE +# WHEN finished_at IS NOT NULL THEN DATE_SUB(finished_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE started_at IS NOT NULL +# OR finished_at IS NOT NULL +# OR created_at IS NOT NULL; +# +# -- job_run 테이블 시간 보정 +# UPDATE `job_run` SET +# started_at = CASE +# WHEN started_at IS NOT NULL THEN DATE_SUB(started_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# finished_at = CASE +# WHEN finished_at IS NOT NULL THEN DATE_SUB(finished_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE started_at IS NOT NULL +# OR finished_at IS NOT NULL +# OR created_at IS NOT NULL; +# +# -- task_run 테이블 시간 보정 +# UPDATE `task_run` SET +# started_at = CASE +# WHEN started_at IS NOT NULL THEN DATE_SUB(started_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# finished_at = CASE +# WHEN finished_at IS NOT NULL THEN DATE_SUB(finished_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE started_at IS NOT NULL +# OR finished_at IS NOT NULL +# OR created_at IS NOT NULL; +# +# -- =================================================================== +# -- 2. 마스터 데이터 테이블들 +# -- =================================================================== +# +# -- workflow 테이블 시간 보정 +# UPDATE `workflow` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- job 테이블 시간 보정 +# UPDATE `job` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- task 테이블 시간 보정 +# UPDATE `task` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- schedule 테이블 시간 보정 +# UPDATE `schedule` SET +# last_run_at = CASE +# WHEN last_run_at IS NOT NULL THEN DATE_SUB(last_run_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE last_run_at IS NOT NULL +# OR created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- =================================================================== +# -- 3. 
사용자 관련 테이블들 +# -- =================================================================== +# +# -- user 테이블 시간 보정 +# UPDATE `user` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# joined_at = CASE +# WHEN joined_at IS NOT NULL THEN DATE_SUB(joined_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL +# OR joined_at IS NOT NULL; +# +# -- user_organization 테이블 시간 보정 +# UPDATE `user_organization` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- organization 테이블 시간 보정 +# UPDATE `organization` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- =================================================================== +# -- 4. 기타 시스템 테이블들 +# -- =================================================================== +# +# -- permission 테이블 시간 보정 +# UPDATE `permission` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- execution_log 테이블 시간 보정 +# UPDATE `execution_log` SET +# executed_at = CASE +# WHEN executed_at IS NOT NULL THEN DATE_SUB(executed_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# reserved5 = CASE +# WHEN reserved5 IS NOT NULL THEN DATE_SUB(reserved5, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE executed_at IS NOT NULL +# OR reserved5 IS NOT NULL; +# +# -- task_io_data 테이블 시간 보정 +# UPDATE `task_io_data` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL; +# +# -- config 테이블 시간 보정 +# UPDATE `config` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL; +# +# -- category 테이블 시간 보정 +# UPDATE `category` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- user_config 테이블 시간 보정 +# UPDATE `user_config` SET +# created_at = CASE +# WHEN created_at IS NOT NULL THEN DATE_SUB(created_at, INTERVAL 9 HOUR) +# ELSE NULL +# END, +# updated_at = CASE +# WHEN updated_at IS NOT NULL THEN DATE_SUB(updated_at, INTERVAL 9 HOUR) +# ELSE NULL +# END +# WHERE created_at IS NOT NULL +# OR updated_at IS NOT NULL; +# +# -- =================================================================== +# -- 완료 메시지 +# -- =================================================================== +# -- 이 스크립트 실행 후 모든 시간 데이터가 UTC 기준으로 변환됩니다. +# -- 애플리케이션에서 Instant를 사용하여 UTC 시간으로 처리됩니다. 
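The commented-out migration script above shifts legacy KST wall-clock values back by nine hours. As a minimal sketch, the same correction expressed on the application side looks as follows, assuming the stored values really were Asia/Seoul wall-clock times; the class name is illustrative and the sample timestamp is taken from the dummy data in 04-insert-workflow-history.sql.

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;

public class KstToUtcSketch {
    public static void main(String[] args) {
        // Legacy value stored as Korean wall-clock time (KST, UTC+09:00)
        LocalDateTime storedAsKst = LocalDateTime.parse("2025-09-22T18:18:44");

        // Interpreting it in Asia/Seoul and converting to Instant is equivalent to
        // the DATE_SUB(..., INTERVAL 9 HOUR) / DATEADD('HOUR', -9, ...) used above
        Instant utc = storedAsKst.atZone(ZoneId.of("Asia/Seoul")).toInstant();

        System.out.println(utc); // prints 2025-09-22T09:18:44Z
    }
}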
\ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/00-drop-h2.sql b/apps/user-service/src/main/resources/sql/schema/00-drop-h2.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/00-drop-h2.sql rename to apps/user-service/src/main/resources/sql/schema/00-drop-h2.sql diff --git a/apps/user-service/src/main/resources/sql/00-drop-maria.sql b/apps/user-service/src/main/resources/sql/schema/00-drop-maria.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/00-drop-maria.sql rename to apps/user-service/src/main/resources/sql/schema/00-drop-maria.sql diff --git a/apps/user-service/src/main/resources/sql/01-schema.sql b/apps/user-service/src/main/resources/sql/schema/01-schema.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/01-schema.sql rename to apps/user-service/src/main/resources/sql/schema/01-schema.sql diff --git a/apps/user-service/src/main/resources/sql/02-quartz-schema.sql b/apps/user-service/src/main/resources/sql/schema/02-quartz-schema.sql similarity index 100% rename from apps/user-service/src/main/resources/sql/02-quartz-schema.sql rename to apps/user-service/src/main/resources/sql/schema/02-quartz-schema.sql diff --git a/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql b/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql new file mode 100644 index 00000000..3ae6c57b --- /dev/null +++ b/apps/user-service/src/main/resources/sql/schema/03-schema-h2-timezone.sql @@ -0,0 +1,51 @@ +-- =================================================================== +-- H2 전용 UTC Timezone 처리를 위한 스키마 수정 (v0.5) +-- =================================================================== +-- H2 데이터베이스는 MariaDB와 다른 문법을 사용하므로 별도 처리 + +-- 모든 timestamp 컬럼의 기본값 제거 (H2에서는 MODIFY COLUMN 문법이 다름) +-- H2에서는 ALTER TABLE table_name ALTER COLUMN column_name 문법 사용 +-- H2 MariaDB 모드에서는 백틱으로 테이블명을 감싸야 함 + +ALTER TABLE `permission` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `permission` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `organization` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `organization` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `user` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `user` ALTER COLUMN updated_at SET DEFAULT NULL; +ALTER TABLE `user` ALTER COLUMN joined_at SET DEFAULT NULL; + +ALTER TABLE `user_organization` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `user_organization` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `workflow` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `workflow` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `schedule` ALTER COLUMN last_run_at SET DEFAULT NULL; +ALTER TABLE `schedule` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `schedule` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `job` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `job` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `task` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `task` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `execution_log` ALTER COLUMN executed_at SET DEFAULT NULL; +ALTER TABLE `execution_log` ALTER COLUMN reserved5 SET DEFAULT NULL; + +ALTER TABLE `task_io_data` ALTER COLUMN created_at SET DEFAULT NULL; + +-- config 테이블이 존재하는지 확인 후 ALTER 실행 +-- ALTER TABLE `config` ALTER COLUMN created_at SET DEFAULT NULL; + +ALTER TABLE `category` ALTER COLUMN created_at SET DEFAULT NULL; 
+ALTER TABLE `category` ALTER COLUMN updated_at SET DEFAULT NULL; + +ALTER TABLE `user_config` ALTER COLUMN created_at SET DEFAULT NULL; +ALTER TABLE `user_config` ALTER COLUMN updated_at SET DEFAULT NULL; + +-- 워크플로우 실행 테이블들 (기본값이 이미 NULL이므로 변경 불필요) +-- workflow_run, job_run, task_run 테이블은 이미 DEFAULT 값이 없음 \ No newline at end of file diff --git a/apps/user-service/src/main/resources/sql/schema/03-schema-mariadb-timezone.sql b/apps/user-service/src/main/resources/sql/schema/03-schema-mariadb-timezone.sql new file mode 100644 index 00000000..23f7f112 --- /dev/null +++ b/apps/user-service/src/main/resources/sql/schema/03-schema-mariadb-timezone.sql @@ -0,0 +1,49 @@ +-- =================================================================== +-- MariaDB 전용 UTC Timezone 처리를 위한 스키마 수정 (v0.5) +-- =================================================================== +-- MariaDB에서는 UTC_TIMESTAMP() 함수를 사용할 수 있지만, +-- 애플리케이션에서 Instant로 처리하므로 기본값을 제거 + +-- 모든 timestamp 컬럼의 기본값을 UTC 기준으로 변경 +ALTER TABLE `permission` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `permission` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `organization` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `organization` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `user` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `user` MODIFY COLUMN updated_at timestamp NULL; +ALTER TABLE `user` MODIFY COLUMN joined_at timestamp NULL; + +ALTER TABLE `user_organization` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `user_organization` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `workflow` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `workflow` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `schedule` MODIFY COLUMN last_run_at timestamp NULL; +ALTER TABLE `schedule` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `schedule` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `job` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `job` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `task` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `task` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `execution_log` MODIFY COLUMN executed_at timestamp NULL; +ALTER TABLE `execution_log` MODIFY COLUMN reserved5 timestamp NULL; + +ALTER TABLE `task_io_data` MODIFY COLUMN created_at timestamp NULL; + +-- config 테이블이 존재하지 않아 ALTER 실행 불가 +-- ALTER TABLE `config` MODIFY COLUMN created_at timestamp NULL; + +ALTER TABLE `category` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `category` MODIFY COLUMN updated_at timestamp NULL; + +ALTER TABLE `user_config` MODIFY COLUMN created_at timestamp NULL; +ALTER TABLE `user_config` MODIFY COLUMN updated_at timestamp NULL; + +-- 워크플로우 실행 테이블 (이미 DEFAULT 값이 없으므로 변경 불필요) +-- workflow_run, job_run, task_run 테이블들은 기본값이 이미 적절히 설정됨 \ No newline at end of file diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java index 67e6820a..636b3455 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserLogoutFlowE2eTest.java @@ -15,7 +15,10 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, + value = { + "classpath:sql/data/00-truncate.sql", + 
"classpath:sql/data/01-insert-internal-users.sql" + }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("사용자 로그아웃 플로우 E2E 테스트") @E2eTest diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java index 1bc1903b..fd3eee60 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/UserRegistrationFlowE2eTest.java @@ -15,7 +15,10 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, + value = { + "classpath:sql/data/00-truncate.sql", + "classpath:sql/data/01-insert-internal-users.sql" + }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("사용자 등록 플로우 E2E 테스트") class UserRegistrationFlowE2eTest extends E2eTestSupport { diff --git a/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java b/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java index 115bec64..3d5ca4b8 100644 --- a/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java +++ b/apps/user-service/src/test/java/site/icebang/e2e/scenario/WorkflowCreateFlowE2eTest.java @@ -2,6 +2,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import java.time.Instant; import java.util.HashMap; import java.util.Map; @@ -14,7 +15,10 @@ import site.icebang.e2e.setup.support.E2eTestSupport; @Sql( - value = {"classpath:sql/00-truncate.sql", "classpath:sql/01-insert-internal-users.sql"}, + value = { + "classpath:sql/data/00-truncate.sql", + "classpath:sql/data/01-insert-internal-users.sql" + }, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_CLASS) @DisplayName("워크플로우 생성 플로우 E2E 테스트") @E2eTest @@ -216,4 +220,80 @@ private void performUserLogin() { logSuccess("사용자 로그인 완료"); } + + @Test + @DisplayName("워크플로우 생성 시 UTC 시간 기반으로 생성 시간이 저장되는지 검증") + void createWorkflow_utc_time_validation() throws Exception { + logStep(1, "사용자 로그인"); + performUserLogin(); + + logStep(2, "워크플로우 생성 전 현재 시간 기록 (UTC 기준)"); + Instant beforeCreate = Instant.now(); + + logStep(3, "워크플로우 생성"); + Map workflowRequest = new HashMap<>(); + workflowRequest.put("name", "UTC 시간 검증 워크플로우"); + workflowRequest.put("description", "UTC 시간대 보장을 위한 테스트 워크플로우"); + workflowRequest.put("search_platform", "naver"); + workflowRequest.put("is_enabled", true); + + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + + HttpEntity> entity = new HttpEntity<>(workflowRequest, headers); + + ResponseEntity createResponse = + restTemplate.postForEntity(getV0ApiUrl("/workflows"), entity, Map.class); + + assertThat(createResponse.getStatusCode()).isEqualTo(HttpStatus.CREATED); + assertThat((Boolean) createResponse.getBody().get("success")).isTrue(); + + logStep(4, "생성 직후 시간 기록 (UTC 기준)"); + Instant afterCreate = Instant.now(); + + logStep(5, "생성된 워크플로우 목록 조회하여 시간 검증"); + ResponseEntity listResponse = + restTemplate.getForEntity(getV0ApiUrl("/workflows"), Map.class); + + assertThat(listResponse.getStatusCode()).isEqualTo(HttpStatus.OK); + assertThat((Boolean) listResponse.getBody().get("success")).isTrue(); + + @SuppressWarnings("unchecked") + Map data = (Map) listResponse.getBody().get("data"); + + logDebug("API 응답 구조: " + data); + + @SuppressWarnings("unchecked") + java.util.List> 
diff --git a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java
index dd5e0d1a..3b7ce243 100644
--- a/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java
+++ b/apps/user-service/src/test/java/site/icebang/e2e/setup/config/E2eTestConfiguration.java
@@ -48,7 +48,7 @@ GenericContainer lokiContainer(Network network) {
   static void configureProperties(
       DynamicPropertyRegistry registry, MariaDBContainer mariadb, GenericContainer loki) {
     // MariaDB connection settings
-    registry.add("spring.datasource.url", mariadb::getJdbcUrl);
+    registry.add("spring.datasource.url", () -> mariadb.getJdbcUrl() + "?serverTimezone=UTC");
     registry.add("spring.datasource.username", mariadb::getUsername);
     registry.add("spring.datasource.password", mariadb::getPassword);
     registry.add("spring.datasource.driver-class-name", () -> "org.mariadb.jdbc.Driver");
diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java
index 276ce7c8..333fb55d 100644
--- a/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java
+++ b/apps/user-service/src/test/java/site/icebang/integration/tests/auth/AuthApiIntegrationTest.java
@@ -27,7 +27,7 @@
 import site.icebang.integration.setup.support.IntegrationTestSupport;
 
 @Sql(
-    value = "classpath:sql/01-insert-internal-users.sql",
+    value = "classpath:sql/data/01-insert-internal-users.sql",
     executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
 @Transactional
 class AuthApiIntegrationTest extends IntegrationTestSupport {
diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java
index 666a8ea5..44ffd1b4 100644
--- a/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java
+++ b/apps/user-service/src/test/java/site/icebang/integration/tests/organization/OrganizationApiIntegrationTest.java
@@ -22,8 +22,8 @@
 
 @Sql(
     value = {
-      "classpath:sql/01-insert-internal-users.sql",
-      "classpath:sql/02-insert-external-users.sql"
+      "classpath:sql/data/01-insert-internal-users.sql",
+      "classpath:sql/data/02-insert-external-users.sql"
     },
     executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
 @Transactional
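The E2eTestConfiguration change pins the Testcontainers MariaDB datasource to UTC through the JDBC URL. A complementary safeguard, shown here purely as an illustration and not part of this PR, is to pin the test JVM's default zone as well, so that any code path formatting through the default zone cannot reintroduce a local offset. A minimal sketch, assuming JUnit 5 and a hypothetical shared base class:

```java
// Sketch only (not in the diff): pins the test JVM itself to UTC as a belt-and-braces measure.
import java.util.TimeZone;

import org.junit.jupiter.api.BeforeAll;

abstract class UtcPinnedTestBase {

  @BeforeAll
  static void pinJvmToUtc() {
    // Instant itself is zone-agnostic; this only affects APIs that fall back to the default zone.
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
  }
}
```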
diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java
index 4703e9f6..f2be6c1f 100644
--- a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java
+++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowHistoryApiIntegrationTest.java
@@ -2,6 +2,7 @@
 import static com.epages.restdocs.apispec.MockMvcRestDocumentationWrapper.document;
 import static com.epages.restdocs.apispec.ResourceDocumentation.*;
+import static org.hamcrest.Matchers.matchesPattern;
 import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
 import static org.springframework.restdocs.operation.preprocess.Preprocessors.*;
 import static org.springframework.restdocs.payload.PayloadDocumentation.*;
 
@@ -20,9 +21,9 @@
 
 @Sql(
     value = {
-      "classpath:sql/01-insert-internal-users.sql",
-      "classpath:sql/03-insert-workflow.sql",
-      "classpath:sql/04-insert-workflow-history.sql"
+      "classpath:sql/data/01-insert-internal-users.sql",
+      "classpath:sql/data/03-insert-workflow-h2.sql",
+      "classpath:sql/data/04-insert-workflow-history-h2.sql"
     },
     executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
 @Transactional
@@ -61,6 +62,16 @@ void getWorkflowRunDetail_success() throws Exception {
         .andExpect(jsonPath("$.data.workflowRun.durationMs").value(1000))
         .andExpect(jsonPath("$.data.workflowRun.createdBy").isEmpty())
         .andExpect(jsonPath("$.data.workflowRun.createdAt").exists())
+        // Verify UTC time format (timezone guarantee) - fractional seconds (microseconds) allowed
+        .andExpect(
+            jsonPath("$.data.workflowRun.startedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.workflowRun.finishedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.workflowRun.createdAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
         // Check the jobRuns array
         .andExpect(jsonPath("$.data.jobRuns").isArray())
         .andExpect(jsonPath("$.data.jobRuns.length()").value(1))
@@ -75,6 +86,13 @@ void getWorkflowRunDetail_success() throws Exception {
         .andExpect(jsonPath("$.data.jobRuns[0].startedAt").value("2025-09-22 18:18:44"))
         .andExpect(jsonPath("$.data.jobRuns[0].finishedAt").value("2025-09-22 18:18:44"))
         .andExpect(jsonPath("$.data.jobRuns[0].durationMs").value(0))
+        // Verify JobRun UTC time format - fractional seconds allowed
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].startedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].finishedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
         // Check the taskRuns array
         .andExpect(jsonPath("$.data.jobRuns[0].taskRuns").isArray())
         .andExpect(jsonPath("$.data.jobRuns[0].taskRuns.length()").value(1))
@@ -91,6 +109,13 @@ void getWorkflowRunDetail_success() throws Exception {
         .andExpect(
             jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt").value("2025-09-22 18:18:44"))
         .andExpect(jsonPath("$.data.jobRuns[0].taskRuns[0].durationMs").value(0))
+        // Verify TaskRun UTC time format - fractional seconds allowed
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
         .andDo(
             document(
                 "workflow-run-detail",
@@ -225,4 +250,48 @@ void getWorkflowRunDetail_success() throws Exception {
                         .description("HTTP 상태"))
                     .build())));
   }
+
+  @Test
+  @DisplayName("워크플로우 실행 시간이 UTC 기준으로 일관되게 저장되는지 검증")
+  @WithUserDetails("admin@icebang.site")
+  void getWorkflowRunDetail_utc_time_validation() throws Exception {
+    // given
+    Long runId = 1L;
+
+    // when & then - verify UTC time format and time ordering
+    mockMvc
+        .perform(
+            get(getApiUrlForDocs("/v0/workflow-runs/{runId}"), runId)
+                .header("Origin", "https://admin.icebang.site")
+                .header("Referer", "https://admin.icebang.site/"))
+        .andExpect(status().isOk())
+        .andExpect(jsonPath("$.success").value(true))
+        // Verify WorkflowRun times are in UTC format - fractional seconds allowed
+        .andExpect(
+            jsonPath("$.data.workflowRun.startedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.workflowRun.finishedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.workflowRun.createdAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        // Verify JobRun times are in UTC format - fractional seconds allowed
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].startedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].finishedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        // Verify TaskRun times are in UTC format - fractional seconds allowed
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].taskRuns[0].startedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        .andExpect(
+            jsonPath("$.data.jobRuns[0].taskRuns[0].finishedAt")
+                .value(matchesPattern("\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}(\\.\\d+)?")))
+        // Logical time-order check (startedAt <= finishedAt)
+        .andExpect(jsonPath("$.data.workflowRun.startedAt").value("2025-09-22 18:18:43"))
+        .andExpect(jsonPath("$.data.workflowRun.finishedAt").value("2025-09-22 18:18:44"));
+  }
 }
diff --git a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java
index 2daa4db1..23c4eaa4 100644
--- a/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java
+++ b/apps/user-service/src/test/java/site/icebang/integration/tests/workflow/WorkflowRunApiIntegrationTest.java
@@ -22,7 +22,10 @@
 import site.icebang.integration.setup.support.IntegrationTestSupport;
 
 @Sql(
-    value = {"classpath:sql/01-insert-internal-users.sql", "classpath:sql/03-insert-workflow.sql"},
+    value = {
+      "classpath:sql/data/01-insert-internal-users.sql",
+      "classpath:sql/data/03-insert-workflow-h2.sql"
+    },
     executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
 @Transactional
 public class WorkflowRunApiIntegrationTest extends IntegrationTestSupport {
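Note that the two test layers expect different renderings: the MockMvc tests match `yyyy-MM-dd HH:mm:ss[.ffffff]` strings in the run-detail payload, while the new E2E assertion expects `createdAt` as an ISO-8601 UTC string ending in `Z`. The project's actual ObjectMapper setup is not shown in this diff, but the `Z`-suffixed form is what the standard Jackson `java.time` configuration produces for an `Instant` field; a minimal sketch, offered only as an illustration of that behavior:

```java
// Sketch only: standard Jackson configuration under which an Instant serializes as an
// ISO-8601 UTC string with a trailing 'Z' (e.g. "2025-09-25T04:48:40Z").
import java.time.Instant;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class InstantJsonSketch {

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper =
        new ObjectMapper()
            .registerModule(new JavaTimeModule()) // java.time (JSR-310) serializers
            .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); // ISO-8601 strings, not epoch numbers

    // Prints something like {"createdAt":"2025-09-25T04:48:40Z"}
    System.out.println(
        mapper.writeValueAsString(Map.of("createdAt", Instant.parse("2025-09-25T04:48:40Z"))));
  }
}
```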