Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 13 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,16 @@ jobs:
--health-timeout 5s
--health-retries 10

redis:
image: redis:7-alpine
ports:
- 6379:6379
options: >-
--health-cmd "redis-cli ping || exit 1"
--health-interval 10s
--health-timeout 5s
--health-retries 10

kafka:
image: confluentinc/cp-kafka:7.6.1
ports:
Expand Down Expand Up @@ -92,12 +102,13 @@ jobs:

- name: 🕐 Wait for Kafka and Elasticsearch
run: |
echo "Waiting for Elasticsearch and Kafka to be ready..."
echo "Waiting for Elasticsearch, Kafka, and Redis to be ready..."
for i in {1..25}; do
es_ready=$(curl -fsS http://localhost:9200/_cluster/health > /dev/null && echo "yes" || echo "no")
kafka_ready=$(nc -z localhost 9092 && echo "yes" || echo "no")
redis_ready=$(nc -z localhost 6379 && echo "yes" || echo "no")

if [ "$es_ready" = "yes" ] && [ "$kafka_ready" = "yes" ]; then
if [ "$es_ready" = "yes" ] && [ "$kafka_ready" = "yes" ] && [ "$redis_ready" = "yes" ]; then
echo "✅ All services are ready!"
exit 0
fi
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
- **카테고리 분류**: TECH, LIFE, TRAVEL, FOOD, HOBBY 카테고리
- **실시간 통계**: 조회수, 좋아요 수, 댓글 수 추적
- **도메인 이벤트**: 비즈니스 이벤트 기반 사이드 이펙트 처리
- **검색 캐시**: Elasticsearch 키워드 검색 결과를 Redis에 3분 TTL로 캐싱하여 응답 지연 최소화

### 댓글 시스템
- **계층형 댓글**: 대댓글 지원으로 깊이 있는 토론
Expand Down
4 changes: 4 additions & 0 deletions build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,10 @@ jmh {
resultsFile.set(layout.buildDirectory.file("reports/jmh/post-search.json"))
}

tasks.named<Zip>("jmhJar") {
isZip64 = true
}



tasks.named<Test>("test") {
Expand Down
35 changes: 29 additions & 6 deletions src/gatling/scala/dooya/see/search/PostSearchSimulation.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,21 +17,40 @@ class PostSearchSimulation extends Simulation {
.acceptHeader("application/json")
.contentTypeHeader("application/json")

private val feeder = Iterator.continually(Map(
"keyword" -> keywords(Random.nextInt(keywords.size))
))
private val feeder = Iterator.continually {
val baseKeyword = keywords(Random.nextInt(keywords.size))
Map(
"hotKeyword" -> baseKeyword,
"coldKeyword" -> s"$baseKeyword-${System.nanoTime()}"
)
}

private val dbSearch = exec(
http("db-search")
.get("/api/posts/search")
.queryParam("keyword", "${keyword}")
.queryParam("keyword", "${hotKeyword}")
.check(status.is(200))
)

private val esSearch = exec(
http("es-search")
http("es-cold-search")
.get("/api/v1/posts/elasticsearch")
.queryParam("keyword", "${coldKeyword}")
.check(status.is(200))
)

private val esCacheWarm = exec(
http("es-cache-warm")
.get("/api/v1/posts/elasticsearch")
.queryParam("keyword", "${hotKeyword}")
.check(status.is(200))
.silent
)

private val esCacheHit = exec(
http("es-cache-hit")
.get("/api/v1/posts/elasticsearch")
.queryParam("keyword", "${keyword}")
.queryParam("keyword", "${hotKeyword}")
.check(status.is(200))
)

Expand All @@ -44,6 +63,10 @@ class PostSearchSimulation extends Simulation {
.exec(dbSearch)
.pause(500.milliseconds, 2.seconds)
.exec(esSearch)
.pause(200.milliseconds, 500.milliseconds)
.exec(esCacheWarm)
.pause(100.milliseconds)
.exec(esCacheHit)

setUp(
scenarioBuilder.inject(
Expand Down
98 changes: 74 additions & 24 deletions src/jmh/java/dooya/see/benchmark/PostSearchBenchmark.java
Original file line number Diff line number Diff line change
@@ -1,12 +1,25 @@
package dooya.see.benchmark;

import dooya.see.SeeApplication;
import dooya.see.application.post.provided.PostFinder;
import dooya.see.application.post.dto.PostSearchResult;
import dooya.see.application.post.provided.PostFinder;
import dooya.see.application.post.required.PostSearchCacheRepository;
import dooya.see.domain.post.dto.PostSearchRequest;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.WebApplicationType;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.data.domain.Page;

Expand All @@ -19,27 +32,28 @@
@Fork(1)
public class PostSearchBenchmark {

@State(Scope.Benchmark)
public static class BenchmarkState {
private ConfigurableApplicationContext context;
private PostFinder postFinder;
private SearchBenchmarkScenario scenario;
private abstract static class AbstractBenchmarkState {
protected ConfigurableApplicationContext context;
protected PostFinder postFinder;
protected PostSearchCacheRepository cacheRepository;
protected SearchBenchmarkScenario scenario;
protected SearchBenchmarkScenario.SearchQuery cacheHitQuery;

@Setup(Level.Trial)
public void setUp() {
context = new SpringApplicationBuilder(SeeApplication.class)
.profiles("benchmark")
.profiles("benchmark", "benchmark-data")
.web(WebApplicationType.NONE)
.logStartupInfo(false)
.run();
postFinder = context.getBean(PostFinder.class);
cacheRepository = context.getBean(PostSearchCacheRepository.class);
scenario = context.getBean(SearchBenchmarkScenarioProvider.class).createScenario();

// warm-up search to prime caches
for (int i = 0; i < 20; i++) {
SearchBenchmarkScenario.SearchQuery query = scenario.next();
postFinder.search(buildRequest(query.keyword()), query.pageable());
postFinder.searchPosts(query.keyword(), query.pageable());
}
cacheHitQuery = scenario.next();
// warm up DB and cache flows with the hot keyword
postFinder.search(buildRequest(cacheHitQuery.keyword()), cacheHitQuery.pageable());
postFinder.searchPosts(cacheHitQuery.keyword(), cacheHitQuery.pageable());
}

@TearDown(Level.Trial)
Expand All @@ -48,26 +62,62 @@ public void tearDown() {
context.close();
}
}

protected SearchBenchmarkScenario.SearchQuery nextQuery() {
return scenario.next();
}

protected SearchBenchmarkScenario.SearchQuery cacheHitQuery() {
return cacheHitQuery;
}

protected static PostSearchRequest buildRequest(String keyword) {
return PostSearchRequest.builder()
.keyword(keyword)
.status(null)
.build();
}
}

@State(Scope.Benchmark)
public static class DatabaseSearchState extends AbstractBenchmarkState {
// inherits base set-up
}

@State(Scope.Benchmark)
public static class ColdSearchState extends AbstractBenchmarkState {
@Setup(Level.Invocation)
public void clearCache() {
cacheRepository.evictAll();
}
}

@State(Scope.Benchmark)
public static class CachedSearchState extends AbstractBenchmarkState {
// inherits warm-up behaviour
}

@Benchmark
public void databaseSearch(BenchmarkState state, Blackhole blackhole) {
SearchBenchmarkScenario.SearchQuery query = state.scenario.next();
Page<?> result = state.postFinder.search(buildRequest(query.keyword()), query.pageable());
public void elasticsearchCold(ColdSearchState state, Blackhole blackhole) {
SearchBenchmarkScenario.SearchQuery query = state.nextQuery();
Page<PostSearchResult> result = state.postFinder.searchPosts(query.keyword(), query.pageable());
blackhole.consume(result.getTotalElements());
}

@Benchmark
public void elasticsearchSearch(BenchmarkState state, Blackhole blackhole) {
SearchBenchmarkScenario.SearchQuery query = state.scenario.next();
public void elasticsearchCached(CachedSearchState state, Blackhole blackhole) {
SearchBenchmarkScenario.SearchQuery query = state.cacheHitQuery();
Page<PostSearchResult> result = state.postFinder.searchPosts(query.keyword(), query.pageable());
blackhole.consume(result.getTotalElements());
}

private static PostSearchRequest buildRequest(String keyword) {
return PostSearchRequest.builder()
.keyword(keyword)
.status(null)
.build();
@Benchmark
public void databaseSearch(DatabaseSearchState state, Blackhole blackhole) {
SearchBenchmarkScenario.SearchQuery query = state.nextQuery();
Page<?> result = state.postFinder.search(
AbstractBenchmarkState.buildRequest(query.keyword()),
query.pageable()
);
blackhole.consume(result.getTotalElements());
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
package dooya.see.adapter.integration.cache;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import dooya.see.adapter.integration.cache.config.PostSearchCacheProperties;
import dooya.see.adapter.integration.cache.key.PostCacheKey;
import dooya.see.application.post.dto.PostSearchResult;
import dooya.see.application.post.required.PostSearchCacheRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.redis.core.Cursor;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ScanOptions;
import org.springframework.stereotype.Component;

import java.time.Duration;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.Set;

@Slf4j
@Component
@RequiredArgsConstructor
public class RedisPostSearchCacheRepository implements PostSearchCacheRepository {
    private static final String SEARCH_KEY_PATTERN = "post:search:*";
    // Hint for how many keys Redis should return per SCAN page; Redis treats this
    // as a suggestion, not a guarantee.
    private static final long SCAN_BATCH_SIZE = 500;

    private final RedisTemplate<String, String> redisTemplate;
    private final ObjectMapper objectMapper;
    private final PostSearchCacheProperties properties;

    /**
     * Returns the cached search page for the given keyword and page window,
     * or {@link Optional#empty()} on a cache miss.
     *
     * <p>A cache entry that can no longer be deserialized (e.g. after a schema
     * change of {@link PostSearchResult}) is treated as a miss: it is logged,
     * evicted, and the caller falls through to the real search.
     *
     * @param keyword  raw search keyword; normalized (trim + lower-case) before key lookup
     * @param pageable page window used both in the cache key and to rebuild the {@link Page}
     * @return cached page, or empty on miss / corrupt entry
     */
    @Override
    public Optional<Page<PostSearchResult>> find(String keyword, Pageable pageable) {
        String key = buildKey(keyword, pageable);
        String cached = redisTemplate.opsForValue().get(key);

        if (cached == null) {
            return Optional.empty();
        }

        try {
            CachedPage cachedPage = objectMapper.readValue(cached, CachedPage.class);
            return Optional.of(cachedPage.toPage(pageable));
        } catch (JsonProcessingException e) {
            log.warn("검색 캐시 역직렬화 실패: key={}", key, e);
            // Drop the corrupt entry so subsequent lookups do not keep failing on it.
            redisTemplate.delete(key);
            return Optional.empty();
        }
    }

    /**
     * Serializes the given page as JSON and stores it under the normalized
     * keyword/page key. A non-positive or missing TTL stores the entry without
     * expiry; otherwise the configured TTL is applied. Serialization failures
     * are logged and swallowed — caching is best-effort and must not break the
     * search path.
     *
     * @param keyword  raw search keyword
     * @param pageable page window of the result being cached
     * @param page     search result page to cache
     */
    @Override
    public void save(String keyword, Pageable pageable, Page<PostSearchResult> page) {
        String key = buildKey(keyword, pageable);
        CachedPage cachedPage = CachedPage.from(page);

        try {
            String serialized = objectMapper.writeValueAsString(cachedPage);
            Duration ttl = properties.getTtl();
            if (ttl == null || ttl.isZero() || ttl.isNegative()) {
                redisTemplate.opsForValue().set(key, serialized);
            } else {
                redisTemplate.opsForValue().set(key, serialized, ttl);
            }
        } catch (JsonProcessingException e) {
            log.warn("검색 캐시 직렬화 실패: key={}", key, e);
        }
    }

    /**
     * Evicts every cached search entry.
     *
     * <p>Uses SCAN rather than KEYS: KEYS is O(N) over the entire keyspace and
     * blocks the single-threaded Redis server while it runs, which is unsafe on
     * shared or production instances. SCAN iterates incrementally in small
     * batches without blocking.
     */
    @Override
    public void evictAll() {
        Set<String> keys = new HashSet<>();
        // Cursor is Closeable; try-with-resources releases the underlying connection.
        try (Cursor<String> cursor = redisTemplate.scan(
                ScanOptions.scanOptions()
                        .match(SEARCH_KEY_PATTERN)
                        .count(SCAN_BATCH_SIZE)
                        .build())) {
            cursor.forEachRemaining(keys::add);
        }
        if (!keys.isEmpty()) {
            redisTemplate.delete(keys);
        }
    }

    // Builds the Redis key from the normalized keyword plus page number and size,
    // so different page windows of the same keyword cache independently.
    private String buildKey(String keyword, Pageable pageable) {
        String normalized = normalize(keyword);
        return PostCacheKey.search(normalized, pageable.getPageNumber(), pageable.getPageSize());
    }

    // Normalizes a keyword for key purposes: null -> "", then trim and
    // locale-independent lower-casing (Locale.ROOT avoids e.g. Turkish-i surprises).
    private String normalize(String keyword) {
        if (keyword == null) {
            return "";
        }
        return keyword.trim().toLowerCase(Locale.ROOT);
    }

    /**
     * JSON-serializable snapshot of a {@link Page}: only the content and total
     * element count are stored; the page window is re-supplied by the caller at
     * read time via {@link #toPage(Pageable)}.
     */
    private record CachedPage(
            List<PostSearchResult> content,
            long totalElements
    ) {
        private Page<PostSearchResult> toPage(Pageable pageable) {
            return new PageImpl<>(content, pageable, totalElements);
        }

        private static CachedPage from(Page<PostSearchResult> page) {
            return new CachedPage(page.getContent(), page.getTotalElements());
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
package dooya.see.adapter.integration.cache.config;

import org.springframework.boot.context.properties.ConfigurationProperties;

import java.time.Duration;

@ConfigurationProperties(prefix = "see.cache.post-search")
public class PostSearchCacheProperties {

    /**
     * Time-to-live for cached search result pages.
     * Defaults to 3 minutes; bound from {@code see.cache.post-search.ttl}.
     */
    private Duration ttl = Duration.ofMinutes(3);

    public Duration getTtl() {
        return this.ttl;
    }

    public void setTtl(Duration ttl) {
        this.ttl = ttl;
    }
}
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
package dooya.see.adapter.integration.cache.config;

import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.StringRedisSerializer;

@Configuration
@EnableConfigurationProperties(PostSearchCacheProperties.class)
public class RedisConfig {
@Bean
RedisTemplate<String, String> redisTemplate(RedisConnectionFactory connectionFactory) {
Expand Down
Loading
Loading