13 changes: 13 additions & 0 deletions .claude/settings.local.json
@@ -0,0 +1,13 @@
{
"permissions": {
"allow": [
"Bash(git reset:*)",
"Bash(git add:*)",
"Bash(git commit:*)",
"Bash(rm:*)",
"Bash(git mv:*)"
],
"deny": [],
"ask": []
}
}
@@ -1,14 +1,21 @@
package com.techfork.domain.post.batch;

import com.techfork.domain.post.entity.Post;
import com.techfork.domain.post.repository.PostRepository;
import com.techfork.domain.post.entity.PostKeyword;
import com.techfork.global.util.JdbcBatchExecutor;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ItemWriter;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

/**
* Writer that saves Posts with generated summaries
*/
@@ -18,10 +25,85 @@
@RequiredArgsConstructor
public class PostSummaryWriter implements ItemWriter<Post> {

private final PostRepository postRepository;
private final JdbcBatchExecutor jdbcBatchExecutor;

@PersistenceContext
private EntityManager entityManager;

@Override
public void write(Chunk<? extends Post> chunk) {
postRepository.saveAll(chunk.getItems());
List<? extends Post> posts = chunk.getItems();
if (posts.isEmpty()) {
return;
}

updatePostSummaries(posts);
deleteOldKeywords(posts);
insertNewKeywords(posts);

log.info("PostSummaryWriter: finished processing {} posts", posts.size());

// Detach all managed entities so stale JPA state is not re-flushed after the direct JDBC updates
entityManager.clear();
}

private void updatePostSummaries(List<? extends Post> posts) {
String sql = "UPDATE posts SET summary = ? WHERE id = ?";

@SuppressWarnings("unchecked")
List<Post> postList = (List<Post>) posts;

int totalUpdated = jdbcBatchExecutor.batchExecute(sql, postList, (ps, post, i) -> {
ps.setString(1, post.getSummary());
ps.setLong(2, post.getId());
});

log.debug("UPDATE posts: {} rows updated", totalUpdated);
}

private void deleteOldKeywords(List<? extends Post> posts) {
List<Long> postIds = posts.stream()
.map(Post::getId)
.collect(Collectors.toList());

if (postIds.isEmpty()) {
return;
}

String sql = "DELETE FROM post_keywords WHERE post_id = ?";

int deletedCount = jdbcBatchExecutor.batchExecute(sql, postIds, (ps, id, i) ->
ps.setLong(1, id)
);
log.debug("DELETE post_keywords: {} rows deleted", deletedCount);
}

private void insertNewKeywords(List<? extends Post> posts) {
// Flatten every PostKeyword from each Post
List<KeywordInsertDto> keywordDtos = new ArrayList<>();
for (Post post : posts) {
for (PostKeyword keyword : post.getKeywords()) {
keywordDtos.add(new KeywordInsertDto(keyword.getKeyword(), post.getId()));
}
}

if (keywordDtos.isEmpty()) {
log.debug("INSERT post_keywords: no keywords to insert");
return;
}

String sql = "INSERT INTO post_keywords (keyword, post_id) VALUES (?, ?)";

int inserted = jdbcBatchExecutor.batchExecute(sql, keywordDtos, (ps, dto, i) -> {
ps.setString(1, dto.keyword);
ps.setLong(2, dto.postId);
});

log.debug("INSERT post_keywords: {} rows inserted", inserted);
}

/**
* DTO for keyword inserts
*/
private record KeywordInsertDto(String keyword, Long postId) {
}
}
}
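For reviewers unfamiliar with where this writer plugs in, below is a minimal sketch of a chunk-oriented step that could use PostSummaryWriter. It is not part of this PR: it assumes Spring Batch 5's StepBuilder API, and the step name, chunk size, and the postSummaryReader/postSummaryProcessor beans are placeholders.

import org.springframework.batch.core.Step;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.PlatformTransactionManager;

import com.techfork.domain.post.batch.PostSummaryWriter;
import com.techfork.domain.post.entity.Post;

@Configuration
public class PostSummaryStepConfigSketch {

    // Placeholder wiring: the reader and processor beans are assumed to exist elsewhere.
    @Bean
    public Step postSummaryStep(JobRepository jobRepository,
                                PlatformTransactionManager transactionManager,
                                ItemReader<Post> postSummaryReader,
                                ItemProcessor<Post, Post> postSummaryProcessor,
                                PostSummaryWriter postSummaryWriter) {
        return new StepBuilder("postSummaryStep", jobRepository)
                .<Post, Post>chunk(100, transactionManager) // chunk size 100 is illustrative
                .reader(postSummaryReader)
                .processor(postSummaryProcessor)
                .writer(postSummaryWriter)
                .build();
    }
}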
@@ -1,7 +1,7 @@
package com.techfork.domain.source.batch;

import com.techfork.domain.post.entity.Post;
import com.techfork.global.util.JdbcBulkInsert;
import com.techfork.global.util.JdbcBatchExecutor;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
@@ -22,7 +22,7 @@
@RequiredArgsConstructor
public class PostBatchWriter implements ItemWriter<Post> {

private final JdbcBulkInsert jdbcBulkInsert;
private final JdbcBatchExecutor jdbcBatchExecutor;

private static final String INSERT_SQL = """
INSERT INTO posts
@@ -38,15 +38,15 @@ public void write(Chunk<? extends Post> chunk) {

List<? extends Post> items = chunk.getItems();

int inserted = jdbcBulkInsert.batchInsert(INSERT_SQL, items, (ps, post, i) -> {
int inserted = jdbcBatchExecutor.batchExecute(INSERT_SQL, items, (ps, post, i) -> {
ps.setString(1, post.getTitle());
ps.setString(2, post.getFullContent());
ps.setString(3, post.getPlainContent());
ps.setString(4, post.getCompany());
ps.setString(5, post.getUrl());
ps.setString(6, post.getLogoUrl());
ps.setTimestamp(7, JdbcBulkInsert.toTimestamp(post.getPublishedAt()));
ps.setTimestamp(8, JdbcBulkInsert.toTimestamp(post.getCrawledAt()));
ps.setTimestamp(7, JdbcBatchExecutor.toTimestamp(post.getPublishedAt()));
ps.setTimestamp(8, JdbcBatchExecutor.toTimestamp(post.getCrawledAt()));
ps.setLong(9, 0L);
ps.setLong(10, post.getTechBlog().getId());
});
@@ -12,26 +12,26 @@
import java.util.List;

/**
* Utility class for JDBC batch inserts
* Provides bulk inserts far faster than JPA's saveAll
* Utility class for executing JDBC batches
* Optimizes performance by processing INSERT, UPDATE, and DELETE in batches
*/
@Slf4j
@Component
@RequiredArgsConstructor
public class JdbcBulkInsert {
public class JdbcBatchExecutor {

private final JdbcTemplate jdbcTemplate;

/**
* Executes a batch insert
* Executes a batch statement (supports INSERT, UPDATE, and DELETE)
*
* @param sql INSERT statement (PreparedStatement style)
* @param items list of items to insert
* @param sql SQL statement (PreparedStatement style)
* @param items list of items to process
* @param setter PreparedStatement binding logic for each item
* @param <T> item type
* @return number of rows actually inserted
* @return number of rows actually processed
*/
public <T> int batchInsert(String sql, List<T> items, BatchParameterSetter<T> setter) {
public <T> int batchExecute(String sql, List<T> items, BatchParameterSetter<T> setter) {
if (items == null || items.isEmpty()) {
return 0;
}
@@ -55,7 +55,7 @@ public int getBatchSize() {
}
}

log.debug("Bulk insert complete: {} items submitted, {} inserted", items.size(), totalInserted);
log.debug("Batch operation complete: {} items submitted, {} processed", items.size(), totalInserted);
return totalInserted;
}
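
To make the rename easier to follow, here is a minimal, self-contained sketch of how batchExecute plausibly works on top of JdbcTemplate.batchUpdate, reconstructed from the fragments above. The BatchParameterSetter shape and the handling of update counts are assumptions rather than the exact contents of this file, and the static toTimestamp helper used by PostBatchWriter is omitted.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;

public class JdbcBatchExecutorSketch {

    private final JdbcTemplate jdbcTemplate;

    public JdbcBatchExecutorSketch(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Assumed callback shape, matching the (ps, item, i) lambdas used by the writers above.
    @FunctionalInterface
    public interface BatchParameterSetter<T> {
        void setValues(PreparedStatement ps, T item, int index) throws SQLException;
    }

    public <T> int batchExecute(String sql, List<T> items, BatchParameterSetter<T> setter) {
        if (items == null || items.isEmpty()) {
            return 0;
        }

        // Bind every item into one JDBC batch and execute it in a single round trip per batch.
        int[] updateCounts = jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
            @Override
            public void setValues(PreparedStatement ps, int i) throws SQLException {
                setter.setValues(ps, items.get(i), i);
            }

            @Override
            public int getBatchSize() {
                return items.size();
            }
        });

        // Sum the per-statement counts; drivers may report Statement.SUCCESS_NO_INFO (-2),
        // which is simply skipped in this sketch.
        return Arrays.stream(updateCounts).filter(count -> count > 0).sum();
    }
}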
