์ค๋ ์งํํ ๋ด์ฉ๋ค ๐ง
์ฝ๋ ๋ฆฌํฉํ ๋งํ๊ธฐ
Redis์ MySQL ๋๊ธฐํ ๋ก์ง ๋ฆฌํฉํ ๋ง
๋๋ณด๊ธฐ
์์ ํ๊ธฐ ์ ๋ก์ง
@Component
@RequiredArgsConstructor
public class HistoryScheduler {
private final UserFindByService userFindByService;
private final HistoryRepository historyRepository;
private final RedisTemplate<String, String> redisTemplate;
/**
 * Scheduler method that periodically persists search terms cached in Redis
 * into the database.
 *
 * Runs every 30 minutes; for each user it reads the search terms from the
 * Redis sorted set and saves only the terms not already stored.
 * NOTE(review): this version issues one existsBy query and one save per term,
 * i.e. N+1 DB round trips per user.
 */
@Scheduled(fixedRate = 1800000) // every 30 minutes
public void saveSearchHistoryToDb() {
// KEYS scan over all users' search-history keys — presumably fine at this scale; verify for large keyspaces
Set<String> keys = redisTemplate.keys("user:*:search_history");
if (keys != null) {
for (String key : keys) {
// assumes key format "user:{id}:search_history" — the 2nd token is the user id
Long userId = Long.parseLong(key.split(":")[1]);
User user = userFindByService.findById(userId);
// range (0, -1) = every member of the sorted set
Set<String> searchTerms = redisTemplate.opsForZSet().range(key, 0, -1);
if (searchTerms != null) {
searchTerms.forEach(term -> {
// one existence check + one insert per term (the N+1 pattern)
if (!historyRepository.existsByUserIdAndName(userId, term)) {
historyRepository.save(History.toEntity(user, term));
}
});
}
}
}
}
}
์์ ํ ํ ๋ก์ง
@Component
@RequiredArgsConstructor
public class HistoryScheduler {

    private final UserFindByService userFindByService;
    private final HistoryRepository historyRepository;
    private final RedisTemplate<String, String> redisTemplate;

    /**
     * Scheduler method that periodically persists search terms cached in Redis
     * into the database.
     *
     * Runs every 30 minutes; for each user it reads the search terms from the
     * Redis sorted set, filters out terms already stored (one query per user,
     * avoiding N+1), and bulk-saves the remainder.
     */
    @Scheduled(fixedRate = 1800000) // every 30 minutes
    public void saveSearchHistoryToDb() {
        // KEYS scan over all users' search-history keys — presumably fine at this scale; verify for large keyspaces
        Set<String> keys = redisTemplate.keys("user:*:search_history");
        if (keys == null || keys.isEmpty()) {
            return;
        }
        for (String key : keys) {
            persistSearchHistoryForKey(key);
        }
    }

    /**
     * Persists the new search terms held under a single Redis key.
     *
     * @param key Redis key of the form "user:{id}:search_history" — TODO confirm
     *            no other key shape matches the "user:*:search_history" pattern
     */
    private void persistSearchHistoryForKey(String key) {
        Long userId = Long.parseLong(key.split(":")[1]);
        // range (0, -1) = every member of the sorted set
        Set<String> searchTerms = redisTemplate.opsForZSet().range(key, 0, -1);
        if (searchTerms == null || searchTerms.isEmpty()) {
            return; // nothing cached — skip all DB work for this user
        }
        // Load this user's existing terms once instead of an existsBy query per term.
        Set<String> existingSearchTermSet = historyRepository.findNamesByUserId(userId);
        List<String> newTerms = searchTerms.stream()
                .filter(term -> !existingSearchTermSet.contains(term))
                .toList();
        if (newTerms.isEmpty()) {
            return; // avoid fetching the User entity when there is nothing new to save
        }
        User user = userFindByService.findById(userId);
        List<History> historyList = newTerms.stream()
                .map(term -> History.toEntity(user, term))
                .toList();
        historyRepository.saveAll(historyList);
    }
}
Elasticsearch ๊ด๋ จ ์ฝ๋ ๋ฆฌํฉํ ๋ง
๋๋ณด๊ธฐ
์์ ํ๊ธฐ ์ ๋ก์ง
/**
 * Retrieves job openings filtered via Elasticsearch.
 *
 * If a search term is present it is first recorded in the user's search
 * history, then the filtered query is executed with paging and a
 * newest-first sort.
 * NOTE(review): this version builds the SearchRequest inline in the service,
 * mixing low-level query construction with business logic.
 */
@Transactional
public Page<ReadJobOpeningElasticResponseDto> readJobOpeningUsingElasticSearchFilter(
ReadJobOpeningElasticRequestDto requestDto,
Long userId,
Pageable pageable
) {
if (requestDto.getSearchTerm() != null) {
historyService.saveSearchTerm(userId, requestDto.getSearchTerm());
}
JobOpeningDocumentFilter filter = new JobOpeningDocumentFilter(requestDto);
var boolQueryBuilder = filter.build();
int pageSize = pageable.getPageSize();
int pageNumber = pageable.getPageNumber();
// caps the reachable offset at MAX_JOP_OPENING_SIZE total results
int from = calculateFrom(pageNumber, pageSize, IndexName.MAX_JOP_OPENING_SIZE);
SearchRequest searchRequest = new SearchRequest.Builder()
.index(IndexName.JOB_OPENING_DOCUMENT)
.query(q -> q.bool(boolQueryBuilder.build()))
.sort(s -> s.field(f -> f.field(IndexName.CREATED_AT).order(SortOrder.Desc))) // newest first
.from(from)
.size(pageSize)
.build();
List<JobOpeningDocument> jobOpeningDocuments = elasticsearchClientService.fetchJobOpeningDocumentList(searchRequest);
List<ReadJobOpeningElasticResponseDto> dtoList = ReadJobOpeningElasticResponseDto.toDto(jobOpeningDocuments);
// NOTE(review): page total is the fixed MAX_JOP_OPENING_SIZE, not the actual hit count — confirm intended
return new PageImpl<>(dtoList, pageable, IndexName.MAX_JOP_OPENING_SIZE);
}
์์ ํ ํ ๋ก์ง
/**
 * Retrieves job openings filtered via Elasticsearch.
 *
 * If a search term is present it is first recorded in the user's search
 * history; the actual query construction is delegated to the custom
 * repository, keeping this service method free of Elasticsearch details.
 */
@Transactional
public Page<ReadJobOpeningElasticResponseDto> readJobOpeningUsingElasticSearchFilter(
        ReadJobOpeningElasticRequestDto requestDto,
        Long userId,
        Pageable pageable
) {
    String searchTerm = requestDto.getSearchTerm();
    if (searchTerm != null) {
        historyService.saveSearchTerm(userId, searchTerm);
    }

    int size = pageable.getPageSize();
    // Offset capped at MAX_JOP_OPENING_SIZE total results.
    int offset = calculateFrom(pageable.getPageNumber(), size, IndexName.MAX_JOP_OPENING_SIZE);

    List<JobOpeningDocument> documents =
            jobOpeningDocumentRepository.searchJobOpeningWithFilter(requestDto, size, offset);

    // NOTE(review): page total is the fixed MAX_JOP_OPENING_SIZE, not the actual hit count — confirm intended
    return new PageImpl<>(
            ReadJobOpeningElasticResponseDto.toDto(documents),
            pageable,
            IndexName.MAX_JOP_OPENING_SIZE
    );
}
/**
 * Custom repository contract for Elasticsearch queries on JobOpeningDocument
 * that cannot be derived from Spring Data method names.
 */
public interface JobOpeningDocumentCustomRepository {
/**
 * Searches job-opening documents matching the request filter,
 * sorted and paged by the given offset/size.
 *
 * @param requestDto filter conditions (search term, etc.)
 * @param pageSize   maximum number of documents to fetch
 * @param from       offset of the first document in the result window
 * @return matching documents (empty list when nothing matches)
 */
List<JobOpeningDocument> searchJobOpeningWithFilter(ReadJobOpeningElasticRequestDto requestDto, int pageSize, int from);
}
@Repository
@RequiredArgsConstructor
public class JobOpeningDocumentRepositoryImpl implements JobOpeningDocumentCustomRepository {

    private final ElasticsearchClientService elasticsearchClientService;

    /**
     * Builds a bool query from the request filter and fetches the matching
     * job-opening documents, newest first, within the given paging window.
     */
    @Override
    public List<JobOpeningDocument> searchJobOpeningWithFilter(ReadJobOpeningElasticRequestDto requestDto, int pageSize, int from) {
        BoolQuery.Builder boolQuery = new JobOpeningDocumentFilter(requestDto).build();

        SearchRequest request = new SearchRequest.Builder()
                .index(IndexName.JOB_OPENING_DOCUMENT)
                .query(q -> q.bool(boolQuery.build()))
                // Sort by creation date, newest first.
                .sort(s -> s.field(f -> f.field(IndexName.CREATED_AT).order(SortOrder.Desc)))
                .from(from)
                .size(pageSize)
                .build();

        return elasticsearchClientService.fetchJobOpeningDocumentList(request);
    }
}
/**
 * Spring Data Elasticsearch repository for JobOpeningDocument.
 *
 * Extends the custom repository so that searchJobOpeningWithFilter
 * (implemented in JobOpeningDocumentRepositoryImpl) is exposed on the
 * injected repository bean. Without this extension, calling
 * jobOpeningDocumentRepository.searchJobOpeningWithFilter(...) from the
 * service does not compile — Spring Data only merges the custom
 * implementation into repositories that declare the custom interface.
 */
public interface JobOpeningDocumentRepository extends ElasticsearchRepository<JobOpeningDocument, String>, JobOpeningDocumentCustomRepository {
}
Elasticsearch ์๋ ์์ฑ ๊ธฐ๋ฅ ๊ณต๋ถํ๊ธฐ
๋ฐฉ๋ฒ 1: @Setting์ ์ด์ฉํ์ฌ Nori/Ngram ์ ์ฉํ๊ธฐ
Spring Data Elasticsearch์์ @Setting์ ์ฌ์ฉํ๋ฉด ์ธ๋ฑ์ค ์ค์ (analyzer, tokenizer ๋ฑ)์ JSON ํ์ผ๋ก ๋ถ๋ฆฌํ ์ ์๋ค. (ํ๋ ๋งคํ์ @Field ์ ๋์ด์ ์ผ๋ก ๊ทธ๋๋ก ์ ์ง๋๋ค.)
@Setting์ ์ ์ฉํ JobOpeningDocument
import org.springframework.data.elasticsearch.annotations.Setting;
/**
 * Elasticsearch document representing a job opening.
 *
 * Index settings (nori / n-gram analyzers) are externalized to a JSON file
 * via @Setting(settingPath = ...); field mappings stay on the @Field annotations.
 */
@Document(indexName = IndexName.JOB_OPENING_DOCUMENT)
@Setting(settingPath = "/elasticsearch/job-opening-settings.json") // externalized index settings JSON
public class JobOpeningDocument {
@Id
@Field(type = FieldType.Keyword)
private String id;
@Field(type = FieldType.Text, analyzer = "nori_analyzer", searchAnalyzer = "nori_analyzer")
private String title; // Korean morphological analysis applied
@Field(type = FieldType.Text, analyzer = "ngram_analyzer", searchAnalyzer = "standard")
private String company; // n-gram based search (partial company-name matches)
@Field(type = FieldType.Text, analyzer = "nori_analyzer")
private String position; // morphological analysis for job-title search
@Field(type = FieldType.Keyword)
private String location;
@Field(type = FieldType.Integer)
private int salary;
}
ํต์ฌ ํฌ์ธํธ
- @Setting(settingPath = "/elasticsearch/job-opening-settings.json")๋ฅผ ์ฌ์ฉํด ์ธ๋ฑ์ค ์ค์ (analyzer ์ ์)์ JSON์ผ๋ก ๋ถ๋ฆฌ.
- analyzer = "nori_analyzer" → ํ๊ธ ํํ์ ๋ถ์๊ธฐ ์ ์ฉ
- analyzer = "ngram_analyzer" → n-gram ๊ฒ์ ์ ์ฉ (ํ์ฌ๋ช ๊ฒ์์ฉ)
job-opening-settings.json (Elasticsearch ์ธ๋ฑ์ค ์ค์ )
src/main/resources/elasticsearch/job-opening-settings.json ํ์ผ ์์ฑ ํ ์๋ ๋ด์ฉ์ ์ถ๊ฐ.
{
"index": {
"max_ngram_diff": 4
},
"analysis": {
"tokenizer": {
"nori_tokenizer": {
"type": "nori_tokenizer"
},
"ngram_tokenizer": {
"type": "ngram",
"min_gram": 2,
"max_gram": 5,
"token_chars": ["letter", "digit"]
}
},
"analyzer": {
"nori_analyzer": {
"type": "custom",
"tokenizer": "nori_tokenizer"
},
"ngram_analyzer": {
"type": "custom",
"tokenizer": "ngram_tokenizer",
"filter": ["lowercase"]
}
}
}
}
์ค์ ์ค๋ช
- nori_analyzer → ํ๊ธ ํํ์ ๋ถ์ (nori_tokenizer ์ฌ์ฉ)
- ngram_analyzer → n-gram ๋ถ์ (2~5๊ธ์ ๋จ์๋ก ๊ฒ์ ๊ฐ๋ฅ). ๋จ, min_gram๊ณผ max_gram์ ์ฐจ์ด๊ฐ 1์ ์ด๊ณผํ๋ฏ๋ก ์ธ๋ฑ์ค ์ค์ ์ index.max_ngram_diff(๊ธฐ๋ณธ๊ฐ 1)๋ฅผ 4 ์ด์์ผ๋ก ์ฌ๋ ค ์ฃผ์ด์ผ ์ธ๋ฑ์ค ์์ฑ์ด ์คํจํ์ง ์๋๋ค.
๋ด์ผ ๊ณํ โฐ
- Elasticsearch ์๋ ์์ฑ ๊ธฐ๋ฅ ๊ณต๋ถํ๊ธฐ
- Elasticsearch ์๋ ์์ฑ ๊ธฐ๋ฅ ๊ตฌํํ๊ธฐ
+ ์ถ๊ฐ ๊ณํ์ด ์๊ธธ ์๋ ์์ต๋๋ค ~_~
'โ๏ธ Today I Learned(TIL) > ์คํ๋ฅดํ ๋ด์ผ๋ฐฐ์์บ ํ' ์นดํ ๊ณ ๋ฆฌ์ ๋ค๋ฅธ ๊ธ
[ TIL ] ์ต์ข ํ๋ก์ ํธ_Day24 (1) | 2025.03.08 |
---|---|
[ TIL ] ์ต์ข ํ๋ก์ ํธ_Day 23 (1) | 2025.03.06 |
[ TIL ] ์ต์ข ํ๋ก์ ํธ_Day 18 (0) | 2025.03.01 |
[ TIL ] ์ต์ข ํ๋ก์ ํธ_Day 17 (0) | 2025.02.27 |
[ TIL ] ์ต์ข ํ๋ก์ ํธ_Day 16 (0) | 2025.02.27 |