Closed
Changes from all commits (26 commits)
6d82001
remover: delete meaningless files
yongho9064 Oct 2, 2025
5da0493
feat[setting]: modify yml file
yongho9064 Oct 2, 2025
4447865
config[ai]: configure embedding model
yongho9064 Oct 2, 2025
b5739fb
feat[chat]: fix keyword extraction
yongho9064 Oct 2, 2025
39be582
dependencies: add Spring Batch and Ollama dependencies
yongho9064 Oct 2, 2025
3957b82
sql: remove query statements
yongho9064 Oct 2, 2025
d6ace96
config[db]: configure multiple databases
yongho9064 Oct 2, 2025
df4e2e7
docker: add ollama
yongho9064 Oct 2, 2025
b3ee3b4
feat: fix qdrant accuracy
yongho9064 Oct 2, 2025
26794b1
feat[batch]: apply Spring Batch -> to be used later
yongho9064 Oct 2, 2025
89ca0de
chore[env]: clean up env file
DooHyoJeong Oct 2, 2025
116d5ef
refactor[cicd]: clean up env file
DooHyoJeong Oct 2, 2025
b0f5b19
Merge remote-tracking branch 'origin/develop' into develop
yongho9064 Oct 2, 2025
95ebd95
chore[deploy]: change instance id
DooHyoJeong Oct 2, 2025
1c75b5a
Merge pull request #196 from prgrms-web-devcourse-final-project/choreโ€ฆ
DooHyoJeong Oct 2, 2025
d58f3d9
remover: delete meaningless files
yongho9064 Oct 2, 2025
982d189
feat[setting]: modify yml file
yongho9064 Oct 2, 2025
ed6100b
config[ai]: configure embedding model
yongho9064 Oct 2, 2025
53d39f9
feat[chat]: fix keyword extraction
yongho9064 Oct 2, 2025
5ccf6ad
dependencies: add Spring Batch and Ollama dependencies
yongho9064 Oct 2, 2025
509fd62
sql: remove query statements
yongho9064 Oct 2, 2025
619eb10
config[db]: configure multiple databases
yongho9064 Oct 2, 2025
8803cc3
docker: add ollama
yongho9064 Oct 2, 2025
bb10d6e
feat: fix qdrant accuracy
yongho9064 Oct 2, 2025
b069a0d
feat[batch]: apply Spring Batch -> to be used later
yongho9064 Oct 2, 2025
0c094d8
Merge remote-tracking branch 'origin/feat/chat' into feat/chat
yongho9064 Oct 2, 2025
2 changes: 1 addition & 1 deletion .github/workflows/CI-CD_Pipeline.yml
@@ -201,7 +201,7 @@ jobs:
aws-region: ${{ secrets.AWS_REGION }}
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
instance-ids: "i-0492b8e4ad48a0586"
instance-ids: "i-03509f63569ddb509"
working-directory: /
comment: Deploy
command: |
3 changes: 3 additions & 0 deletions backend/build.gradle
@@ -40,6 +40,7 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-actuator'
implementation 'org.springframework.boot:spring-boot-starter-oauth2-client'
implementation group: 'org.springframework.boot', name: 'spring-boot-starter-mail', version: '3.0.5'
implementation 'org.springframework.boot:spring-boot-starter-batch'

// API Documentation
implementation 'org.apache.commons:commons-lang3:3.18.0'
@@ -78,6 +79,8 @@ dependencies {
implementation 'org.springframework.ai:spring-ai-starter-model-openai'
implementation 'org.springframework.ai:spring-ai-advisors-vector-store'
implementation 'org.springframework.ai:spring-ai-starter-model-chat-memory-repository-jdbc'
implementation 'org.springframework.ai:spring-ai-starter-model-ollama'
implementation 'org.springframework.ai:spring-ai-starter-model-huggingface'

// Testing
testImplementation 'org.springframework.boot:spring-boot-starter-test'
23 changes: 22 additions & 1 deletion backend/docker-compose.yml
@@ -58,7 +58,28 @@ services:
timeout: 5s
retries: 10

ollama:
image: ollama/ollama:latest
container_name: ollama
restart: unless-stopped
ports:
- "11434:11434"
volumes:
- ollama-data:/root/.ollama
entrypoint: [ "/bin/sh", "-c" ]
command: >
"ollama serve &
sleep 5 &&
ollama pull daynice/kure-v1:567m &&
wait"
healthcheck:
test: [ "CMD", "curl", "-f", "http://localhost:11434/api/version" ]
interval: 10s
timeout: 5s
retries: 10

volumes:
mysql-data:
redis-data:
qdrant-data:
qdrant-data:
ollama-data:
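For reference, the healthcheck above polls Ollama's /api/version endpoint. The same smoke test can be run from the JVM; a minimal sketch assuming the compose service is up and port 11434 is reachable from the host (this class is illustrative, not part of the PR):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Mirrors the compose healthcheck: GET /api/version should return 200
// once "ollama serve" is accepting connections.
public class OllamaHealthCheck {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:11434/api/version"))
                .GET()
                .build();
        HttpResponse<String> response =
                client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}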
ExtractionDto.java
@@ -4,8 +4,6 @@
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

public class ExtractionDto {

@Data
@@ -19,7 +17,7 @@ public static class TitleExtractionDto {
@AllArgsConstructor
@NoArgsConstructor
public static class KeywordExtractionDto {
private List<String> keyword;
private String keyword;
}

}
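KeywordExtractionDto now carries a single keyword string rather than a list. A minimal sketch of how such a structured extraction call can be made with Spring AI's ChatClient entity mapping; chatClient and keywordExtractionPrompt are assumed for illustration, since the actual keywordExtract helper is not shown in this diff:

// Hypothetical sketch of the extraction call mapped onto the DTO.
KeywordExtractionDto dto = chatClient.prompt()
        .system(keywordExtractionPrompt) // assumed system prompt requesting one keyword
        .user(message)
        .call()
        .entity(KeywordExtractionDto.class);
String keyword = dto.getKeyword(); // a single keyword after this change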
ChatBotService.java
@@ -57,17 +57,13 @@ public class ChatBotService {
// Look up member -> vector search (precedents, statutes) -> build prompts (system, user) -> call chat client (streaming) -> save response, extract title/keywords
public Flux<ChatResponse> sendMessage(Long memberId, ChatRequest chatChatRequestDto, Long roomId) {

if(memberId == null) {
log.error("The member does not exist, or the accessToken has expired or is invalid.");
}

Member member = memberRepository.findById(memberId)
.orElseThrow(() -> new IllegalArgumentException("Member does not exist.")
);

// Vector search (precedents, statutes)
List<Document> similarCaseDocuments = qdrantService.searchDocument(chatChatRequestDto.getMessage(), "type", "ํŒ๋ก€", 3);
List<Document> similarLawDocuments = qdrantService.searchDocument(chatChatRequestDto.getMessage(), "type", "๋ฒ•๋ น", 2);
List<Document> similarCaseDocuments = qdrantService.searchDocument(chatChatRequestDto.getMessage(), "type", "ํŒ๋ก€");
List<Document> similarLawDocuments = qdrantService.searchDocument(chatChatRequestDto.getMessage(), "type", "๋ฒ•๋ น");

// ํŒ๋ก€์™€ ๋ฒ•๋ น ์ •๋ณด๋ฅผ ๊ตฌ๋ถ„ ์žˆ๊ฒŒ ํฌ๋งทํŒ…
String caseContext = formatting(similarCaseDocuments);
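The callers above no longer pass a topK count, so the result limit presumably now lives inside QdrantService. A minimal sketch of what the narrowed method could look like against Spring AI's VectorStore API (the service's real shape is not shown in this diff; builder names follow Spring AI 1.0):

import java.util.List;

import org.springframework.ai.document.Document;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.ai.vectorstore.filter.FilterExpressionBuilder;

// Hypothetical shape of QdrantService.searchDocument after this change.
public class QdrantSearchSketch {

    private final VectorStore vectorStore;

    public QdrantSearchSketch(VectorStore vectorStore) {
        this.vectorStore = vectorStore;
    }

    public List<Document> searchDocument(String query, String key, String value) {
        return vectorStore.similaritySearch(SearchRequest.builder()
                .query(query)
                .topK(3) // the limit is now owned by the service, not each caller
                .filterExpression(new FilterExpressionBuilder().eq(key, value).build())
                .build());
    }
}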
@@ -167,18 +163,19 @@ private void handlerTasks(ChatRequest chatDto, History history, String fullRespo
private void extractAndUpdateKeywordRanks(String message) {
KeywordExtractionDto keywordResponse = keywordExtract(message, keywordExtraction, KeywordExtractionDto.class);

for (String keyword : keywordResponse.getKeyword()) {
KeywordRank keywordRank = keywordRankRepository.findByKeyword(keyword);
if (keywordRank == null) {
keywordRank = KeywordRank.builder()
.keyword(keyword)
.score(1L)
.build();
} else {
keywordRank.setScore(keywordRank.getScore() + 1);
}
keywordRankRepository.save(keywordRank);
KeywordRank keywordRank = keywordRankRepository.findByKeyword(keywordResponse.getKeyword());

if (keywordRank == null) {
keywordRank = KeywordRank.builder()
.keyword(keywordResponse.getKeyword())
.score(1L)
.build();
} else {
keywordRank.setScore(keywordRank.getScore() + 1);
}

keywordRankRepository.save(keywordRank);

}

private void setHistoryTitle(ChatRequest chatDto, History history, String fullResponse) {
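One thing to note about the rewritten block above: it reads the rank and then saves it, so two concurrent chats extracting the same keyword can race and lose an increment. A minimal sketch of an atomic alternative, assuming MySQL and a unique index on keyword (table and column names are assumed; none of this is in the PR):

import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.transaction.annotation.Transactional;

// Hypothetical atomic alternative to the read-then-save increment above.
public interface KeywordRankRepository extends JpaRepository<KeywordRank, Long> {

    KeywordRank findByKeyword(String keyword); // lookup the service uses today

    @Modifying
    @Transactional
    @Query(value = "INSERT INTO keyword_rank (keyword, score) VALUES (:keyword, 1) "
            + "ON DUPLICATE KEY UPDATE score = score + 1", nativeQuery = true)
    void upsertKeyword(@Param("keyword") String keyword); // assumed table/column names
}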
backend/src/main/java/com/ai/lawyer/global/batch/BatchScheduler.java (new file)
@@ -0,0 +1,25 @@
package com.ai.lawyer.global.batch;

/*@Slf4j
@Component
@EnableScheduling
@RequiredArgsConstructor
public class BatchScheduler {

private final JobLauncher jobLauncher;
private final Job dataVectorizationJob;

// note: with Spring's six-field cron, '* * 2 * * *' fires every second during the
// 2 AM hour; a once-a-day 2 AM run would be '0 0 2 * * *'.
@Scheduled(cron = "#{${batch.scheduler.run-every-minute} ? '* * * * * *' : '* * 2 * * *'}")
public void runVectorizationJob() {
log.info("Running the full-data (precedents, statutes) vectorization scheduler...");
try {
JobParameters jobParameters = new JobParametersBuilder()
.addString("requestDate", LocalDateTime.now().toString())
.toJobParameters();

jobLauncher.run(dataVectorizationJob, jobParameters); // launch the Job
} catch (Exception e) {
log.error("Error while running the full-data vectorization batch job", e);
}
}
}*/
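The scheduler is checked in commented out (the commit message says Spring Batch is "to be used later"). For reference, roughly these imports would be needed once it is re-enabled; they are inferred from the types the block references, not taken from this PR:

import java.time.LocalDateTime;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;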
backend/src/main/java/com/ai/lawyer/global/batch/DataVectorizationJobConfig.java (new file)
@@ -0,0 +1,208 @@
package com.ai.lawyer.global.batch;

/*@Slf4j
@Configuration
@RequiredArgsConstructor
public class DataVectorizationJobConfig {

private final JobRepository jobRepository;
private final PlatformTransactionManager transactionManager;
private final EntityManagerFactory entityManagerFactory;
private final VectorStore vectorStore;

private final JangRepository jangRepository;
private final JoRepository joRepository;
private final HangRepository hangRepository;
private final HoRepository hoRepository;

private final TokenTextSplitter tokenSplitter = TokenTextSplitter.builder()
.withChunkSize(800)
.withMinChunkSizeChars(0)
.withMinChunkLengthToEmbed(5)
.withMaxNumChunks(10000)
.withKeepSeparator(true)
.build();

private static final int CHUNK_SIZE = 10; // number of items read at a time during batch processing

@Value("${batch.page.size.precedent}")
private int precedentPageSize; // number of precedents to process per day

@Value("${batch.page.size.law}")
private int lawPageSize; // number of statutes to process per day

@Bean
public TaskExecutor taskExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(10);
executor.setMaxPoolSize(20);
executor.setQueueCapacity(100);
executor.setThreadNamePrefix("async-thread-");
executor.initialize();
return executor;
}

// -------------- Full-data vectorization job definition --------------
@Bean
public Job dataVectorizationJob() {
return new JobBuilder("dataVectorizationJob", jobRepository)
.start(precedentVectorizationStep()) // run the precedent vectorization step
.next(lawVectorizationStep()) // run the statute vectorization step
.build();
}

// -------------- Precedent vectorization ---------------
@Bean
public Step precedentVectorizationStep() {
log.info(">>>>>> Starting precedent vectorization");
return new StepBuilder("precedentVectorizationStep", jobRepository)
.<Precedent, List<Document>>chunk(CHUNK_SIZE, transactionManager)
.reader(precedentItemReader())
.processor(precedentItemProcessor())
.writer(documentItemWriter())
.taskExecutor(taskExecutor())
.build();
}

@Bean
public JpaPagingItemReader<Precedent> precedentItemReader() {
return new JpaPagingItemReaderBuilder<Precedent>()
.name("precedentItemReader")
.entityManagerFactory(entityManagerFactory)
.pageSize(CHUNK_SIZE)
.maxItemCount(precedentPageSize)
.queryString("SELECT p FROM Precedent p ORDER BY p.id ASC")
.build();
}

@Bean
public ItemProcessor<Precedent, List<Document>> precedentItemProcessor() {

return precedent -> {
String content = precedent.getPrecedentContent();
if (content == null || content.isBlank()) return null;

Document originalDoc = new Document(content, Map.of(
"type", "ํŒ๋ก€",
"caseNumber", precedent.getCaseNumber(),
"court", precedent.getCourtName(),
"caseName", precedent.getCaseName()
));

List<Document> chunkDocs = tokenSplitter.split(originalDoc);
List<Document> finalChunks = new ArrayList<>();

// ์ฒญํฌ๋ณ„๋กœ ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ์— ์ธ๋ฑ์Šค ์ถ”๊ฐ€ -> ๊ตฌ๋ถ„ ์šฉ๋„
for (int i = 0; i < chunkDocs.size(); i++) {
Document chunk = chunkDocs.get(i);
Map<String, Object> newMetadata = new HashMap<>(chunk.getMetadata());
newMetadata.put("chunkIndex", i);
finalChunks.add(new Document(chunk.getText(), newMetadata));
}
return finalChunks;
};
}

// -------------- Statute vectorization ---------------
@Bean
public Step lawVectorizationStep() {
log.info(">>>>>> Starting statute vectorization");
return new StepBuilder("lawVectorizationStep", jobRepository)
.<Law, List<Document>>chunk(CHUNK_SIZE, transactionManager) // statutes are processed 10 at a time
.reader(lawItemReader())
.processor(lawItemProcessor())
.writer(documentItemWriter())
.taskExecutor(taskExecutor())
.build();
}

@Bean
public JpaPagingItemReader<Law> lawItemReader() {
return new JpaPagingItemReaderBuilder<Law>()
.name("lawItemReader")
.entityManagerFactory(entityManagerFactory)
.pageSize(CHUNK_SIZE)
.maxItemCount(lawPageSize)
.queryString("SELECT l FROM Law l ORDER BY l.id ASC")
.build();
}

@Bean
public ItemProcessor<Law, List<Document>> lawItemProcessor() {
return law -> {
List<Document> finalChunks = new ArrayList<>();

List<Jang> jangs = jangRepository.findByLaw(law);

for (Jang jang : jangs) {

StringBuilder contentBuilder = new StringBuilder();

contentBuilder.append(law.getLawName()).append("\n");

if (jang.getContent() != null && !jang.getContent().isBlank()) {
contentBuilder.append(jang.getContent()).append("\n");
}

List<Jo> jos = joRepository.findByJang(jang);
for (Jo jo : jos) {

if (jo.getContent() != null && !jo.getContent().isBlank()) {
contentBuilder.append(jo.getContent()).append("\n");
}

List<Hang> hangs = hangRepository.findByJo(jo);
for (Hang hang : hangs) {
if (hang.getContent() != null && !hang.getContent().isBlank()) {
contentBuilder.append(hang.getContent()).append("\n");
}

List<Ho> hos = hoRepository.findByHang(hang);
for (Ho ho : hos) {
if (ho.getContent() != null && !ho.getContent().isBlank()) {
contentBuilder.append(ho.getContent()).append("\n");
}
}
}
}

// === Build one document per Jang (chapter) ===
String finalContent = contentBuilder.toString();

if (!finalContent.isBlank()) {
Map<String, Object> metadata = new HashMap<>();
metadata.put("type", "๋ฒ•๋ น");
metadata.put("lawName", law.getLawName());
metadata.put("jangId", jang.getId());

Document originalDoc = new Document(finalContent, metadata);

List<Document> chunkDocs = tokenSplitter.split(originalDoc);

for (int i = 0; i < chunkDocs.size(); i++) {
Document chunk = chunkDocs.get(i);
Map<String, Object> newMetadata = new HashMap<>(chunk.getMetadata());
newMetadata.put("chunkIndex", i);
finalChunks.add(new Document(chunk.getText(), newMetadata));
}
}
}

return finalChunks.isEmpty() ? null : finalChunks;
};
}

@Bean
public ItemWriter<List<Document>> documentItemWriter() {
return chunk -> {
List<Document> totalDocuments = chunk.getItems().stream()
.flatMap(List::stream)
.collect(Collectors.toList());

if (!totalDocuments.isEmpty()) {
vectorStore.add(totalDocuments);
log.info(">>>>>> Saved {} Document chunks to the vector store.", totalDocuments.size());
}
};
}
}*/
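A caveat to flag for when this config is re-enabled: both steps combine JpaPagingItemReader with a TaskExecutor, and paging JPA readers need care in multi-threaded steps (their restart state cannot be saved safely). One common pattern is to wrap the delegate in Spring Batch's SynchronizedItemStreamReader. A minimal sketch, assuming it sits in this same configuration class (the bean name is illustrative, not part of the PR):

import org.springframework.batch.item.support.SynchronizedItemStreamReader;
import org.springframework.context.annotation.Bean;

// Hypothetical wrapper making the paging reader safe for the multi-threaded step.
@Bean
public SynchronizedItemStreamReader<Precedent> synchronizedPrecedentReader() {
    SynchronizedItemStreamReader<Precedent> reader = new SynchronizedItemStreamReader<>();
    reader.setDelegate(precedentItemReader());
    return reader;
}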
15 changes: 9 additions & 6 deletions backend/src/main/java/com/ai/lawyer/global/config/AIConfig.java
@@ -3,16 +3,24 @@
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.memory.ChatMemoryRepository;
import org.springframework.ai.chat.memory.repository.jdbc.JdbcChatMemoryRepository;
import org.springframework.ai.embedding.EmbeddingModel;
import org.springframework.ai.ollama.OllamaEmbeddingModel;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.PlatformTransactionManager;

@Configuration
public class AIConfig {

@Bean
@Primary
public EmbeddingModel primaryOllamaEmbeddingModel(OllamaEmbeddingModel ollamaEmbeddingModel) {
return ollamaEmbeddingModel;
}

@Bean
public ChatMemoryRepository chatMemoryRepository(JdbcTemplate jdbcTemplate, PlatformTransactionManager transactionManager) {
return JdbcChatMemoryRepository.builder()
@@ -26,9 +34,4 @@ public ChatClient openAiChatClient(OpenAiChatModel openAiChatModel) {
return ChatClient.create(openAiChatModel);
}

@Bean
public TokenTextSplitter tokenTextSplitter() {
return new TokenTextSplitter(500, 150, 5, 10000, true);
}

}
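Because build.gradle now pulls in the ollama and huggingface starters alongside openai, several EmbeddingModel beans may exist on the classpath, and the @Primary bean above pins by-type injection (and therefore the vector store's embedder) to Ollama. A minimal sketch of the effect; the class below is illustrative and not part of the PR:

import org.springframework.ai.embedding.EmbeddingModel;
import org.springframework.stereotype.Service;

// Anywhere EmbeddingModel is injected by type, the Ollama bean now wins.
@Service
public class EmbeddingSmokeTest {

    private final EmbeddingModel embeddingModel;

    public EmbeddingSmokeTest(EmbeddingModel embeddingModel) {
        this.embeddingModel = embeddingModel; // resolves to OllamaEmbeddingModel via @Primary
    }

    public int dimensions() {
        return embeddingModel.embed("sample sentence").length; // embed returns float[]
    }
}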