Skip to content

Commit 368c486

Browse files
authored
Merge pull request #27 from dkimds/main
통신을 GET에서 POST로 변경
2 parents 90efef7 + ea663f9 commit 368c486

10 files changed

Lines changed: 827 additions & 460 deletions

File tree

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,4 @@ mlartifacts
1111
mlruns
1212
*.ipynb
1313
/.idea
14+
vllm/

chains/__init__.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
"""
2+
AI Chain modules for different conversation types and routing.
3+
"""
4+
5+
from .classification import ClassificationChain
6+
from .news import NewsChain
7+
from .finance import FinanceChain
8+
from .general import GeneralChain
9+
from .reset import ResetChain
10+
from .router import ChainRouter
11+
12+
__all__ = [
13+
'ClassificationChain',
14+
'NewsChain',
15+
'FinanceChain',
16+
'GeneralChain',
17+
'ResetChain',
18+
'ChainRouter'
19+
]

chains/classification.py

Lines changed: 96 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,96 @@
1+
"""
2+
Classification chain for determining the type of user query.
3+
"""
4+
5+
from langchain_openai import ChatOpenAI
6+
from langchain_core.output_parsers import StrOutputParser
7+
from langchain_core.prompts import PromptTemplate
8+
from langchain_core.runnables import RunnableLambda
9+
from typing import Dict, Any
10+
11+
12+
class ClassificationChain:
    """Classify a user question as one of: 최신소식, 전문지식, 리셋, or 기타.

    A single LLM call receives the question plus the running chat history
    and is instructed to reply with exactly one category word.
    """

    def __init__(self, model: str = "gpt-4o-mini"):
        """Build the classification pipeline.

        Args:
            model: Name of the OpenAI chat model used for classification.
        """
        self.model = model
        self._setup_chain()

    def _setup_chain(self):
        """Assemble prompt | LLM | parser into a runnable pipeline."""
        template = """주어진 사용자 질문과 대화 히스토리를 보고 `최신소식`, `전문지식`, `리셋`, 또는 `기타` 중 하나로 분류하세요.
리셋 관련 키워드: "리셋", "초기화", "지워", "새로시작", "reset", "clear" 등
한 단어 이상으로 응답하지 마세요.

<chat_history>
{chat_history}
</chat_history>

<question>
{question}
</question>

Classification:"""
        self.prompt = PromptTemplate.from_template(template)
        # Parse the model reply straight down to a plain category string.
        self.chain = self.prompt | ChatOpenAI(model=self.model) | StrOutputParser()

    def classify(self, question: str, chat_history: str = "") -> str:
        """Classify a single question.

        Args:
            question: The user's question.
            chat_history: Previous conversation history.

        Returns:
            The category name as a string.
        """
        payload = {"question": question, "chat_history": chat_history}
        return self.chain.invoke(payload)

    def invoke(self, inputs: Dict[str, Any]) -> str:
        """Run the chain on a prepared input mapping.

        Args:
            inputs: Mapping with 'question' and 'chat_history' keys.

        Returns:
            The category name as a string.
        """
        return self.chain.invoke(inputs)

    async def ainvoke(self, inputs: Dict[str, Any]) -> str:
        """Asynchronous variant of ``invoke``.

        Args:
            inputs: Mapping with 'question' and 'chat_history' keys.

        Returns:
            The category name as a string.
        """
        return await self.chain.ainvoke(inputs)

    def get_chain(self):
        """Return the underlying runnable chain."""
        return self.chain

chains/finance.py

Lines changed: 162 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,162 @@
1+
"""
2+
Finance chain for handling financial expert knowledge queries.
3+
"""
4+
5+
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
6+
from langchain_core.output_parsers import StrOutputParser
7+
from langchain_core.prompts import PromptTemplate
8+
from langchain_core.runnables import RunnablePassthrough, RunnableLambda
9+
from langchain_milvus import Milvus
10+
from typing import Dict, Any, Optional
11+
import os
12+
13+
14+
class FinanceChain:
    """Answer finance questions using expert knowledge retrieved from a
    Milvus vector database, degrading to a static apology prompt when the
    database is unreachable.
    """

    def __init__(self, model: str = "gpt-4o-mini", search_k: int = 3):
        """
        Initialize the finance chain.

        Args:
            model: OpenAI model to use for response generation
            search_k: Number of search results to retrieve from vector DB
        """
        self.model = model
        self.search_k = search_k
        self.use_milvus = False
        self.vectorstore = None
        # Always defined so get_retriever() never touches a missing attribute.
        self.retriever = None
        self._setup_milvus()
        self._setup_chain()

    def _setup_milvus(self):
        """Set up Milvus vector database connection (best-effort)."""
        try:
            embeddings = OpenAIEmbeddings()
            milvus_uri = os.getenv("MILVUS_URI", "http://localhost:19530")
            print(f"Milvus 연결 시도: {milvus_uri}")

            self.vectorstore = Milvus(
                embedding_function=embeddings,
                connection_args={
                    "uri": milvus_uri,
                },
                collection_name="coindesk_articles",
            )
            print("Milvus 연결 성공")
            self.use_milvus = True
        except Exception as e:
            # Vector search is optional: log the failure and fall back to the
            # no-retrieval chain instead of crashing the whole service.
            print(f"Milvus 연결 실패: {e}")
            print("주의: Milvus 연결 실패로 벡터 검색 기능을 사용할 수 없습니다.")
            self.use_milvus = False
            self.vectorstore = None

    def _setup_chain(self):
        """Set up the finance chain with retriever, prompt and model."""

        # Finance prompt template
        self.prompt = PromptTemplate(
            template="""You are an expert in finance. \
Always answer questions starting with "전문가에 따르면..". \

Previous conversation:
{chat_history}

Respond to the following question based on the context, statistical information, and previous conversation when possible:
Context: {context}
Question: {question}
Answer:""",
            input_variables=["context", "question", "chat_history"]
        )

        # Create the chain based on Milvus availability
        if self.use_milvus and self.vectorstore:
            self.retriever = self.vectorstore.as_retriever(search_kwargs={"k": self.search_k})
            context_chain = RunnableLambda(lambda x: x["question"]) | self.retriever | self._format_docs

            self.chain = (
                {
                    "context": context_chain,
                    # BUG FIX: RunnablePassthrough() forwarded the WHOLE input
                    # dict into the {question} prompt slot; extract just the
                    # question string, mirroring context_chain/chat_history.
                    "question": lambda x: x["question"],
                    "chat_history": lambda x: x.get("chat_history", "")
                }
                | self.prompt
                | ChatOpenAI(model=self.model)
                | StrOutputParser()
            )
        else:
            # Fallback chain when Milvus is not available
            fallback_prompt = PromptTemplate.from_template(
                """전문가에 따르면, 현재 벡터 데이터베이스에 연결할 수 없어 전문 지식을 제공하기 어렵습니다.
시스템 관리자에게 문의해주세요.

Previous conversation:
{chat_history}

Question: {question}
"""
            )

            self.chain = (
                fallback_prompt
                | ChatOpenAI(model=self.model)
                | StrOutputParser()
            )

    def _format_docs(self, docs):
        """Format retrieved documents for use in the prompt."""
        return "\n\n".join(doc.page_content for doc in docs)

    def process(self, question: str, chat_history: str = "") -> str:
        """
        Process a finance-related question.

        Args:
            question: The user's question
            chat_history: Previous conversation history

        Returns:
            Response based on financial expert knowledge
        """
        return self.chain.invoke({
            "question": question,
            "chat_history": chat_history
        })

    def invoke(self, inputs: Dict[str, Any]) -> str:
        """
        Invoke the finance chain with inputs.

        Args:
            inputs: Dictionary containing 'question' and optional 'chat_history'

        Returns:
            Finance-based response as string
        """
        return self.chain.invoke(inputs)

    async def ainvoke(self, inputs: Dict[str, Any]) -> str:
        """
        Asynchronously invoke the finance chain.

        Args:
            inputs: Dictionary containing 'question' and optional 'chat_history'

        Returns:
            Finance-based response as string
        """
        return await self.chain.ainvoke(inputs)

    def get_chain(self):
        """Get the underlying chain object."""
        return self.chain

    def get_retriever(self) -> Optional[Any]:
        """Get the vector database retriever if available."""
        return self.retriever if self.use_milvus else None

    def is_milvus_available(self) -> bool:
        """Check if Milvus is available for use."""
        return self.use_milvus

chains/general.py

Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
"""
2+
General chain for handling miscellaneous queries.
3+
"""
4+
5+
from langchain_openai import ChatOpenAI
6+
from langchain_core.output_parsers import StrOutputParser
7+
from langchain_core.prompts import PromptTemplate
8+
from typing import Dict, Any
9+
10+
11+
class GeneralChain:
    """Answer (or politely decline) questions outside the specific categories.

    The prompt instructs the model to refuse anything that is not about
    expert knowledge or recent events, otherwise to answer concisely.
    """

    def __init__(self, model: str = "gpt-4o-mini"):
        """Build the general-query pipeline.

        Args:
            model: Name of the OpenAI chat model used for responses.
        """
        self.model = model
        self._setup_chain()

    def _setup_chain(self):
        """Assemble prompt | LLM | parser into a runnable pipeline."""
        template = """Previous conversation:
{chat_history}

Respond to the following question concisely:
If the question is not about expert knowledge or recent events, reply:

"도와드리지 못해서 죄송합니다. 저는 비트코인 관련 전문지식과 최신소식만 답변드릴 수 있습니다."

Only respond with factual, concise answers supported by the context when applicable.
Question: {question}
Answer:
"""
        self.prompt = PromptTemplate.from_template(template)
        self.chain = self.prompt | ChatOpenAI(model=self.model) | StrOutputParser()

    def process(self, question: str, chat_history: str = "") -> str:
        """Process a general question.

        Args:
            question: The user's question.
            chat_history: Previous conversation history.

        Returns:
            A general response, or the polite refusal string.
        """
        payload = {"question": question, "chat_history": chat_history}
        return self.chain.invoke(payload)

    def invoke(self, inputs: Dict[str, Any]) -> str:
        """Run the chain on a prepared input mapping.

        Args:
            inputs: Mapping with 'question' and optional 'chat_history'.

        Returns:
            The model response as a string.
        """
        return self.chain.invoke(inputs)

    async def ainvoke(self, inputs: Dict[str, Any]) -> str:
        """Asynchronous variant of ``invoke``.

        Args:
            inputs: Mapping with 'question' and optional 'chat_history'.

        Returns:
            The model response as a string.
        """
        return await self.chain.ainvoke(inputs)

    def get_chain(self):
        """Return the underlying runnable chain."""
        return self.chain

0 commit comments

Comments
 (0)