Merged
3 changes: 2 additions & 1 deletion .gitignore
@@ -4,7 +4,6 @@
.vscode/
*.iml
.DS_Store

# Environment variables & sensitive info
.env
*.secret
@@ -26,3 +25,5 @@ __pycache__/
.ipynb_checkpoints/
*.pyc
/venv/
/env
/.vs
2 changes: 1 addition & 1 deletion AI/configs/config.json
@@ -1,4 +1,4 @@
{
{

⚠️ Potential issue | 🟠 Major

Remove the BOM character.

A BOM character (U+FEFF, invisible here) has been added to the first line of the JSON file. This can cause JSON parsing errors.

Fix it as follows:

-{
+{
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
{
{
🤖 Prompt for AI Agents
AI/configs/config.json lines 1-1: the first line of the file contains an invisible UTF-8 BOM character that causes JSON parser errors, so remove it; either save the file as UTF-8 without BOM using your editor (or a tool), or delete the single invisible BOM character at the very start of the file and save.
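As a quick illustration (not part of the review), a minimal Python sketch of one way to detect and strip a leading UTF-8 BOM; the file path is simply the one under review here:

# strip_bom.py — minimal sketch: drop a leading UTF-8 BOM if the file has one
path = "AI/configs/config.json"

BOM = b"\xef\xbb\xbf"  # UTF-8 encoding of U+FEFF

with open(path, "rb") as f:
    raw = f.read()

if raw.startswith(BOM):
    with open(path, "wb") as f:
        f.write(raw[len(BOM):])  # rewrite the file without the BOM
    print("BOM removed")
else:
    print("no BOM found")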

"db": {
"host": "ep-misty-lab-adgec0kl-pooler.c-2.us-east-1.aws.neon.tech",
"user": "neondb_owner",
68 changes: 48 additions & 20 deletions AI/finder/main.py
@@ -1,47 +1,75 @@
# finder/run_finder.py
import csv
import sys
import os

from libs.utils import news_processing
from finder import ticker_selector
import time
import requests
import pandas as pd
from langchain_community.llms import Ollama


# ---- Path setup ----
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(project_root)

from libs.utils import news_processing
from finder import ticker_selector
from libs.llm_clients.ollama_client import get_ollama_client # ← import of the split-out utility

def run_finder():
'''
"""
Orchestrates the whole process and returns the final Top 3 investment tickers
'''
"""
# --- Step 1: prepare dependency objects and data ---
llm = Ollama(model="llama3.2")

try:
stability_df = pd.read_csv('data/stability_score_2025.csv')
llm = get_ollama_client() # ✅ includes the health check and model verification
except Exception as e:
print(str(e))
return []

csv_path = os.path.join(project_root, "data", "stability_score_2025.csv")
Comment on lines +26 to +28
@coderabbitai coderabbitai bot Nov 1, 2025

⚠️ Potential issue | 🔴 Critical

The temporary hardcoding disables the Finder stage

Commenting out the run_finder() call and returning a fixed ticker list bypasses the Finder stage entirely. As a result, the pipeline no longer reflects any actual news/metric-based analysis, breaking its core functionality. Please restore the original logic and, if needed, only exit safely when the result is empty.

-    #top_tickers = run_finder()
-    top_tickers = ['AAPL', 'MSFT', 'GOOGL'] # 임시 데이터
+    top_tickers = run_finder()
+    if not top_tickers:
+        print("Finder did not find any tickers.")
+        return []

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In AI/finder/main.py around lines 26 to 28, the Finder stage has been bypassed
by commenting out the run_finder() call and returning a hardcoded ticker list;
restore the original call to run_finder() (uncomment or reintroduce it) and
remove the temporary hardcoded return so the pipeline uses actual news/metric
results, and add a safe early-exit guard that only returns an empty list when
run_finder() legitimately yields no candidates (or raises a handled exception)
so downstream logic remains stable.

Contributor Author

Will be changed in the next patch.


@twq110, understood! Confirmed that this will be fixed in the next patch.

If you'd like, I can create an issue to track this. Shall I create one?


try:
stability_df = pd.read_csv(csv_path)
except FileNotFoundError:
print("오류: 'data/stability_score_2025.csv' 파일을 찾을 수 없습니다.")
print(f"오류: {csv_path} 파일을 찾을 수 없습니다.")
return []

# --- Step 2: collect and summarize the weekly news data ---
weekly_news_df = news_processing.get_weekly_news_summary(days=5, llm_client=llm)
try:
weekly_news_df = news_processing.get_weekly_news_summary(days=5, llm_client=llm)
except requests.exceptions.ConnectionError as e:
print(f"[LLM 연결 오류] 뉴스 요약 단계에서 LLM 서버 연결 실패: {e}")
return []
except requests.exceptions.Timeout as e:
print(f"[LLM 타임아웃] 뉴스 요약 단계에서 응답 지연: {e}")
return []
except Exception as e:
print(f"[예기치 못한 오류] 뉴스 요약 단계: {e}")
return []

if weekly_news_df.empty:
if weekly_news_df is None or getattr(weekly_news_df, "empty", False):
print("분석할 뉴스 데이터가 없어 프로세스를 종료합니다.")
return []

# --- Step 3: select the Top 3 tickers based on the news and financial data ---
top_3_tickers = ticker_selector.select_top_stocks(
news_summary_df=weekly_news_df,
stability_df=stability_df,
llm_client=llm
)
try:
top_3_tickers = ticker_selector.select_top_stocks(
news_summary_df=weekly_news_df,
stability_df=stability_df,
llm_client=llm
)
except requests.exceptions.ConnectionError as e:
print(f"[LLM 연결 오류] 종목 선정 단계에서 LLM 서버 연결 실패: {e}")
return []
except requests.exceptions.Timeout as e:
print(f"[LLM 타임아웃] 종목 선정 단계에서 응답 지연: {e}")
return []
except Exception as e:
print(f"[예기치 못한 오류] 종목 선정 단계: {e}")
return []

print("\n🎉 [Finder 모듈 최종 결과] 투자 추천 Top 3 종목 🎉")
print(top_3_tickers)

return top_3_tickers

if __name__ == '__main__':
run_finder()
run_finder()
15 changes: 9 additions & 6 deletions AI/libs/core/pipeline.py
@@ -1,4 +1,4 @@
import os
import os
import sys
from typing import List, Dict
import json
@@ -12,19 +12,19 @@

# --- Module imports ---
from finder.main import run_finder
from AI.transformer.main import run_transformer
from AI.libs.utils.fetch_ohlcv import fetch_ohlcv
from transformer.main import run_transformer
from libs.utils.fetch_ohlcv import fetch_ohlcv
from xai.run_xai import run_xai
from AI.libs.utils.get_db_conn import get_db_conn
from libs.utils.get_db_conn import get_db_conn
# ---------------------------------

def run_weekly_finder() -> List[str]:
"""
Runs the weekly stock discovery (Finder) and returns the result (a list of tickers).
"""
print("--- [PIPELINE-STEP 1] Finder 모듈 실행 시작 ---")
top_tickers = run_finder()
# top_tickers = ['AAPL', 'MSFT', 'GOOGL'] # 임시 데이터
#top_tickers = run_finder()
top_tickers = ['AAPL', 'MSFT', 'GOOGL'] # 임시 데이터
print(f"--- [PIPELINE-STEP 1] Finder 모듈 실행 완료 ---")
return top_tickers

@@ -145,12 +145,15 @@ def run_pipeline():
"""
Runs the full pipeline (Finder -> Transformer -> XAI).
"""
#--- Load the config file ---
config : Dict = {}
try:
with open(os.path.join(project_root, 'configs', 'config.json'), 'r') as f:
config = json.load(f)
except FileNotFoundError:
print("[WARN] configs/config.json 파일을 찾을 수 없어 DB 연결이 필요 없는 기능만 작동합니다.")

#--- Run the pipeline stage by stage ---
top_tickers = run_weekly_finder()
if not top_tickers:
print("Finder에서 종목을 찾지 못해 파이프라인을 중단합니다.")
3 changes: 3 additions & 0 deletions AI/libs/llm_clients/__init__.py
@@ -0,0 +1,3 @@
#AI/libs/llm_clients/ollama_client.py
from libs.llm_clients.ollama_client import get_ollama_client
__all__ = ["get_ollama_client"]
68 changes: 68 additions & 0 deletions AI/libs/llm_clients/ollama_client.py
@@ -0,0 +1,68 @@
# libs/llm_clients/ollama_client.py
import os
import requests
from typing import Optional
from langchain_community.llms import Ollama

# ---- Default settings (can be overridden via environment variables) ----
OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "http://127.0.0.1:11434")
OLLAMA_MODEL = os.environ.get("OLLAMA_MODEL", "llama3.2")

def _ollama_alive(base_url: str, timeout: float = 3.0) -> bool:
"""
Ollama server health check: a simple probe of /api/tags
"""
try:
r = requests.get(f"{base_url}/api/tags", timeout=timeout)
return r.ok
except requests.exceptions.RequestException:
return False

def _model_available(base_url: str, model: str) -> bool:
"""
Check whether the given model exists in the local Ollama instance
"""
try:
r = requests.get(f"{base_url}/api/tags", timeout=5)
r.raise_for_status()
tags = r.json().get("models", [])
names = {m.get("name") for m in tags if isinstance(m, dict)}
# Ollama may list the model as "llama3.2" or "llama3.2:latest"
return model in names or f"{model}:latest" in names
except Exception:
return False

def get_ollama_client(
model: Optional[str] = None,
base_url: Optional[str] = None,
# commented out because langchain 0.2+ may not accept a request_timeout argument directly
# request_timeout: float = 60.0,
) -> Ollama:
"""
Create an Ollama LangChain LLM client
- Pre-checks that the server is reachable and the model exists
"""
model = model or OLLAMA_MODEL
base_url = base_url or OLLAMA_BASE_URL

if not _ollama_alive(base_url):
raise RuntimeError(
f"[연결 실패] Ollama 서버에 접속할 수 없습니다. llama3.2 설치 여부 확인해주세요.\n"
f"- base_url: {base_url}\n"
f"- 조치: (1) 'ollama serve' 실행 여부 확인 (2) 방화벽/프록시 (NO_PROXY=localhost,127.0.0.1) (3) 11434 포트 개방\n"
f"- 테스트: curl {base_url}/api/tags"
)

if not _model_available(base_url, model):
raise RuntimeError(
f"[모델 없음] '{model}' 모델이 Ollama에 없습니다.\n"
f"- 조치: ollama pull {model}\n"
f"- 보유 모델 확인: curl {base_url}/api/tags"
)

return Ollama(
model=model,
base_url=base_url,
# detailed parameters can be passed via model_kwargs if needed
# model_kwargs={"num_ctx": 4096},
)
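For reference, a minimal usage sketch of the client above; the prompt string is invented for illustration, and the .invoke() call assumes the standard LangChain LLM interface:

# usage sketch — assumes a local Ollama server is running and the model has been pulled
from libs.llm_clients.ollama_client import get_ollama_client

try:
    llm = get_ollama_client()  # the health check and model check happen here
except RuntimeError as e:
    print(e)  # server unreachable or model missing
else:
    # hypothetical prompt, only for illustration
    answer = llm.invoke("Summarize today's market news in one sentence.")
    print(answer)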
8 changes: 8 additions & 0 deletions AI/libs/utils/__init__.py
@@ -0,0 +1,8 @@
# AI/libs/utils/__init__.py

⚠️ Potential issue | 🟠 Major

Remove the BOM character.

The first line of the file contains a BOM character (U+FEFF, invisible here). This can cause encoding problems.

Fix it as follows:

-# AI/libs/utils/__init__.py
+# AI/libs/utils/__init__.py
📝 Committable suggestion


Suggested change
# AI/libs/utils/__init__.py
# AI/libs/utils/__init__.py
🤖 Prompt for AI Agents
AI/libs/utils/__init__.py lines 1-1: the file contains a leading BOM character
on the first line; remove the BOM so the file is plain UTF-8 (no BOM). Open the
file in an editor that can show invisible characters or use a tool/command to
strip the BOM, delete the leading invisible character, and re-save the file as
UTF-8 without BOM; verify by reopening and ensuring the first character is the
expected ASCII character (or start of file) and no BOM remains.

from .fetch_ohlcv import fetch_ohlcv
from .get_db_conn import get_db_conn

__all__ = [
"fetch_ohlcv",
"get_db_conn",
]
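A one-line illustration of what these re-exports enable for callers (assuming the AI/ directory is on sys.path, as the other modules in this PR arrange):

# thanks to the package-level re-exports, callers can import directly from libs.utils
from libs.utils import fetch_ohlcv, get_db_conn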
9 changes: 4 additions & 5 deletions AI/libs/utils/fetch_ohlcv.py
@@ -1,8 +1,7 @@
import psycopg2
import pandas as pd
import pandas as pd

# Create the DB connection
from AI.libs.utils.get_db_conn import get_db_conn
from .get_db_conn import get_db_conn

# Load OHLCV data
def fetch_ohlcv(
@@ -23,12 +22,12 @@ def fetch_ohlcv(
config (dict): settings including the DB connection info

Returns:
DataFrame: columns = [date, open, high, low, close, volume]
DataFrame: columns = [ticker, date, open, high, low, close, volume, adjusted_close]
"""
conn = get_db_conn(config)

query = """
SELECT date, open, high, low, close, volume
SELECT ticker, date, open, high, low, close, adjusted_close, volume
FROM public.price_data
WHERE ticker = %s
AND date BETWEEN %s AND %s
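The full signature of fetch_ohlcv is collapsed in this diff, so the argument names and the config shape in the sketch below are assumptions inferred from the docstring and the SQL placeholders, not taken from the PR:

# usage sketch — argument names and config keys are assumed
from libs.utils.fetch_ohlcv import fetch_ohlcv

db_config = {"host": "...", "user": "...", "password": "...", "dbname": "..."}  # placeholder values
df = fetch_ohlcv(ticker="AAPL", start_date="2025-01-01", end_date="2025-06-30", config=db_config)

# per the updated docstring, the returned DataFrame should contain:
# ticker, date, open, high, low, close, volume, adjusted_close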
3 changes: 2 additions & 1 deletion AI/requirements.txt
@@ -1,4 +1,4 @@
pandas
pandas

⚠️ Potential issue | 🟠 Major

Remove the BOM (Byte Order Mark) character.

The first line of the file contains a BOM character (U+FEFF, invisible here). This can cause encoding problems and may trigger errors during pip install.

Fix it as follows:

-pandas
+pandas
📝 Committable suggestion


Suggested change
pandas
pandas
🤖 Prompt for AI Agents
In AI/requirements.txt around lines 1 to 1, the first line contains a BOM (Byte
Order Mark) character before "pandas"; remove the leading BOM so the file is
saved as UTF-8 without BOM and the line begins exactly with "pandas" (no hidden
characters), then re-save the file and commit the change.

psycopg2-binary
langchain-community
tqdm
@@ -11,3 +11,4 @@ yfinance
groq
requests
beautifulsoup4
pathlib

⚠️ Potential issue | 🟡 Minor

Remove the unnecessary dependency.

pathlib has been part of the Python standard library since 3.4, so it does not need to be listed in requirements.txt.

Delete the following line:

-pathlib
📝 Committable suggestion


Suggested change
pathlib
🤖 Prompt for AI Agents
In AI/requirements.txt around line 14, remove the unnecessary "pathlib"
dependency entry (it is part of the Python standard library since 3.4); delete
that line from requirements.txt so the file no longer lists pathlib.

49 changes: 49 additions & 0 deletions AI/tests/quick_db_check.py
@@ -0,0 +1,49 @@
# quick_db_check.py
import os
import sys
import json
from typing import Dict, Union

import psycopg2


# --- Project root path setup ---------------------------------------------------
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(project_root)

# --- Load the config file ------------------------------------------------------------
cfg_path = os.path.join(project_root, "configs", "config.json")

config: Dict = {}
if os.path.isfile(cfg_path):
with open(cfg_path, "r", encoding="utf-8") as f:
config = json.load(f)
print("[INFO] configs/config.json 로드 완료")
else:
print(f"[WARN] 설정 파일이 없습니다: {cfg_path}")

db_cfg: Union[str, Dict] = (config or {}).get("db", {})

# --- DB connection test ------------------------------------------------------------
conn = None
try:
# if the db config is a dict, pass it as keyword args; if it is a string (DSN), use it as-is
if isinstance(db_cfg, dict):
conn = psycopg2.connect(**db_cfg) # e.g. {"host": "...", ...}
else:
conn = psycopg2.connect(dsn=str(db_cfg))

with conn:
with conn.cursor() as cur:
cur.execute("SELECT version();")
print("✅ 연결 성공:", cur.fetchone()[0])

cur.execute("SELECT current_database(), current_user;")
db, user = cur.fetchone()
print(f"ℹ️ DB/USER: {db} / {user}")
except Exception as e:
print("❌ 연결 실패:", repr(e))
finally:
if conn is not None:
conn.close()
