# emotion_engine.py (final version after revisions)
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
def load_emotion_classifier():
    # Instead of a local path, use the model ID on the Hugging Face Hub
    # ("username/model-name" format).
    MODEL_PATH = "koons/korean-emotion-classifier-final"

    print(f"Loading model '{MODEL_PATH}' from the Hugging Face Hub...")
    try:
        # local_files_only is omitted so the model is downloaded from the Hub.
        tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
        model = AutoModelForSequenceClassification.from_pretrained(MODEL_PATH)
        print("✅ Hugging Face Hub model loaded successfully!")
    except Exception as e:
        print(f"❌ Error while loading the model: {e}")
        return None

    # Run on GPU (device 0) if available, otherwise fall back to CPU.
    device = 0 if torch.cuda.is_available() else -1
    emotion_classifier = pipeline("text-classification", model=model, tokenizer=tokenizer, device=device)
    return emotion_classifier
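
# Note: a "text-classification" pipeline returns a list of dicts, e.g.
# [{'label': '기쁨', 'score': 0.98}]; the exact label names depend on the
# fine-tuned model, so the example label here is an assumption.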
# The predict_emotion function is kept unchanged.
def predict_emotion(classifier, text):
    if not text or not text.strip():
        return "No input"
    if classifier is None:
        return "Error: emotion analysis engine is not ready."
    result = classifier(text)
    return result[0]['label']
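
# --- Usage sketch (illustrative addition, not part of the original module) ---
# A minimal smoke test, assuming the Hub model above is publicly downloadable;
# the sample sentence below is made up for demonstration.
if __name__ == "__main__":
    classifier = load_emotion_classifier()
    sample = "오늘 정말 기분이 좋아요!"  # "I feel really good today!"
    print(predict_emotion(classifier, sample))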