kootaeng2 commited on
Commit
d2df06f
·
1 Parent(s): 4099675

Refactor: Separate model storage to Hugging Face Hub

Browse files
Files changed (2) hide show
  1. .gitattributes +0 -4
  2. src/emotion_engine.py +13 -16
.gitattributes CHANGED
@@ -1,4 +0,0 @@
1
- korean-emotion-classifier-final/model.safetensors filter=lfs diff=lfs merge=lfs -text
2
- *safetensores filter=lfs diff=lfs merge=lfs -text
3
- *.safetensors filter=lfs diff=lfs merge=lfs -text
4
- *.pt filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
src/emotion_engine.py CHANGED
@@ -4,29 +4,26 @@ import torch
4
  from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
5
  import os
6
 
 
 
 
 
7
  def load_emotion_classifier():
8
  # --- ์ด ๋ถ€๋ถ„์„ ์ˆ˜์ •ํ•ฉ๋‹ˆ๋‹ค ---
9
- # ํ˜„์žฌ ์Šคํฌ๋ฆฝํŠธ ํŒŒ์ผ์˜ ์ ˆ๋Œ€ ๊ฒฝ๋กœ๋ฅผ ์ฐพ์Šต๋‹ˆ๋‹ค. (์˜ˆ: /app/src/emotion_engine.py)
10
- script_path = os.path.abspath(__file__)
11
- # ์Šคํฌ๋ฆฝํŠธ๊ฐ€ ์žˆ๋Š” ๋””๋ ‰ํ„ฐ๋ฆฌ๋ฅผ ์ฐพ์Šต๋‹ˆ๋‹ค. (์˜ˆ: /app/src)
12
- src_dir = os.path.dirname(script_path)
13
- # ๊ทธ ์ƒ์œ„ ๋””๋ ‰ํ„ฐ๋ฆฌ, ์ฆ‰ ํ”„๋กœ์ ํŠธ ๋ฃจํŠธ๋ฅผ ์ฐพ์Šต๋‹ˆ๋‹ค. (์˜ˆ: /app)
14
- base_dir = os.path.dirname(src_dir)
15
- # ํ”„๋กœ์ ํŠธ ๋ฃจํŠธ์™€ ๋ชจ๋ธ ํด๋” ์ด๋ฆ„์„ ํ•ฉ์ณ ์ •ํ™•ํ•œ ๊ฒฝ๋กœ๋ฅผ ๋งŒ๋“ญ๋‹ˆ๋‹ค.
16
- MODEL_PATH = os.path.join(base_dir, "korean-emotion-classifier-final")
17
-
18
- print(f"--- ๋ฐฐํฌ ํ™˜๊ฒฝ ๋ชจ๋ธ ๊ฒฝ๋กœ ํ™•์ธ: [{MODEL_PATH}] ---")
19
-
20
  try:
21
- # local_files_only ์˜ต์…˜์€ ๋กœ์ปฌ ๊ฒฝ๋กœ๋ฅผ ๋ช…์‹œํ•  ๋•Œ ์•ˆ์ „์„ ์œ„ํ•ด ์œ ์ง€ํ•˜๋Š” ๊ฒƒ์ด ์ข‹์Šต๋‹ˆ๋‹ค.
22
- tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, local_files_only=True)
23
- model = AutoModelForSequenceClassification.from_pretrained(MODEL_PATH, local_files_only=True)
24
- print("โœ… ๋กœ์ปฌ ๋ชจ๋ธ ํŒŒ์ผ ์ง์ ‘ ๋กœ๋”ฉ ์„ฑ๊ณต!")
25
 
26
  except Exception as e:
27
  print(f"โŒ ๋ชจ๋ธ ๋กœ๋”ฉ ์ค‘ ์˜ค๋ฅ˜: {e}")
28
  return None
29
- # --- ์—ฌ๊ธฐ๊นŒ์ง€ ์ˆ˜์ • ---
30
 
31
  device = 0 if torch.cuda.is_available() else -1
32
  emotion_classifier = pipeline("text-classification", model=model, tokenizer=tokenizer, device=device)
 
4
  from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
5
  import os
6
 
7
+ import torch
8
+ from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
9
+ import os
10
+
11
  def load_emotion_classifier():
12
  # --- ์ด ๋ถ€๋ถ„์„ ์ˆ˜์ •ํ•ฉ๋‹ˆ๋‹ค ---
13
+ # ๋กœ์ปฌ ๊ฒฝ๋กœ ๋Œ€์‹ , Hugging Face Hub์˜ ๋ชจ๋ธ ID๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค.
14
+ MODEL_PATH = "koons/korean-emotion-classifier-final" # "์‚ฌ์šฉ์ž์ด๋ฆ„/๋ชจ๋ธ์ด๋ฆ„" ํ˜•์‹
15
+
16
+ print(f"Hugging Face Hub ๋ชจ๋ธ '{MODEL_PATH}'์—์„œ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค...")
17
+
 
 
 
 
 
 
18
  try:
19
+ # local_files_only ์˜ต์…˜์„ ์ œ๊ฑฐํ•˜์—ฌ ์˜จ๋ผ์ธ์—์„œ ๋‹ค์šด๋กœ๋“œํ•˜๋„๋ก ํ•ฉ๋‹ˆ๋‹ค.
20
+ tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
21
+ model = AutoModelForSequenceClassification.from_pretrained(MODEL_PATH)
22
+ print("โœ… Hugging Face Hub ๋ชจ๋ธ ๋กœ๋”ฉ ์„ฑ๊ณต!")
23
 
24
  except Exception as e:
25
  print(f"โŒ ๋ชจ๋ธ ๋กœ๋”ฉ ์ค‘ ์˜ค๋ฅ˜: {e}")
26
  return None
 
27
 
28
  device = 0 if torch.cuda.is_available() else -1
29
  emotion_classifier = pipeline("text-classification", model=model, tokenizer=tokenizer, device=device)