# ==========================================
# Sentiment analysis Gradio app using a Hugging Face model
# ==========================================
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
from peft import PeftModel
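
# Note: torch, transformers, and peft need to be listed in the Space's requirements.txt;
# gradio itself is typically provided by the Gradio SDK runtime (an assumption about this Space's setup).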
# ๋ชจ๋ธ ๋กœ๋“œ
print("๋ชจ๋ธ ๋กœ๋“œ ์ค‘...")
BASE_MODEL = "klue/bert-base"
LORA_MODEL = "minmingbb/nsmc-sentiment-lora" # ์—ฌ๋Ÿฌ๋ถ„์˜ Model
tokenizer = AutoTokenizer.from_pretrained(LORA_MODEL)
base_model = AutoModelForSequenceClassification.from_pretrained(
BASE_MODEL,
num_labels=2
)
model = PeftModel.from_pretrained(base_model, LORA_MODEL)
model.eval()
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
print(f"์™„๋ฃŒ! (Device: {device})")
# ๊ฐ์ • ๋ถ„์„ ํ•จ์ˆ˜
def analyze_sentiment(text):
if not text.strip():
return "ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”", {}
# ํ† ํฌ๋‚˜์ด์ง•
inputs = tokenizer(
text,
return_tensors="pt",
truncation=True,
max_length=128,
padding=True
).to(device)
# ์˜ˆ์ธก
with torch.no_grad():
outputs = model(**inputs)
probs = torch.softmax(outputs.logits, dim=-1)[0]
# ๊ฒฐ๊ณผ
pred = torch.argmax(probs).item()
label = "๐Ÿ˜Š ๊ธ์ •" if pred == 1 else "๐Ÿ˜ž ๋ถ€์ •"
confidence = probs[pred].item()
result = f"**{label}** (ํ™•์‹ ๋„: {confidence*100:.1f}%)"
prob_dict = {
"๐Ÿ˜ž ๋ถ€์ •": float(probs[0]),
"๐Ÿ˜Š ๊ธ์ •": float(probs[1])
}
return result, prob_dict
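
# Optional sanity check before wiring up the UI (hypothetical usage; uncomment to try):
# print(analyze_sentiment("정말 재미있는 영화였어요!"))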

# Gradio UI
demo = gr.Interface(
    fn=analyze_sentiment,
    inputs=gr.Textbox(
        label="Movie Review",
        placeholder="Enter a review about a movie...",
        lines=3
    ),
    outputs=[
        gr.Markdown(label="Analysis Result"),
        gr.Label(label="Sentiment Probabilities", num_top_classes=2)
    ],
    title="Movie Review Sentiment Analysis",
    description="An NSMC (Korean movie review) sentiment analysis model fine-tuned with LoRA.",
    examples=[
        ["정말 재미있는 영화였어요! 강력 추천합니다."],  # "A really fun movie! Highly recommended."
        ["시간 낭비였습니다. 별로였어요."],  # "A waste of time. Not good."
        ["배우들의 연기가 훌륭했습니다."],  # "The actors' performances were excellent."
        ["스토리가 지루하고 재미없었어요."],  # "The story was boring and dull."
    ],
    theme="soft",
    allow_flagging="never"
)
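
# share=True creates a temporary public gradio.live link when running locally; on
# Hugging Face Spaces the app is already served publicly. debug=True blocks the main
# thread and surfaces errors in the logs.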

# Launch the app
demo.launch(share=True, debug=True)