YAML Metadata Warning: empty or missing YAML metadata in repo card.
Check out the documentation for more information.
Mindcast SD_SC 통합 모델 (Unified Model)
🚀 간단한 사용법 (Quick Usage)
# Minimal inference example for the unified sarcasm-detection /
# sentiment-classification (SD_SC) model.
import torch  # fix: `torch` is used below but was never imported in the snippet
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load model and tokenizer from the Hub.
tokenizer = AutoTokenizer.from_pretrained("merrybabyxmas/mindcast-unified-sd-sc")
model = AutoModelForSequenceClassification.from_pretrained(
    "merrybabyxmas/mindcast-unified-sd-sc",
    trust_remote_code=True,  # the repo ships a custom model class on the Hub
)

# Tokenize the input text.
# NOTE(review): the sample sentence below is mojibake from the page
# extraction (original Korean reads roughly "와 진짜 최고다~ ..."); left
# byte-identical rather than guessing the exact original wording.
text = "μ μ§μ§ μ΅κ³ λ€~ μμ λ§νλ€"
inputs = tokenizer(text, return_tensors="pt")

# Predict — inference only, so disable gradient tracking.
with torch.no_grad():
    # fix: this line must be indented inside the `with` block (it was
    # flattened in the extracted page, which is a SyntaxError as written)
    outputs = model(**inputs, return_intermediate_results=True)

# Interpret the results: the custom model returns a dict with emotion
# logits plus per-example sarcasm predictions.
emotion_probs = torch.softmax(outputs['logits'], dim=-1)
sarcasm_pred = outputs['sarcasm_predictions'][0].item()
emotion_pred = torch.argmax(emotion_probs, dim=-1)[0].item()
# Label names are stored on the model config by the custom model code.
print(f"Sarcasm: {model.config.sarcasm_labels[sarcasm_pred]}")
print(f"Emotion: {model.config.emotion_labels[emotion_pred]}")
🎯 더 간단한 방법 (Even Simpler)
# One-shot alternative: the custom model exposes a predict() helper that
# tokenizes interpretation into a ready-made result dict.
prediction = model.predict(
    inputs['input_ids'], inputs['attention_mask'], tokenizer
)
print(prediction)
# Example output (Korean label restored from mojibake in the page):
# {'sarcasm': 'Sarcastic', 'emotion': '분노', 'model_used': 'sarcastic'}
- Downloads last month
- 1
Inference Providers (NEW)
This model isn't deployed by any Inference Provider. Ask for provider support.