from transformers import pipeline, AutoTokenizer
from optimum.onnxruntime import ORTModelForSequenceClassification

# Hugging Face Hub identifier: ONNX export of a BERTweet model fine-tuned
# for sentiment classification.
model_id = "dilexsan/bertweet_base_sentimental_onnx"


def main() -> None:
    """Load the ONNX sentiment model and classify one sample sentence.

    Downloads the tokenizer and ONNX model weights from the Hugging Face
    Hub (network access required on first run), wires them into a
    ``text-classification`` pipeline, and prints the prediction.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    # ORTModelForSequenceClassification runs the exported ONNX graph via
    # onnxruntime instead of PyTorch/TensorFlow.
    model = ORTModelForSequenceClassification.from_pretrained(model_id)

    sentiment_analyzer = pipeline(
        "text-classification",
        model=model,
        tokenizer=tokenizer,
    )

    text = "I am so happy with this service, it was great!"
    # NOTE(review): per the transformers pipeline API, result is expected to
    # be a list of {"label": ..., "score": ...} dicts — confirm for this model.
    result = sentiment_analyzer(text)
    print(result)


if __name__ == "__main__":
    main()
# --- Hub model-card page metadata (not code; copied from the web page) ---
# Downloads last month: 35
# Inference Providers (NEW): This model isn't deployed by any Inference
# Provider. 🙋 Ask for provider support.