
Deberta for Sentiment Analysis

This is a Deberta model fine-tuned on the Amazon multi-review dataset, trained on a total of 1 million reviews.

How to use the model

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

MODEL_NAME = 'RashidNLP/Amazon-Deberta-Base-Sentiment'
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load the fine-tuned model and its tokenizer
bert_model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, num_labels=3).to(device)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

def get_sentiment(sentence):
    bert_dict = {}
    # Tokenize the input and move the tensors to the same device as the model
    vectors = tokenizer(sentence, return_tensors='pt').to(device)
    outputs = bert_model(**vectors).logits
    # Convert the three logits to probabilities for negative, neutral, and positive
    probs = torch.nn.functional.softmax(outputs, dim=1)[0]
    bert_dict['neg'] = round(probs[0].item(), 3)
    bert_dict['neu'] = round(probs[1].item(), 3)
    bert_dict['pos'] = round(probs[2].item(), 3)
    return bert_dict

get_sentiment("This is quite a mess you have made")
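If you want to score many reviews at once, the same model and tokenizer can be called on a padded batch. The sketch below is a minimal, hypothetical extension of the snippet above; the helper name get_sentiment_batch and the padding/truncation settings are assumptions and are not part of the original card.

def get_sentiment_batch(sentences):
    # Pad and truncate so reviews of different lengths fit in one tensor (assumed settings)
    vectors = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt').to(device)
    with torch.no_grad():
        logits = bert_model(**vectors).logits
    probs = torch.nn.functional.softmax(logits, dim=1)
    # One {'neg', 'neu', 'pos'} dict per input sentence
    return [
        {'neg': round(p[0].item(), 3), 'neu': round(p[1].item(), 3), 'pos': round(p[2].item(), 3)}
        for p in probs
    ]

get_sentiment_batch(["Great product, works as advertised", "Arrived broken and late"])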