import torch
import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the fine-tuned climate-detection model and its tokenizer from the Hub
tokenizer = AutoTokenizer.from_pretrained("Gerard-1705/bertin_base_climate_detection_es")
model = AutoModelForSequenceClassification.from_pretrained("Gerard-1705/bertin_base_climate_detection_es")

# Label mappings for the two output classes (the lookup in inference_fun uses model.config.id2label)
id2label = {0: "NEGATIVE", 1: "POSITIVE"}
label2id = {"NEGATIVE": 0, "POSITIVE": 1}

def inference_fun(user_input):
    # Tokenize the input text and run a forward pass without tracking gradients
    inputs = tokenizer(user_input, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    # Pick the highest-scoring class and map it back to a human-readable label
    predicted_class_id = logits.argmax().item()
    output_tag = model.config.id2label[predicted_class_id]
    return output_tag
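# Optional sanity check before building the UI: call the function directly.
# The sentence below is only an illustrative input (an assumption, not taken
# from the model card); the returned tag comes from model.config.id2label.
print(inference_fun("El cambio climático está afectando a los ecosistemas marinos."))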
# Expose the inference function as a simple text-in / text-out Gradio demo
iface = gr.Interface(fn=inference_fun, inputs="text", outputs="text")
iface.launch()
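# Note: launch() accepts extra options; for example, iface.launch(share=True)
# creates a temporary public URL, which is handy when demoing from a notebook.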