import pathlib

import gradio as gr
import torch
from scipy.special import softmax
from transformers import AutoTokenizer, AutoModelForSequenceClassification
# load model and tokenizer from the local checkpoint directory
MODEL_PATH = pathlib.Path("data") / "roberta-large-wmt"
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_PATH)
model.to("cuda" if torch.cuda.is_available() else "cpu")
| def detect(text: str) -> str: | |
| inputs = tokenizer(text, return_tensors="pt") | |
| with torch.no_grad(): | |
| logits = model(**inputs).logits | |
| predicted_class_id = logits.argmax(dim=1).item() | |
| label = model.config.id2label[predicted_class_id] | |
| result = "machine-generated" if label == "0" else "human-generated" | |
| return ", ".join( | |
| [ | |
| f"The text is {result}", | |
| f"with a certainty of ${100 * softmax(logits, axis=1)[0][int(label)]:.2f}%", | |
| ] | |
| ) | |
with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Hello!
        This is the demo for <a href="https://arxiv.org/abs/2305.12680" target="_blank">G3Detector</a>.
        """
    )
| inp = gr.Textbox(label="Text", placeholder="Paste text here...") | |
| out = gr.Textbox(label="Result") | |
| detect_btn = gr.Button("Detect") | |
| detect_btn.click(fn=detect, inputs=inp, outputs=out, api_name="G3Detector") | |
| demo.launch() | |
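
# Usage note (a sketch, not part of the original app): because the click handler
# is registered with api_name="G3Detector", a running Space also exposes a
# "/G3Detector" API endpoint that can be called with gradio_client from another
# script. The Space identifier below is a hypothetical placeholder.
#
#     from gradio_client import Client
#     client = Client("<owner>/<space-name>")  # hypothetical Space id
#     print(client.predict("Some text to check...", api_name="/G3Detector"))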