import os
import time
import gradio as gr
import numpy as np
from huggingface_hub import hf_hub_download

from src.infer import load_model, predict

# Keep Hugging Face caches under /data — presumably a persistent volume on the
# deployment host (looks like a HF Space); setdefault lets an externally
# provided HF_HOME win.  TODO confirm /data exists in all run environments.
os.environ.setdefault("HF_HOME", "/data/.huggingface")

# Lazily populated by _warmup(): the loaded model handle and the local
# checkpoint path.  Module-level so the model is loaded at most once.
_model = None
_ckpt_path = None

def _warmup() -> str:
    """Download the checkpoint and load the model, at most once per process.

    Idempotent: the first call downloads the checkpoint via the HF Hub and
    loads the model; later calls return immediately.

    Returns:
        A short human-readable status message.
    """
    global _model, _ckpt_path
    if _model is not None:
        # Keep the return type consistent with the cold path (the original
        # returned None here while the cold path returned a string).
        return "Model already loaded."
    t0 = time.time()
    _ckpt_path = hf_hub_download(
        repo_id="rhasan/empathy",
        filename="UPLME_NewsEmp_tuned-lambdas.ckpt",
        repo_type="model",
        local_dir="/data/uplme_ckpt",
    )
    loaded = load_model(_ckpt_path)
    # Bug fix: the original discarded load_model's result, so _model stayed
    # None and every call re-ran the download/load path.  load_model may
    # return the model or load into module state elsewhere — TODO confirm;
    # either way, mark warm-up as done so the guard above short-circuits.
    _model = loaded if loaded is not None else True
    return f"Model loaded in {time.time() - t0:.1f} seconds."

def predict_with_ci(essay: str, article: str) -> dict:
    """Predict an empathy score with a 95% confidence interval.

    Args:
        essay: The essay text to score.
        article: The news article the essay responds to.

    Returns:
        {"mean": float, "ci": (low, high)} with all values on the
        [0, 100] scale; the CI endpoints are clamped to that range.
    """
    _warmup()
    mean, var = predict(essay, article)
    # Model scores are on the original [1, 7] scale; map linearly to [0, 100].
    scale = 100.0 / 6.0
    mean = (mean - 1) * scale
    # Bug fix: the standard deviation must be rescaled by the same linear
    # factor as the mean — the original left std in [1, 7]-scale units, making
    # the CI width wrong by a factor of 100/6.
    std = np.sqrt(var) * scale
    ci_low = max(0.0, mean - 1.96 * std)
    ci_upp = min(100.0, mean + 1.96 * std)
    return {"mean": mean, "ci": (ci_low, ci_upp)}

def _predict_for_ui(essay: str, article: str) -> tuple:
    """Adapter for gradio: return one value per output component.

    Bug fix: the original wired predict_with_ci (which returns a single dict)
    directly to two output components, which gradio cannot unpack.
    """
    result = predict_with_ci(essay, article)
    ci_low, ci_upp = result["ci"]
    return result["mean"], f"[{ci_low:.4f}, {ci_upp:.4f}]"

with gr.Blocks(title="Empathy Prediction") as demo:
    gr.Markdown("# Empathy Prediction with Uncertainty Estimation")
    with gr.Row():
        with gr.Column():
            essay_input = gr.Textbox(label="Essay", lines=10, placeholder="Enter the essay text here...")
            article_input = gr.Textbox(label="Article", lines=10, placeholder="Enter the article text here...")
            button = gr.Button("Predict")
        with gr.Column():
            output_mean = gr.Number(label="Predicted Empathy Mean", precision=4)
            # A (low, high) interval cannot be rendered by gr.Number; show it
            # as formatted text instead.  Also fixes the invalid "\%" escape
            # in the original label.
            ci = gr.Textbox(label="95% CI")

    button.click(fn=_predict_for_ui, inputs=[essay_input, article_input], outputs=[output_mean, ci])

if __name__ == "__main__":
    # Start the gradio server when the file is executed as a script.
    demo.launch()