# empathy / app.py
# HF Space rhasan/empathy — commit 6b3d060: "infer working locally - first try at HF"
import os
import time
import gradio as gr
import numpy as np
from huggingface_hub import hf_hub_download
from src.infer import load_model, predict
os.environ.setdefault("HF_HOME", "/data/.huggingface")
_model = None
_ckpt_path = None
def _warmup():
    """Download the checkpoint and load the model once; later calls are no-ops.

    Returns:
        A short status message saying whether the model was freshly loaded
        or already cached.
    """
    global _model, _ckpt_path
    if _model is not None:
        # Consistent return on the cached path (original returned None here).
        return "Model already loaded."
    t0 = time.time()
    _ckpt_path = hf_hub_download(
        repo_id="rhasan/empathy",
        filename="UPLME_NewsEmp_tuned-lambdas.ckpt",
        repo_type="model",
        local_dir="/data/uplme_ckpt",
    )
    # Bug fix: the original discarded load_model()'s result, so _model stayed
    # None and every single call re-ran the download/load path. Keep the
    # returned model if there is one; otherwise fall back to a truthy sentinel
    # so the guard above works even if load_model() caches internally and
    # returns None — NOTE(review): confirm against src/infer.py.
    _model = load_model(_ckpt_path) or True
    return f"Model loaded in {time.time() - t0:.1f} seconds."
def predict_with_ci(essay: str, article: str) -> dict:
    """Predict an empathy score with a 95% confidence interval.

    Args:
        essay: Essay text to score.
        article: Article text the essay responds to.

    Returns:
        Dict with "mean" (score rescaled from [1, 7] to [0, 100]) and
        "ci", a (lower, upper) pair clipped to [0, 100].
    """
    _warmup()
    mean, var = predict(essay, article)
    # Model scores are on the original [1, 7] scale; map linearly to [0, 100].
    scale = 100.0 / 6.0
    mean = (mean - 1.0) * scale
    # Bug fix: the std must be rescaled by the same linear factor as the mean.
    # The original applied sqrt(var) from the [1, 7] scale directly to the
    # [0, 100]-scale mean, shrinking the interval by a factor of ~16.7.
    std = np.sqrt(var) * scale
    ci_low = max(0.0, mean - 1.96 * std)
    ci_upp = min(100.0, mean + 1.96 * std)
    return {"mean": mean, "ci": (ci_low, ci_upp)}
# Gradio UI: essay + article in, predicted mean and 95% CI out.
with gr.Blocks(title="Empathy Prediction") as demo:
    gr.Markdown("# Empathy Prediction with Uncertainty Estimation")
    with gr.Row():
        with gr.Column():
            essay_input = gr.Textbox(label="Essay", lines=10, placeholder="Enter the essay text here...")
            article_input = gr.Textbox(label="Article", lines=10, placeholder="Enter the article text here...")
            button = gr.Button("Predict")
        with gr.Column():
            output_mean = gr.Number(label="Predicted Empathy Mean", precision=4)
            # Bug fix: "95\%" was an invalid escape sequence (rendered a
            # literal backslash). Also, the CI is a (low, high) pair, which
            # gr.Number cannot display — use a Textbox and format it ourselves.
            ci = gr.Textbox(label="95% CI")

    def _on_predict(essay: str, article: str):
        """Adapt predict_with_ci's dict result to the two output components.

        Bug fix: the original wired the dict return value straight into two
        outputs; Gradio expects one value per declared output component.
        """
        result = predict_with_ci(essay, article)
        low, high = result["ci"]
        return result["mean"], f"[{low:.2f}, {high:.2f}]"

    button.click(fn=_on_predict, inputs=[essay_input, article_input], outputs=[output_mean, ci])

if __name__ == "__main__":
    demo.launch()