import os
import time

import gradio as gr
import numpy as np
from huggingface_hub import hf_hub_download

from src.infer import load_model, predict

# Keep the Hugging Face cache on the persistent /data volume (Spaces convention);
# setdefault lets an externally-set HF_HOME win.
os.environ.setdefault("HF_HOME", "/data/.huggingface")

# Lazily-initialized singleton state, populated by _warmup() on first use.
_model = None
_ckpt_path = None

# Affine rescaling from the original score range [1, 7] to [0, 100]:
# x' = (x - 1) * _SCALE.  An affine map scales a standard deviation by the
# same factor, which is why _SCALE is applied to both mean and std below.
_SCALE = 100.0 / 6.0


def _warmup():
    """Download the checkpoint and load the model exactly once.

    Returns a timing message (str) on the first, loading call and None on
    subsequent calls.
    """
    global _model, _ckpt_path
    if _model is not None:
        return None
    t0 = time.time()
    _ckpt_path = hf_hub_download(
        repo_id="rhasan/UPLME",
        filename="UPLME_NewsEmp_tuned-lambdas.ckpt",
        repo_type="model",
        local_dir="/data/uplme_ckpt",
    )
    # BUG FIX: the original discarded load_model()'s result, so _model stayed
    # None forever and the guard above never fired — the checkpoint was
    # re-resolved and the model re-loaded on every single prediction.
    _model = load_model(_ckpt_path)
    return f"Model loaded in {time.time() - t0:.1f} seconds."


def predict_with_ci(essay: str, article: str) -> tuple[float, float, float]:
    """Predict an empathy score with a 95% confidence interval.

    Args:
        essay: The essay text to score.
        article: The article the essay responds to.

    Returns:
        (mean, ci_low, ci_upp) on the [0, 100] scale; the CI bounds are
        clamped to [0, 100].
    """
    _warmup()
    mean, var = predict(essay, article)
    # Scores are originally in [1, 7]; rescale the mean to [0, 100].
    mean = (mean - 1.0) * _SCALE
    # BUG FIX: the original built the CI from sqrt(var) on the raw [1, 7]
    # scale while the mean was already on the [0, 100] scale, mixing units.
    # The std must be rescaled by the same affine factor as the mean.
    # NOTE(review): assumes predict() returns the variance on the original
    # [1, 7] scale — confirm against src.infer.
    std = np.sqrt(var) * _SCALE
    ci_low = max(0.0, mean - 1.96 * std)
    ci_upp = min(100.0, mean + 1.96 * std)
    # float() unboxes possible NumPy scalars for clean Gradio display.
    return float(mean), float(ci_low), float(ci_upp)


with gr.Blocks(title="Empathy Prediction") as demo:
    gr.Markdown("# Empathy Prediction with Uncertainty Estimation")
    with gr.Row():
        with gr.Column():
            essay_input = gr.Textbox(
                label="Essay", lines=10,
                placeholder="Enter the essay text here...",
            )
            article_input = gr.Textbox(
                label="Article", lines=10,
                placeholder="Enter the article text here...",
            )
            button = gr.Button("Predict")
        with gr.Column():
            output_mean = gr.Number(label="Predicted Empathy Mean", precision=2)
            ci_low = gr.Number(label="95% CI Lower Bound", precision=2)
            ci_upp = gr.Number(label="95% CI Upper Bound", precision=2)
    button.click(
        fn=predict_with_ci,
        inputs=[essay_input, article_input],
        outputs=[output_mean, ci_low, ci_upp],
    )


if __name__ == "__main__":
    demo.launch()