MuhammadMubashir committed on
Commit
1ba80b7
·
verified ·
1 Parent(s): 70829e9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +66 -17
app.py CHANGED
@@ -1,15 +1,73 @@
1
- import streamlit as st
2
  import requests
 
 
 
 
 
 
 
 
 
 
3
  from PIL import Image
4
 
5
- # Hugging Face Deployment Compatibility
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  st.set_page_config(page_title="Agentic RAG Legal Assistant", layout="wide")
7
 
8
- # Load background and sidebar images
9
- bg_image = "https://source.unsplash.com/1600x900/?law,court" # Background image
10
- sidebar_image = "https://source.unsplash.com/400x600/?law,justice" # Sidebar image
11
 
12
- # Custom CSS for background styling
13
  st.markdown(
14
  f"""
15
  <style>
@@ -26,23 +84,17 @@ st.markdown(
26
  unsafe_allow_html=True,
27
  )
28
 
29
- # Sidebar Title
30
  st.sidebar.title("βš–οΈ Legal AI Assistant")
31
  st.sidebar.markdown("Your AI-powered legal research assistant.")
32
 
33
- # Main Heading
34
  st.markdown("# πŸ›οΈ Agentic RAG Legal Assistant")
35
  st.markdown("### Your AI-powered assistant for legal research and case analysis.")
36
 
37
- # Initialize conversation history
38
  if "chat_history" not in st.session_state:
39
  st.session_state.chat_history = []
40
 
41
- # User input
42
  user_query = st.text_input("πŸ” Enter your legal question:", "")
43
-
44
- # FastAPI backend URL
45
- API_URL = "http://127.0.0.1:8000/query/" # Change this to your deployed FastAPI URL
46
 
47
  if st.button("Ask AI") and user_query:
48
  with st.spinner("Fetching response..."):
@@ -53,10 +105,8 @@ if st.button("Ask AI") and user_query:
53
  except Exception as e:
54
  ai_response = f"Error: {e}"
55
 
56
- # Update chat history
57
  st.session_state.chat_history.append((user_query, ai_response))
58
 
59
- # Display chat history
60
  st.markdown("---")
61
  st.markdown("### πŸ“œ Chat History")
62
  for user_q, ai_r in st.session_state.chat_history:
@@ -64,6 +114,5 @@ for user_q, ai_r in st.session_state.chat_history:
64
  st.markdown(f"**πŸ€– AI:** {ai_r}")
65
  st.markdown("---")
66
 
67
- # Footer
68
  st.markdown("---")
69
- st.markdown("πŸš€ Powered by OpenAI, Pinecone, and LangChain.")
 
1
+ import os
2
  import requests
3
+ import streamlit as st
4
+ from fastapi import FastAPI, HTTPException
5
+ from langchain.chains import ConversationalRetrievalChain
6
+ from langchain.chat_models import ChatAnthropic
7
+ from langchain.vectorstores import Pinecone
8
+ from langchain.embeddings.huggingface import HuggingFaceEmbeddings
9
+ from langchain.memory import ConversationBufferMemory
10
+ from datasets import load_dataset
11
+ from dotenv import load_dotenv
12
+ from pinecone import Pinecone
13
  from PIL import Image
14
 
15
# Load environment variables from a local .env file (if present).
load_dotenv()

# Initialize the FastAPI application that serves the RAG endpoints.
app = FastAPI()

# Configuration.
# BUG FIX: os.getenv() takes the *name* of an environment variable, not its
# value. The original code passed a literal Pinecone API key (a leaked
# secret committed to source control — it must be rotated) and a literal
# region string, so both lookups always returned None. Read the values from
# properly named environment variables instead.
PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")
PINECONE_ENV = os.getenv("PINECONE_ENV", "us-east-1")  # default matches the original intent
INDEX_NAME = "agenticrag"

if not PINECONE_API_KEY:
    raise ValueError("Pinecone API Key is missing. Please set it in environment variables.")
28
+
29
# Initialize Hugging Face embeddings and connect to the existing Pinecone index.
# BUG FIX: the module imports both `langchain.vectorstores.Pinecone` (the
# vector-store wrapper) and `pinecone.Pinecone` (the client) under the same
# name; the later client import shadows the wrapper, so
# `Pinecone.from_existing_index(...)` would raise AttributeError. Alias the
# two classes locally to disambiguate.
from pinecone import Pinecone as PineconeClient
from langchain.vectorstores import Pinecone as PineconeVectorStore

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
pc = PineconeClient(api_key=PINECONE_API_KEY)
vector_store = PineconeVectorStore.from_existing_index(index_name=INDEX_NAME, embedding=embeddings)
33
+
34
# Chat model: Anthropic Claude with deterministic (temperature=0) output.
llm = ChatAnthropic(model="claude-2", temperature=0)

# Conversation memory shared with the retrieval chain.
# BUG FIX: with return_source_documents=True the chain produces two output
# keys ("answer" and "source_documents"); the memory must be told which one
# to persist, otherwise LangChain raises "one output key expected" on every
# call.
memory = ConversationBufferMemory(
    memory_key="chat_history",
    return_messages=True,
    output_key="answer",
)

# Conversational RAG chain: retrieves context from Pinecone, answers with
# Claude, and also returns the retrieved source documents.
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=vector_store.as_retriever(),
    memory=memory,
    return_source_documents=True,
)
45
+
46
+ @app.post("/query/")
47
+ async def query_agent(query: str):
48
+ try:
49
+ response = qa_chain.run(query)
50
+ return {"response": response}
51
+ except Exception as e:
52
+ raise HTTPException(status_code=500, detail=str(e))
53
+
54
+ @app.get("/")
55
+ def read_root():
56
+ return {"message": "Welcome to the Agentic RAG Legal Assistant!"}
57
+
58
def ingest_corpus() -> None:
    """One-time ingestion: embed the US-Congress corpus into Pinecone.

    BUG FIX: the LangChain vector-store wrapper has no ``upsert`` method —
    upserts must go to the raw Pinecone index (``pc.Index(...)``), so the
    original module-level call crashed at import. The ingestion is wrapped
    in a function guarded by an opt-in env flag so the expensive
    embed + upsert no longer (attempts to) re-run on every server start.
    """
    dataset = load_dataset("c4lliope/us-congress")
    chunks = [str(text) for text in dataset["train"]["text"]]
    embedding_vectors = embeddings.embed_documents(chunks)
    # Pinecone upsert tuples: (id, vector, metadata).
    pinecone_data = [
        (str(i), vector, {"text": chunk})
        for i, (vector, chunk) in enumerate(zip(embedding_vectors, chunks))
    ]
    pc.Index(INDEX_NAME).upsert(vectors=pinecone_data)


# Opt-in: set INGEST_DATA=1 to (re)load the corpus into the index.
if os.getenv("INGEST_DATA") == "1":
    ingest_corpus()
64
+
65
+ # Streamlit UI
66
  st.set_page_config(page_title="Agentic RAG Legal Assistant", layout="wide")
67
 
68
+ bg_image = "https://source.unsplash.com/1600x900/?law,court"
69
+ sidebar_image = "https://source.unsplash.com/400x600/?law,justice"
 
70
 
 
71
  st.markdown(
72
  f"""
73
  <style>
 
84
  unsafe_allow_html=True,
85
  )
86
 
 
87
  st.sidebar.title("βš–οΈ Legal AI Assistant")
88
  st.sidebar.markdown("Your AI-powered legal research assistant.")
89
 
 
90
  st.markdown("# πŸ›οΈ Agentic RAG Legal Assistant")
91
  st.markdown("### Your AI-powered assistant for legal research and case analysis.")
92
 
 
93
  if "chat_history" not in st.session_state:
94
  st.session_state.chat_history = []
95
 
 
96
  user_query = st.text_input("πŸ” Enter your legal question:", "")
97
+ API_URL = "http://127.0.0.1:8000/query/"
 
 
98
 
99
  if st.button("Ask AI") and user_query:
100
  with st.spinner("Fetching response..."):
 
105
  except Exception as e:
106
  ai_response = f"Error: {e}"
107
 
 
108
  st.session_state.chat_history.append((user_query, ai_response))
109
 
 
110
  st.markdown("---")
111
  st.markdown("### πŸ“œ Chat History")
112
  for user_q, ai_r in st.session_state.chat_history:
 
114
  st.markdown(f"**πŸ€– AI:** {ai_r}")
115
  st.markdown("---")
116
 
 
117
  st.markdown("---")
118
+ st.markdown("πŸš€ Powered by Anthropic Claude, Pinecone, and LangChain.")