"""Minimal RAG script: answer a question using DuckDuckGo search snippets as context."""

from duckduckgo_search import DDGS
from transformers import AutoTokenizer, AutoModelForCausalLM


def search_web(query):
    """Return the top DuckDuckGo result snippets for a query, joined into one context string."""
    with DDGS() as ddgs:
        results = ddgs.text(query, max_results=5)
    return "\n".join([r["body"] for r in results])


# Load the instruction-tuned Gemma 7B model and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b-it")
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b-it")


def ask(question):
    """Retrieve web context for the question, build a prompt, and generate an answer."""
    context = search_web(question)
    prompt = f"Use this information:\n{context}\n\nQuestion: {question}\nAnswer:"

    inputs = tokenizer(prompt, return_tensors="pt")
    output = model.generate(**inputs, max_new_tokens=200)
    return tokenizer.decode(output[0], skip_special_tokens=True)

if __name__ == "__main__":
    # Run with: python rag.py
    print(ask("When was the Eiffel Tower built?"))