import requests
import os

# Hugging Face API token, read from the HF_API_TOKEN environment variable
API_TOKEN = os.getenv("HF_API_TOKEN")

# Define the model and the Inference API endpoint
MODEL_ID = "bigcode/starcoder"
API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}

def translate_code(code_snippet, source_lang, target_lang):
    """
    Translate code via the Hugging Face Inference API (no local model download needed).
    """
    prompt = f"Translate the following {source_lang} code to {target_lang}:\n\n{code_snippet}\n\nTranslated {target_lang} Code:"
    response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
    if response.status_code == 200:
        return response.json()[0]["generated_text"]
    else:
        return f"Error: {response.status_code}, {response.text}"

# Example usage
source_code = """
def add(a, b):
    return a + b
"""
translated_code = translate_code(source_code, "Python", "Java")
print("Translated Java Code:\n", translated_code)