"""Gradio dashboard that answers free-text questions with a Facebook RAG model.

Reads Firebase service-account credentials from the FIREBASE_CREDENTIALS
environment variable, initialises the Firebase Admin SDK, loads a RAG
sequence-generation model with a dummy retrieval index, and serves a simple
text-in / text-out Gradio UI.
"""

import json
import os

import firebase_admin
import gradio as gr
from firebase_admin import credentials, db  # noqa: F401 — db kept for downstream use
from transformers import RagRetriever, RagSequenceForGeneration, RagTokenizer

# --- Firebase initialisation -------------------------------------------------
firebase_credential = os.getenv("FIREBASE_CREDENTIALS")
if not firebase_credential:
    raise RuntimeError("FIREBASE_CREDENTIALS environment variable is not set.")

# Parse the credential JSON in memory instead of writing it to disk:
# credentials.Certificate accepts a parsed dict as well as a file path, which
# avoids leaving a world-readable serviceAccountKey.json in the working
# directory.
cred = credentials.Certificate(json.loads(firebase_credential))
firebase_admin.initialize_app(
    cred, {"databaseURL": "https://your-database-name.firebaseio.com/"}
)

# --- RAG model ---------------------------------------------------------------
# The checkpoint must match the model class: the "rag-sequence" checkpoint
# pairs with RagSequenceForGeneration ("rag-token-base" pairs with
# RagTokenForGeneration).
MODEL_NAME = "facebook/rag-sequence-base"

tokenizer = RagTokenizer.from_pretrained(MODEL_NAME)
# use_dummy_dataset=True avoids downloading the full Wikipedia index; it must
# be combined with index_name="exact". Swap in a real knowledge base for
# production use.
retriever = RagRetriever.from_pretrained(
    MODEL_NAME, index_name="exact", use_dummy_dataset=True
)
# Attach the retriever so model.generate() performs retrieval internally.
# (Calling retriever(question=..., input_ids=...) directly is not a supported
# API — RagRetriever.__call__ expects question_hidden_states.)
model = RagSequenceForGeneration.from_pretrained(MODEL_NAME, retriever=retriever)


def generate_answer(question: str, context: str = "") -> str:
    """Generate an answer to *question* using the RAG model.

    Args:
        question: The user's question in plain text.
        context: Unused placeholder kept for interface compatibility.

    Returns:
        The generated answer with special tokens stripped.
    """
    inputs = tokenizer(question, return_tensors="pt")
    # Retrieval happens inside generate() because the retriever is attached
    # to the model above.
    output_ids = model.generate(input_ids=inputs["input_ids"])
    return tokenizer.batch_decode(output_ids, skip_special_tokens=True)[0]


def dashboard(question: str) -> str:
    """Gradio callback: forward the question to the RAG model."""
    return generate_answer(question)


# Gradio Interface Setup
interface = gr.Interface(fn=dashboard, inputs="text", outputs="text")

# Launch the Gradio app
if __name__ == "__main__":
    interface.launch()