# Hugging Face Space — RAG agent (LlamaIndex).
# NOTE(review): the Space reported a build error; the llama_index imports
# below use pre-0.10 paths (ServiceContext, llama_index.core.llms.OpenAI).
import os

from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex
from llama_index.core.tools import QueryEngineTool, ToolMetadata
# FIX: in llama_index >= 0.10 the OpenAI LLM lives in the separate
# `llama-index-llms-openai` package, not `llama_index.core.llms` — the old
# import path is the likely cause of the Space's build error.
from llama_index.llms.openai import OpenAI

# FIX: ServiceContext was deprecated and removed in llama_index 0.10+;
# the global `Settings` singleton is the supported replacement.
# Configure the preferred model once; VectorStoreIndex picks it up implicitly.
Settings.llm = OpenAI(model="gpt-3.5-turbo", temperature=0)

# Load every document under ./kb and build an in-memory vector index over them.
documents = SimpleDirectoryReader("kb").load_data()
index = VectorStoreIndex.from_documents(documents)
query_engine = index.as_query_engine()

# Tool wrapper so the query engine can plug into a multi-tool agent if needed.
rag_tool = QueryEngineTool(
    query_engine=query_engine,
    metadata=ToolMetadata(
        name="RAGSearch",
        description="Answers questions using a local knowledge base",
    ),
)
# Agent class (Hugging Face-compatible)
class BasicAgent:
    """Minimal callable agent that answers questions via the module-level RAG tool.

    Instances are callable — ``agent(question)`` returns the query engine's
    response rendered as a string.
    """

    def __init__(self):
        # Bind the shared QueryEngineTool built at import time.
        self.tool = rag_tool

    def __call__(self, question: str) -> str:
        print(f"🧠 RAG Agent received: {question}")
        engine = self.tool.query_engine
        return str(engine.query(question))