import os

import chromadb
from llama_index.core import VectorStoreIndex
from llama_index.core.agent.workflow import ReActAgent
from llama_index.core.tools import QueryEngineTool
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
from llama_index.vector_stores.chroma import ChromaVectorStore

# from llama_index.llms.litellm import LiteLLM


def initialize_code_agent():
    """Build a ReAct agent backed by a Chroma-persisted code index.

    Reads ``HF_TOKEN`` and ``DEEPSEEK_TOKEN`` from the environment, opens
    (or creates) the local ``code`` collection under ``./code_db``, wraps it
    in a query engine driven by a DeepSeek coder model, and returns a
    ``ReActAgent`` exposing that engine as its single tool.

    Returns:
        ReActAgent: agent named ``"code_engine"`` whose only tool is the
        code query engine over the persisted vector store.
    """
    hf_token = os.environ.get("HF_TOKEN")
    deepseek_token = os.environ.get("DEEPSEEK_TOKEN")

    # Persistent client: reuses the on-disk "code" collection when it exists,
    # so previously embedded documents survive restarts.
    code_db = chromadb.PersistentClient(path="./code_db")
    code_chroma_collection = code_db.get_or_create_collection("code")
    code_vector_store = ChromaVectorStore(chroma_collection=code_chroma_collection)

    embedding_model = HuggingFaceEmbedding(
        model_name="BAAI/bge-small-en-v1.5",
        device="cpu",
        token=hf_token,
    )

    # Rebuild the index view over the existing store — no re-embedding here;
    # the embed model is only needed to encode incoming queries.
    index = VectorStoreIndex.from_vector_store(
        code_vector_store, embed_model=embedding_model
    )

    # NOTE(review): both api_key and token are passed; presumably one of the
    # two credentials is the one actually used by the inference backend —
    # confirm against the HuggingFaceInferenceAPI documentation.
    code_llm = HuggingFaceInferenceAPI(
        model_name="deepseek-ai/deepseek-coder-1.3b-instruct",
        api_key=deepseek_token,
        token=hf_token,
    )

    code_query_engine = index.as_query_engine(llm=code_llm, similarity_top_k=3)

    code_query_engine_tool = QueryEngineTool.from_defaults(
        query_engine=code_query_engine,
        name="my_code_query_engine",
        description="Code Query engine for the agent",
        return_direct=False,
    )

    return ReActAgent(
        name="code_engine",
        description="Query engine for the agent",
        tools=[code_query_engine_tool],
        # Fixed: the prompt previously read "You are a calculator assistant.
        # Use your tools for any math operation." — a copy-paste from a
        # math-agent example that contradicted the agent's only tool, which
        # queries indexed code.
        system_prompt=(
            "You are a code assistant. Use your tools to answer questions "
            "about the indexed codebase."
        ),
        llm=code_llm,
    )