from openai import OpenAI
from pinecone import Pinecone
from honeyhive.tracer import HoneyHiveTracer
from honeyhive.tracer.custom import trace

# Initialize HoneyHive Tracer
HoneyHiveTracer.init(
    api_key="MY_HONEYHIVE_API_KEY",
    project="MY_HONEYHIVE_PROJECT_NAME",
    session_name="pinecone-docs"
)

# Initialize clients
openai_client = OpenAI()
pc = Pinecone(api_key="MY_PINECONE_API_KEY")
index = pc.Index("MY_PINECONE_INDEX_NAME")
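
# Embed text with OpenAI's text-embedding-ada-002 model and return the embedding vector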
def embed_query(query):
    res = openai_client.embeddings.create(
        model="text-embedding-ada-002",
        input=query
    )
    query_vector = res.data[0].embedding
    return query_vector
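
# Sample documents to embed and index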
documents = [
    "Jack is a software engineer.",
    "Jill is a nurse.",
    "Jane is a teacher.",
    "John is a doctor.",
]

# Embed each document and upsert it into the Pinecone index
index.upsert(vectors=[
    {
        "id": doc_id,
        "values": embed_query(doc),
        "metadata": {"_node_content": doc},
    }
    for doc_id, doc in zip(["A", "B", "C", "D"], documents)
])
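
# Trace the retrieval step in HoneyHive, logging the embedding model and top_k config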
@trace(
    config={
        "embedding_model": "text-embedding-ada-002",
        "top_k": 3
    }
)
def get_relevant_documents(query):
    # Embed the query and fetch the top_k most similar documents from Pinecone
    query_vector = embed_query(query)
    res = index.query(vector=query_vector, top_k=3, include_metadata=True)
    print(res)
    return [match["metadata"]["_node_content"] for match in res["matches"]]
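
# Trace the generation step, recording the model, system prompt, and version metadata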
@trace(
    config={
        "model": "gpt-4o",
        "prompt": "You are a helpful assistant" 
    },
    metadata={
        "version": 1
    }
)
def generate_response(context, query):
    # Combine the retrieved context and the question into a single prompt
    prompt = f"Context: {context}\n\nQuestion: {query}\n\nAnswer:"
    response = openai_client.chat.completions.create(
        model="gpt-4o",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt}
        ]
    )
    return response.choices[0].message.content
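
# Parent span: chain retrieval and generation into a single traced RAG pipeline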
@trace()
def rag_pipeline(query):
    # Retrieve relevant documents, then generate an answer grounded in them
    docs = get_relevant_documents(query)
    response = generate_response("\n".join(docs), query)
    return response

def main():
    query = "What does Jack do?"
    response = rag_pipeline(query)
    print(f"Query: {query}")
    print(f"Response: {response}")

if __name__ == "__main__":
    main()