# Import required libraries
from dotenv import load_dotenv
from openai import OpenAI
import json
import os
import requests
from pypdf import PdfReader
import gradio as gr

# Load environment variables from .env file (for API keys, etc.)
load_dotenv(override=True)


# Helper function to send push notifications using Pushover
def push(text):
    requests.post(
        "https://api.pushover.net/1/messages.json",
        data={
            "token": os.getenv("PUSHOVER_TOKEN"),
            "user": os.getenv("PUSHOVER_USER"),
            "message": text,
        }
    )


# Tool: Record user details (e.g., when a user provides their email)
def record_user_details(email, name="Name not provided", notes="not provided"):
    push(f"Recording {name} with email {email} and notes {notes}")
    return {"recorded": "ok"}


# Tool: Record any question that could not be answered
def record_unknown_question(question):
    push(f"Recording {question}")
    return {"recorded": "ok"}


# JSON schema for the record_user_details tool (for OpenAI function calling)
record_user_details_json = {
    "name": "record_user_details",
    "description": "Use this tool to record that a user is interested in being in touch and provided an email address",
    "parameters": {
        "type": "object",
        "properties": {
            "email": {
                "type": "string",
                "description": "The email address of this user"
            },
            "name": {
                "type": "string",
                "description": "The user's name, if they provided it"
            },
            "notes": {
                "type": "string",
                "description": "Any additional information about the conversation that's worth recording to give context"
            }
        },
        "required": ["email"],
        "additionalProperties": False
    }
}

# JSON schema for the record_unknown_question tool (for OpenAI function calling)
record_unknown_question_json = {
    "name": "record_unknown_question",
    "description": "Always use this tool to record any question that couldn't be answered as you didn't know the answer",
    "parameters": {
        "type": "object",
        "properties": {
            "question": {
                "type": "string",
                "description": "The question that couldn't be answered"
            }
        },
        "required": ["question"],
        "additionalProperties": False
    }
}

# List of available tools for the OpenAI agent
tools = [
    {"type": "function", "function": record_user_details_json},
    {"type": "function", "function": record_unknown_question_json}
]


# Main class representing "Me" (the agent persona)
class Me:

    def __init__(self):
        # Initialize OpenAI client and load profile information
        self.openai = OpenAI()
        self.name = "Harish"
        # Read LinkedIn profile from PDF
        reader = PdfReader("me/linkedin.pdf")
        self.linkedin = ""
        for page in reader.pages:
            text = page.extract_text()
            if text:
                self.linkedin += text
        # Read summary from text file
        with open("me/summary.txt", "r", encoding="utf-8") as f:
            self.summary = f.read()

    # Handle tool calls from the OpenAI agent (function calling)
    def handle_tool_call(self, tool_calls):
        results = []
        for tool_call in tool_calls:
            tool_name = tool_call.function.name
            arguments = json.loads(tool_call.function.arguments)
            print(f"Tool called: {tool_name}", flush=True)
            tool = globals().get(tool_name)
            result = tool(**arguments) if tool else {}
            results.append({"role": "tool", "content": json.dumps(result), "tool_call_id": tool_call.id})
        return results

    # Compose the system prompt for the agent, including summary and LinkedIn profile
    def system_prompt(self):
        system_prompt = (
            f"You are acting as {self.name}. You are answering questions on {self.name}'s website, "
            f"particularly questions related to {self.name}'s career, background, skills and experience. "
            f"Your responsibility is to represent {self.name} for interactions on the website as faithfully as possible. "
            f"You are given a summary of {self.name}'s background and LinkedIn profile which you can use to answer questions. "
            "Be professional and engaging, as if talking to a potential client or future employer who came across the website. "
            "If you don't know the answer to any question, use your record_unknown_question tool to record the question "
            "that you couldn't answer, even if it's about something trivial or unrelated to career. "
            "If the user is engaging in discussion, try to steer them towards getting in touch via email; "
            "ask for their email and record it using your record_user_details tool. "
        )
        system_prompt += f"\n\n## Summary:\n{self.summary}\n\n## LinkedIn Profile:\n{self.linkedin}\n\n"
        system_prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
        return system_prompt

    # Main chat method: handles conversation, tool calls, and responses
    def chat(self, message, history):
        # Build message history for OpenAI API
        messages = [{"role": "system", "content": self.system_prompt()}] + history + [{"role": "user", "content": message}]
        done = False
        while not done:
            # Call OpenAI chat completion with tools
            response = self.openai.chat.completions.create(model="gpt-4o-mini", messages=messages, tools=tools)
            if response.choices[0].finish_reason == "tool_calls":
                # If a tool call is required, handle it and continue the loop
                assistant_message = response.choices[0].message
                tool_calls = assistant_message.tool_calls
                results = self.handle_tool_call(tool_calls)
                messages.append(assistant_message)
                messages.extend(results)
            else:
                # Otherwise, exit the loop and return the agent's response
                done = True
        return response.choices[0].message.content


if __name__ == "__main__":
    me = Me()
    gr.ChatInterface(me.chat, type="messages").launch()
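
# ---------------------------------------------------------------------------
# Optional smoke test: a minimal sketch, not part of the Gradio app above.
# Assumed prerequisites, inferred from the code (adjust to your setup):
#   - a .env file providing OPENAI_API_KEY, PUSHOVER_TOKEN and PUSHOVER_USER
#   - me/linkedin.pdf and me/summary.txt on disk
# `history` uses the same "messages" format that gr.ChatInterface(type="messages")
# passes to chat(): a list of {"role": ..., "content": ...} dicts.
# ---------------------------------------------------------------------------
def smoke_test():
    me = Me()
    history = [
        {"role": "user", "content": "Hi, who are you?"},
        {"role": "assistant", "content": f"Hi, I'm {me.name}. Ask me anything about my career."},
    ]
    # Calls the OpenAI API directly, bypassing the Gradio UI
    print(me.chat("What kind of work do you do?", history))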