import gradio as gr
import os
from openai import OpenAI

# Create the OpenAI client, reading the API key from the environment variable
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# Read the static CV file
def load_cv():
    with open("templated_CV.txt", "r") as file:
        return file.read()
# Load the CV text once at startup
cv_text = load_cv()
# Initialize a history list to keep track of the conversation
history = []
def chat_with_ai(user_input):
    global history
    # Append the user message to the history
    history.append({"role": "user", "content": user_input})
    # Limit history to the last 20 messages
    if len(history) > 20:
        history = history[-20:]
    # Prepare the messages for the API call, including the CV text
    messages = [
        {
            "role": "system",
            "content": (
                f"Assume you are Karthik Raja, and this is the content of your CV: {cv_text}. "
                "Answer the questions as Karthik Raja. "
                "Please make sure not to add any information beyond what is provided in the CV."
            ),
        },
    ] + history
    # Make the API call
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=messages
    )
    assistant_message = completion.choices[0].message
    # Append the assistant reply to the history as a plain dict
    history.append({"role": "assistant", "content": assistant_message.content})
    return assistant_message.content
def main(user_input):
    response = chat_with_ai(user_input)
    return response
iface = gr.Interface(
    fn=main,
    inputs=gr.Textbox(label="Ask a question that you would like to ask Karthik"),
    outputs="text",
    title="AI Clone",
    description="Interact with an AI clone for recruiting or just for fun :)"
)
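# launch() starts a local Gradio web server (by default at http://127.0.0.1:7860)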
iface.launch()