class Host:
    def __init__(self, client, show_state: dict):
        self.model = "deepseek/deepseek-v3.2-exp"
        self.client = client
        self.turn_limit = 7  # number of host/guest exchanges before wrapping up
        self.show_state = show_state
        system_prompt = """You are the Host of a Talk Show called "The Emergent Show", where guests come and have a chat.
There is a TV Display near you which the crew members use to display images based on your conversation.
Do not talk about the TV unless you see this: [TV Shows: <description>], which means that the crew has displayed something on the TV.
Keep your responses short, like three or four sentences, and be witty."""
        self.system_message = {"role": "system", "content": system_prompt}
        self.messages = [self.system_message]

    def get_response(self, user_message) -> tuple[str, bool]:
        # Returns the host's reply and whether the show has been wrapped up.
        self.show_state["time_since_last_guest_message"] = 0
        should_wrap_up = False
        # First guest message: add an intro so the host knows someone joined.
        if len(self.messages) == 1:
            msg_prefix = "<Production Instructions>\nA guest entered the show, now they will talk to you\n</Production Instructions>\n"
            user_message = msg_prefix + user_message + "\n"
        # Turn-based limit for wrapping up the show (each turn adds two messages).
        if len(self.messages) >= self.turn_limit * 2:
            print("Adding wrapup prompt")
            wrapup_prompt = (
                "\n<Production Instructions>\nThe show has been going on for a while now. "
                "Kindly wrap up the show in a polite manner.\n</Production Instructions>"
            )
            user_message = user_message + wrapup_prompt
            should_wrap_up = True
        self.messages.append({"role": "user", "content": user_message})
        response = self.client.responses.create(model=self.model, input=self.messages)
        host_response = response.output_text
        self.messages.append({"role": "assistant", "content": host_response})
        if should_wrap_up:
            self.clear_context()
            return host_response, True
        return host_response, False

    def clear_context(self):
        # Reset the conversation to just the system prompt for the next guest.
        self.messages = [self.system_message]

    def guest_took_too_long_to_respond(self):
        wrapup_prompt = (
            "\n<Production Instructions>\nThe guest has taken too long to respond. "
            "Kindly wrap up the show in a polite manner.\n</Production Instructions>"
        )
        self.messages.append({"role": "user", "content": wrapup_prompt})
        response = self.client.responses.create(model=self.model, input=self.messages)
        host_response = response.output_text
        self.clear_context()
        return host_response
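
# --------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original class). Assumptions
# not established above: `client` is an OpenAI-SDK-compatible object that
# exposes the Responses API (client.responses.create / response.output_text),
# and `show_state` is a plain dict owned by the surrounding show loop. The
# client configuration below is a placeholder for whichever provider serves
# the configured model.
if __name__ == "__main__":
    from openai import OpenAI

    client = OpenAI()  # base_url / api_key setup depends on your provider
    show_state = {"time_since_last_guest_message": 0}
    host = Host(client, show_state)

    reply, show_over = host.get_response("Hi! Thrilled to be on the show.")
    print(reply)
    if show_over:
        print("Host wrapped up; the context was cleared for the next guest.")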