import openai
import gradio as gr
import os
import logging
import json
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
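# Configuration is read from environment variables (presumably set as Space secrets):
# 'key' is the OpenAI API key, 'init' is the system prompt, and the remaining
# values are canned prompt snippets reused in the auto-generated replies below.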
openai.api_key = os.environ['key']
initMsg = os.environ['init']
roleAns = os.environ['roleAns']
classAsk = os.environ['classAsk']
classAns = os.environ['classAns']
defaultMsg = os.environ['defaultMsg']
mod = 3
def localVar(lv):
    # Character state: the race and class chosen during the opening exchanges.
    lv = lv or {"race": "", "cls": ""}
    return lv
def trimMessages(messages):
    # messages is a list of dicts with "role" and "content" keys.
    # Remove the "Current Status: ... Wielding:" block from every assistant
    # message, then re-append the last full status text as a new assistant message.
    content = ""
    for message in messages:
        if message["role"] == "assistant":
            if "Current Status:" in message["content"]:
                content = message["content"]
                start_index = message["content"].index("Current Status:") + len("Current Status:")
                end_index = message["content"].index("Wielding:")
                new_content = message["content"][:start_index] + message["content"][end_index:]
                message["content"] = new_content
    if content != "":
        messages.append({"role": "assistant", "content": content})
    return messages
def getChooseRole(msg):
    # Pull the chosen role out of a reply shaped like "As a <race> <class>, ...".
    # Returns "" when the pattern is missing or the player chose to be the DM.
    start_index = msg.find("As a")
    if start_index >= 0:
        start_index += len("As a")
        end_index = msg.find(",", start_index)
        if end_index < 0:
            return ""
        role = msg[start_index:end_index].strip()
        if role.lower() == "dm":
            return ""
        else:
            return role
    else:
        return ""
def getChooseClass(msg):
    # The class is the part after the first space in the "<race> <class>" string.
    full = getChooseRole(msg)
    if full != "":
        start_index = full.find(" ")
        if start_index < 0:
            return ""
        return full[start_index + 1:].strip()
    else:
        return ""
# Module-level character state shared by the handler below; the original code
# referenced `lv` before defining it, so it is kept here as a simple global.
lv = None

def chatbot(input, messages):
    global lv
    messages = messages or [{"role": "system", "content": initMsg}]
    #if len(messages) == 1:
    #    memory = trimMessages(messages)
    #    logging.info("put memory:"+printMessages(memory))
    #    chat = openai.ChatCompletion.create(
    #        model="gpt-3.5-turbo", messages=memory,
    #        max_tokens=1048,n=1,temperature=0.5,
    #    )
    #    logging.info("put memory return:"+chat.choices[0].message.content)
    reply = ""
    if input:
        messages.append({"role": "user", "content": input})
        # Only the last two messages are sent to keep the request small.
        chat = openai.ChatCompletion.create(
            model="gpt-3.5-turbo", messages=messages[-2:],
            max_tokens=2048, n=1, temperature=0.5,
        )
        reply = chat.choices[0].message.content
        logging.info("AI Reply:" + reply)
        ans = ""
        if len(messages) == 2:
            # First exchange: the reply announces the race ("As a <race> <class>, ...").
            lv = localVar(lv)
            lv["race"] = getChooseRole(reply)
            lv["cls"] = ""
            ans = "I choose " + lv["race"] + " as my race." + classAsk
            logging.info("ChooseRole:" + ans)
        if len(messages) == 4:
            # Second exchange: extract the class from the reply.
            lv = localVar(lv)
            lv["cls"] = getChooseClass(reply)
            ans = "My Race is " + lv["race"] + ",I choose " + lv["cls"] + " as my class." + classAns
            logging.info("ChooseClass:" + ans)
        messages.append({"role": "assistant", "content": ans})
    return reply, printMessages(messages), messages
def printMessages(messages):
    delimiter = '\n'
    msg_string = delimiter.join([f"{obj['role']}:{obj['content']}" for obj in messages])
    logging.info("messages:" + msg_string)
    return msg_string
app = gr.Interface(fn=chatbot,
                   inputs=[gr.Textbox(lines=7, label="You ask and answer questions below"), "state"],
                   outputs=[gr.Textbox(label="DND Game Reply", placeholder=roleAns), gr.Textbox(label="History"), "state"],
                   title="DND Game",  # , gr.Textbox(label="History"),
                   description="DND Game", theme="compact")
app.launch(share=False)