Unverified Commit 5c7c4435 authored by jinyuan sun's avatar jinyuan sun Committed by GitHub
Browse files

Merge pull request #38 from buerbaumer/upgrade

Upgrade the prompt and fix two typos
parents deece138 4f3d7436
Loading
Loading
Loading
Loading
+5 −5
Original line number Original line Diff line number Diff line
@@ -74,7 +74,7 @@ stashed_commands = []
# Module-level configuration for the PyMOL ChatGPT plugin.
# The API key is persisted under ~/.PyMOL/apikey.txt so it survives restarts;
# the environment variable OPENAI_API_KEY is used as a fallback (see load_api_key).
API_KEY_FILE = os.path.expanduser('~')+"/.PyMOL/apikey.txt"
OPENAI_KEY_ENV = "OPENAI_API_KEY"
# Default chat model; "gpt-3.5-turbo" is a stable alias that tracks the latest
# 3.5-turbo snapshot, so it does not go stale like a dated snapshot name.
# Mutable at runtime via update_model().
GPT_MODEL = "gpt-3.5-turbo"
# OpenAI client instance; initialized lazily once an API key is available.
client = None


def set_api_key(api_key):
def set_api_key(api_key):
@@ -107,7 +107,7 @@ def load_api_key():
        print("API key loaded from environment variable.")
        print("API key loaded from environment variable.")
    return client
    return client
    
    
def update_model(model_name):
    """Switch the OpenAI chat model used by this plugin.

    Args:
        model_name: Name of the model to use from now on
            (e.g. "gpt-3.5-turbo" or "gpt-4").

    Side effects:
        Rebinds the module-level GPT_MODEL and prints a confirmation.
    """
    global GPT_MODEL
    # Bug fix: the upstream commit renamed the parameter from "mdoel_name"
    # to "model_name" in the signature but left the body using the old
    # misspelled name, which would raise NameError on every call.
    GPT_MODEL = model_name
    print("Model updated to: ", GPT_MODEL)
@@ -120,7 +120,7 @@ def chat_with_gpt(message, max_history=10):


    try:
    try:
        messages = [
        messages = [
            {"role": "system", "content": "You are an AI language model specialized in providing command line code solutions related to PyMOL. Generate clear and effective solutions in a continuous manner. When providing demos or examples, try to use 'fetch' if object name is not provided. Prefer academic style visulizations. Code within triple backticks, comment and code should not in the same line."}
            {"role": "system", "content": "You are an AI language model specialized in providing command line code solutions related to PyMOL. Generate clear and effective solutions in a continuous manner. You think step-by-step before you conclude correctly. When providing demos or examples, try to use 'fetch' if object name is not provided. Prefer academic style visulizations. Code within triple backticks, comment and code should not in the same line."}
        ]
        ]


        # Keep only the max_history latest exchanges to avoid making the conversation too long
        # Keep only the max_history latest exchanges to avoid making the conversation too long
+2 −1
Original line number Original line Diff line number Diff line
@@ -28,6 +28,7 @@ class ChatMol:
        self.chatgpt_conversation_history = []
        self.chatgpt_conversation_history = []
        self.claude_conversation_messages = []
        self.claude_conversation_messages = []
        self.chatgpt_sys_prompt = "You are an expert familiar with PyMOL and specialized in providing PyMOL command line code solutions accuratly, and concisely. "
        self.chatgpt_sys_prompt = "You are an expert familiar with PyMOL and specialized in providing PyMOL command line code solutions accuratly, and concisely. "
        self.chatgpt_sys_prompt += "You think step-by-step before you conclude correctly. "
        self.chatgpt_sys_prompt += "When providing demos or examples, try to use 'fetch' if object name is not provided. "
        self.chatgpt_sys_prompt += "When providing demos or examples, try to use 'fetch' if object name is not provided. "
        self.chatgpt_sys_prompt += "Prefer academic style visulizations. Code within triple backticks, comment and code should not in the same line."
        self.chatgpt_sys_prompt += "Prefer academic style visulizations. Code within triple backticks, comment and code should not in the same line."


+1 −1
Original line number Original line Diff line number Diff line
@@ -231,7 +231,7 @@ def func_schema_gen(registry):
            if (p_desc.find('Optional') == -1 and p_desc.find('optional') == -1):
            if (p_desc.find('Optional') == -1 and p_desc.find('optional') == -1):
                required.append(param_name)
                required.append(param_name)
            props[param_name] = {'type':"string","description":param_desc[param_name]}
            props[param_name] = {'type':"string","description":param_desc[param_name]}
        params['reguired'] = required
        params['required'] = required
        func_schema['parameters'] = params
        func_schema['parameters'] = params
        func_sche_dict[r['service_name']] = func_schema
        func_sche_dict[r['service_name']] = func_schema
        pprint.pprint(func_schema)
        pprint.pprint(func_schema)
+1 −1
Original line number Original line Diff line number Diff line
@@ -210,7 +210,7 @@ if "messages" not in st.session_state or st.session_state.messages == []:
    st.session_state.messages = [
    st.session_state.messages = [
        {
        {
            "role": "system",
            "role": "system",
            "content": f"You are ChatMol copilot, a helpful copilot in molecule analysis with tools. Use tools only when you need them. Answer to questions related molecular modelling. When providing file path for downloading, use the realpath of the file without modification, it should be looks like: [link name](http://localhost:3333/work_dr/filename.suffix), the current work_dir is {work_dir}",
            "content": f"You are ChatMol copilot, a helpful copilot in molecule analysis with tools. You think step-by-step before you conclude correctly. Use tools only when you need them. Answer to questions related molecular modelling. When providing file path for downloading, use the realpath of the file without modification, it should be looks like: [link name](http://localhost:3333/work_dr/filename.suffix), the current work_dir is {work_dir}",
        }
        }
    ]
    ]
    if st.session_state.openai_model.startswith("glm"):
    if st.session_state.openai_model.startswith("glm"):