Commit 8e54d0ef authored by JinyuanSun's avatar JinyuanSun
Browse files

update chatmol_pkg and chatmol-streamlit

parent 57307452
Loading
Loading
Loading
Loading
+18 −0
Original line number Diff line number Diff line
# chatmol-streamlit
Streamlit app for chatmol

## Installation
You will need PyMOL and the chatmol package installed:

```bash
conda install -c conda-forge pymol-open-source
pip install streamlit==1.35.0
pip install openai anthropic
pip install chatmol
```

## Usage
Before running the app, make sure PyMOL is correctly installed.
```bash
streamlit run chatmol-streamlit.py
```
 No newline at end of file
+83 −0
Original line number Diff line number Diff line
import streamlit as st
import chatmol as cm

# Sidebar header and a short description of the app.
st.sidebar.title("ChatMol")
st.sidebar.markdown("Welcome to ChatMol! ChatMol is a tool that allows you to interact with PyMOL using natural language.")

# Model identifiers grouped by provider. List order matters: it is the
# order the models appear in the sidebar selectbox below.
openai_llms = ['gpt-4o', 'gpt-4-turbo', 'gpt-3.5-turbo']
claude_llms = ['claude-3-5-sonnet-20240620', 'claude-3-sonnet-20240229', 'claude-3-haiku-20240307', 'claude-3-opus-20240229']
chatmol_llms = ['chatlite']

# Blurb displayed in the sidebar for the currently selected model.
# Looked up with dict.get(), so a model missing from this table is
# tolerated (a fallback message is shown instead).
introduction_of_models = {
    'gpt-4o': "GPT-4o (“o” for “omni”) is most advanced model of OpenAI. It has the same high intelligence as GPT-4 Turbo but is much more efficient—it generates text 2x faster and is 50% cheaper.",
    'gpt-4-turbo': "GPT-4 can solve difficult problems with greater accuracy than any of previous models of OpenAI, thanks to its broader general knowledge and advanced reasoning capabilities.",
    'gpt-3.5-turbo': "GPT-3.5 Turbo models can understand and generate natural language or code and have been optimized for chat.",
    'chatlite': "A model provided by ChatMol freely available to all, which is optimized for PyMOL commands generation but not good for general chat.",
    'claude-3-5-sonnet-20240620': "Most intelligent model of Anthropic, combining top-tier performance with improved speed. Currently the only model in the Claude 3.5 family.\n - Advanced research and analysis\n - Complex problem-solving\n - Sophisticated language understanding and generation\n - High-level strategic planning",
    'claude-3-sonnet-20240229': "Balances intelligence and speed for high-throughput tasks.\n - Data processing over vast amounts of knowledge\n - Sales forecasting and targeted marketing\n - Code generation and quality control",
    'claude-3-haiku-20240307': "Near-instant responsiveness that can mimic human interactions.\n - Live support chat\n - Translations\n - Content moderation\n - Extracting knowledge from unstructured data",
    'claude-3-opus-20240229': "Strong performance on highly complex tasks, such as math and coding.\n - Task automation across APIs and databases, and powerful coding tasks\n - R&D, brainstorming and hypothesis generation, and drug discovery\n - Strategy, advanced analysis of charts and graphs, financials and market trends, and forecasting"
}

# Keep a handle to the chatmol module in session state and offer a
# "Start PyMOL" button until a PyMOL session ("ps") exists. Once the
# session is created the button disappears on the next rerun.
if "ps" not in st.session_state:
    st.session_state["cm"] = cm
    if st.button("Start PyMOL"):
        st.session_state["ps"] = cm.start_pymol_gui()

# Build the list of selectable models once per session, based on which
# API clients chatmol's default client managed to initialise.
# NOTE(review): "defaul_client" (sic) is the attribute name exported by
# the chatmol package, so the typo must be preserved here.
if "available_llms" not in st.session_state:
    st.session_state["available_llms"] = []
    if st.session_state["cm"].defaul_client.client is not None:
        st.session_state["available_llms"].extend(openai_llms)
    st.session_state["available_llms"].extend(chatmol_llms)
    if st.session_state["cm"].defaul_client.client_anthropic is not None:
        st.session_state["available_llms"].extend(claude_llms)

# The selectbox both renders the picker and returns the current choice
# on every rerun, so the original `st.session_state["llm"] = ''`
# initialisation was dead code (always overwritten) and was removed.
st.session_state["llm"] = st.sidebar.selectbox("Select LLM", st.session_state["available_llms"])

# Show the blurb for the selected model. selectbox can return None when
# the options list is empty, hence the .get() fallback.
st.sidebar.write(introduction_of_models.get(st.session_state["llm"], "No introduction available"))

# Connectivity check for the hosted providers (chatlite needs no key).
if st.session_state["llm"] in openai_llms + claude_llms:
    if st.sidebar.button("check api availability"):
        with st.spinner("Checking..."):
            results = st.session_state["cm"].defaul_client.test_api_access()
        # test_api_access() returns {"<provider>_failure": value} where
        # value is False on success or an "Error: ..." string on failure,
        # so a truthy value is a failure message. Render failures with
        # error styling (the original showed them via st.sidebar.info,
        # which made real errors look like harmless notices).
        for key, failure in results.items():
            if failure:
                st.sidebar.error(failure)
            else:
                st.sidebar.info(f"{key.split('_')[0]} is available")

# Persist chat history across Streamlit reruns and replay it.
if "messages" not in st.session_state:
    st.session_state['messages'] = []

for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

if prompt := st.chat_input("What is up?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)

    # Guard: everything below reads st.session_state["ps"], which only
    # exists after the user clicked "Start PyMOL". The original code
    # crashed with a KeyError here instead of explaining the problem.
    if "ps" not in st.session_state:
        st.warning("PyMOL is not running yet. Click 'Start PyMOL' first.")
        st.stop()

    with st.spinner("Thinking..."):
        pymol_console = st.session_state["ps"].pymol_console
        llm = st.session_state["llm"]
        # Prompts ending in "?" are treated as questions and routed to the
        # chat endpoints; anything else is an instruction executed through
        # the PyMOL session object. Either way, the current PyMOL console
        # log is prepended as context.
        if prompt.endswith("?"):
            query = f"This is the log: \n\n{pymol_console}\n\n. This is my question: \n\n{prompt}"
            if llm in openai_llms:
                response = st.session_state["cm"].chat_with_gpt(query)
            elif llm in claude_llms:
                response = st.session_state["cm"].chat_with_claude(query)
            elif llm in chatmol_llms:
                response = st.session_state["cm"].chatlite(query)
            else:
                # The original left `response` undefined on this path,
                # raising NameError below.
                response = "No model selected. Please pick an LLM in the sidebar."
        else:
            query = f"This is the log: \n\n{pymol_console}\n\n. This is my instruction: \n\n{prompt}"
            if llm in openai_llms:
                response = st.session_state["ps"].chatgpt(query)
            elif llm in claude_llms:
                response = st.session_state["ps"].claude(query)
            elif llm in chatmol_llms:
                response = st.session_state["ps"].chatlite(query)
            else:
                response = "No model selected. Please pick an LLM in the sidebar."

        st.session_state.messages.append({"role": "assistant", "content": response})
    with st.chat_message("assistant"):
        # st.write returns None; the original pointlessly rebound it to
        # `response`. Just render the reply.
        st.write(response)
 No newline at end of file
+71 −30
Original line number Diff line number Diff line
@@ -6,7 +6,7 @@ import anthropic

class ChatMol:
    def __init__(self,
                api_key=None, 
                openai_api_key=None, 
                verbose=False,
                gpt_model="gpt-3.5-turbo-1106",
                chatgpt_max_history=10,
@@ -17,22 +17,20 @@ class ChatMol:
                ):
        self.in_pymol = in_pymol
        if in_pymol:
            # self.API_KEY_FILE = os.path.expanduser('~')+"/.PyMOL/apikey.txt"
            # from pymol import cmd
            self.stashed_commands = []
        self.API_KEY_FILE = os.path.expanduser('~')+"/.cache/chatmol/apikey.txt"
        # self.client_anthropic = anthropic.Anthropic(api_key="my_api_key")
        # self.OPENAI_KEY_ENV = "OPENAI_API_KEY"
        # self.api_key = api_key or self.load_api_key()
        # self.client = OpenAI(api_key=self.api_key)
        self.API_KEY_FILE = os.path.expanduser('~')+"/.cache/chatmol/apikey.json"
        self.OPENAI_KEY_ENV = "OPENAI_API_KEY"
        self.client = None
        self.client_anthropic = None
        self.warnings = []
        self.init_clients()
        self.lite_conversation_history = ""
        self.chatgpt_conversation_history = []
        self.claude_conversation_messages = []
        self.chatgpt_sys_prompt = "You are an AI language model specialized in providing PyMOL command line code solutions. "
        "Generate clear and effective solutions in a continuous manner. When providing demos or examples, try to use 'fetch' if object name is not provided. "
        "Prefer academic style visulizations. Code within triple backticks, comment and code should not in the same line."
        "Keep the response short, accurate, and concise."
        self.chatgpt_sys_prompt = "You are an expert familiar with PyMOL and specialized in providing PyMOL command line code solutions accuratly, and concisely. "
        self.chatgpt_sys_prompt += "When providing demos or examples, try to use 'fetch' if object name is not provided. "
        self.chatgpt_sys_prompt += "Prefer academic style visulizations. Code within triple backticks, comment and code should not in the same line."

        self.chatgpt_max_history = chatgpt_max_history
        self.gpt_model = gpt_model
        self.claude_model = claude_model
@@ -40,40 +38,83 @@ class ChatMol:
        self.chatgpt_max_tokens = chatgpt_max_tokens
        self.verbose = False

    def set_api_key(self, api_key):
        api_key = api_key.strip()
    def set_api_key(self, name, api_key):
        current_api_keys = {}
        if os.path.exists(self.API_KEY_FILE):
            with open(self.API_KEY_FILE, "r") as api_key_file:
                current_api_keys = json.load(api_key_file)
        if name in ["openai", "anthropic"]:
            current_api_keys[name] = api_key
            os.makedirs(os.path.dirname(self.API_KEY_FILE), exist_ok=True)
        try:
            with open(self.API_KEY_FILE, "w") as api_key_file:
                api_key_file.write(api_key)
            print("API key set and saved to file successfully.")
        except Exception as e:
            print(f"API key set successfully but could not be saved to file due to: {e}. You may need to reset the API key next time.")
    
    def load_api_key(self):
        api_key = os.getenv(self.OPENAI_KEY_ENV)
        if api_key:
            print("API key loaded from environment variable.")
            return api_key
                json.dump(current_api_keys, api_key_file)
        else:
            Warning("API key name must be either 'openai' or 'anthropic'.")
            return None
            
    def load_api_key(self, name):
        try:
            with open(self.API_KEY_FILE, "r") as api_key_file:
                api_key = api_key_file.read().strip()
                print("API key loaded from file.")
                return api_key
                api_keys = json.load(api_key_file)
                if api_keys.get(name):
                    print(f"API key loaded from file for {name}.")
                    return api_keys[name]
                else:
                    print(f"API key not found in file for {name}.")
                    return None
        except FileNotFoundError:
            print("API key file not found. Please set your API key using 'set_api_key' method or by environment variable.")
            return None
        
    def test_api_access(self):
        """Probe each configured LLM client with a minimal "Hello" request.

        Returns:
            dict: per-provider results keyed ``"anthropic_failure"`` /
            ``"openai_failure"``. The value is ``False`` when the probe
            succeeded, or an ``"Error: ..."`` string describing the
            exception when it failed. Providers whose client was never
            initialised (still ``None``) are omitted entirely.
        """
        test_result = {}
        # Anthropic: only probed when init_clients() created a client.
        if self.client_anthropic is not None:
            try:
                response = self.client_anthropic.messages.create(
                    model=self.claude_model,
                    # NOTE(review): reuses the ChatGPT system prompt and
                    # the chatgpt_* token/temperature settings for the
                    # Anthropic probe — presumably intentional; confirm.
                    system=self.chatgpt_sys_prompt,
                    max_tokens=self.chatgpt_max_tokens,
                    messages=[{"role": "user", "content": "Hello"}],
                    temperature=self.chatgpt_temp
                )
                print(f"Anthropic API access test successful: {response}")
                test_result["anthropic_failure"] = False
            except Exception as e:
                print(f"Anthropic API access test failed: {e}")
                test_result["anthropic_failure"] = f"Error: {e}"
        # OpenAI: same style of probe against the chat completions API.
        if self.client is not None:
            try:
                response = self.client.chat.completions.create(
                    model=self.gpt_model,
                    messages=[{"role": "user", "content": "Hello"}],
                    max_tokens=self.chatgpt_max_tokens,
                    temperature=self.chatgpt_temp
                )
                print(f"OpenAI API access test successful: {response}")
                test_result["openai_failure"] = False
            except Exception as e:
                print(f"OpenAI API access test failed: {e}")
                test_result["openai_failure"] = f"Error: {e}"
        return test_result


    def init_clients(self):
        """Create the Anthropic and OpenAI API clients if credentials exist.

        For each provider the environment variable wins; otherwise the key
        previously stored via set_api_key() is used. When neither source
        yields a key, the client attribute stays None and a warning string
        is appended to self.warnings.
        """
        # Bug fix: the original wrote
        #     elif api_key := self.load_api_key("anthropic") != "":
        # `!=` binds tighter than `:=`, so api_key was bound to the *bool*
        # result of the comparison and the client was constructed with
        # api_key=True. Worse, load_api_key() returns None when no key is
        # stored, and `None != ""` is truthy, so the branch was taken even
        # without a key. A plain truthiness test on the loaded key is the
        # correct form.
        if os.environ.get("ANTHROPIC_API_KEY"):
            self.client_anthropic = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
        elif api_key := self.load_api_key("anthropic"):
            self.client_anthropic = anthropic.Anthropic(api_key=api_key)
        else:
            # The original also called `Warning(...)` bare, which builds an
            # exception object and discards it (a no-op); the appended
            # warning string below is the real signal, so the no-op is gone.
            self.warnings.append("ANTHROPIC_API_KEY environment variable not found.")
            self.client_anthropic = None
        if os.environ.get("OPENAI_API_KEY"):
            self.client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
        elif api_key := self.load_api_key("openai"):
            self.client = OpenAI(api_key=api_key)
        else:
            self.warnings.append("OPENAI_API_KEY environment variable not found.")
            self.client = None

    def query_qaserver(self, question):