LangChain is a framework designed for developing applications powered by language models. It is not just about calling language models via API; it also provides data awareness, agent properties, and a range of module support, enabling developers to build more powerful and flexible applications.
https://github.com/langchain-ai/streamlit-agent
This article contains various reference implementations of LangChain agents, presented in the form of Streamlit applications.
Basic Example
https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/minimal_agent.py
- Imports the relevant modules from the LangChain framework and the Streamlit library.
- Creates an OpenAI language model object (LLM) with the temperature parameter set to 0 and streaming enabled.
- In the Streamlit application, when the user inputs text, it retrieves the user's input via st.chat_input().
from langchain.llms import OpenAI
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain.callbacks import StreamlitCallbackHandler
import streamlit as st

# Deterministic (temperature=0) LLM with streaming so tokens render
# incrementally in the Streamlit UI.
llm = OpenAI(temperature=0, streaming=True)
tools = load_tools(["ddg-search"])
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)

# Echo the user's message, then stream the agent's reasoning and answer
# into the assistant chat bubble via the Streamlit callback handler.
if prompt := st.chat_input():
    st.chat_message("user").write(prompt)
    with st.chat_message("assistant"):
        handler = StreamlitCallbackHandler(st.container())
        answer = agent.run(prompt, callbacks=[handler])
        st.write(answer)
Interacting with DataFrame
https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/chat_pandas_df.py
- Uploads a data file and reads its contents into a DataFrame.
- Creates an OpenAI LLM.
- Creates an agent to interact with the DataFrame.
# `file_formats`, `clear_submit` and `load_data` are helpers defined
# elsewhere in this script.
uploaded_file = st.file_uploader(
    "Upload a Data file",
    type=list(file_formats.keys()),
    help="Various File formats are Supported",
    on_change=clear_submit,
)
if uploaded_file:
    df = load_data(uploaded_file)

openai_api_key = st.sidebar.text_input("OpenAI API Key", type="password")

# Seed the transcript on first run, or reset it on demand.
if "messages" not in st.session_state or st.sidebar.button("Clear conversation history"):
    st.session_state["messages"] = [{"role": "assistant", "content": "How can I help you?"}]

# Replay the stored conversation so far.
for message in st.session_state.messages:
    st.chat_message(message["role"]).write(message["content"])

if prompt := st.chat_input(placeholder="What is this data about?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)

    if not openai_api_key:
        st.info("Please add your OpenAI API key to continue.")
        st.stop()

    llm = ChatOpenAI(
        temperature=0,
        model="gpt-3.5-turbo-0613",
        openai_api_key=openai_api_key,
        streaming=True,
    )

    # Function-calling agent that can execute pandas operations on `df`.
    agent = create_pandas_dataframe_agent(
        llm,
        df,
        verbose=True,
        agent_type=AgentType.OPENAI_FUNCTIONS,
        handle_parsing_errors=True,
    )

    with st.chat_message("assistant"):
        callback = StreamlitCallbackHandler(st.container(), expand_new_thoughts=False)
        answer = agent.run(st.session_state.messages, callbacks=[callback])
        st.session_state.messages.append({"role": "assistant", "content": answer})
        st.write(answer)
Interacting with Documents
https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/chat_with_documents.py
- Uploads PDF documents and reads the contents of the files.
- Creates an OpenAI LLM.
- Creates a conversational retrieval chain to interact with the documents.
# `configure_retriever`, `PrintRetrievalHandler` and `StreamHandler` are
# helpers defined elsewhere in this script.
uploaded_files = st.sidebar.file_uploader(
    label="Upload PDF files", type=["pdf"], accept_multiple_files=True
)
retriever = configure_retriever(uploaded_files)

# Conversation memory backed by Streamlit's chat message history.
msgs = StreamlitChatMessageHistory()
memory = ConversationBufferMemory(
    memory_key="chat_history", chat_memory=msgs, return_messages=True
)

# Streaming chat model feeding the retrieval QA chain.
llm = ChatOpenAI(
    model_name="gpt-3.5-turbo", openai_api_key=openai_api_key, temperature=0, streaming=True
)
qa_chain = ConversationalRetrievalChain.from_llm(
    llm, retriever=retriever, memory=memory, verbose=True
)

# Seed the history on first run, or wipe it on demand.
if not msgs.messages or st.sidebar.button("Clear message history"):
    msgs.clear()
    msgs.add_ai_message("How can I help you?")

# Map LangChain message types onto Streamlit chat roles and replay history.
role_for = {"human": "user", "ai": "assistant"}
for message in msgs.messages:
    st.chat_message(role_for[message.type]).write(message.content)

if user_query := st.chat_input(placeholder="Ask me anything!"):
    st.chat_message("user").write(user_query)
    with st.chat_message("assistant"):
        # One handler surfaces retrieved chunks, the other streams tokens.
        retrieval_handler = PrintRetrievalHandler(st.container())
        stream_handler = StreamHandler(st.empty())
        response = qa_chain.run(user_query, callbacks=[retrieval_handler, stream_handler])
Interacting with Databases
https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/chat_with_sql_db.py
- Configures a database connection (a read-only local SQLite file or a user-supplied URI).
- Creates an OpenAI LLM.
- Creates an agent to interact with the database.
# Setup agent
llm = OpenAI(openai_api_key=openai_api_key, temperature=0, streaming=True)


@st.cache_resource(ttl="2h")
def configure_db(db_uri):
    """Return a LangChain SQLDatabase for *db_uri*, cached for two hours.

    For the bundled local Chinook database the SQLite connection is opened
    in read-only mode; any other URI is handed to SQLAlchemy as-is.
    """
    if db_uri == LOCALDB:
        # Make the DB connection read-only to reduce risk of injection attacks
        # See: https://python.langchain.com/docs/security
        db_filepath = (Path(__file__).parent / "Chinook.db").absolute()
        creator = lambda: sqlite3.connect(f"file:{db_filepath}?mode=ro", uri=True)
        # Fix: `creator` is a SQLAlchemy create_engine() keyword, not a
        # SQLDatabase argument. Passing it to SQLDatabase (as the original
        # did) bypasses the read-only connection factory entirely.
        return SQLDatabase(create_engine("sqlite:///", creator=creator))
    return SQLDatabase.from_uri(database_uri=db_uri)
db = configure_db(db_uri)

# ReAct-style agent equipped with the SQL toolkit for this database.
toolkit = SQLDatabaseToolkit(db=db, llm=llm)
agent = create_sql_agent(
    llm=llm,
    toolkit=toolkit,
    verbose=True,
    agent_type=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
)

# Seed the transcript on first run, or reset it on demand.
if "messages" not in st.session_state or st.sidebar.button("Clear message history"):
    st.session_state["messages"] = [{"role": "assistant", "content": "How can I help you?"}]

# Replay prior turns.
for message in st.session_state.messages:
    st.chat_message(message["role"]).write(message["content"])

user_query = st.chat_input(placeholder="Ask me anything!")
if user_query:
    st.session_state.messages.append({"role": "user", "content": user_query})
    st.chat_message("user").write(user_query)
    with st.chat_message("assistant"):
        callback = StreamlitCallbackHandler(st.container())
        answer = agent.run(user_query, callbacks=[callback])
        st.session_state.messages.append({"role": "assistant", "content": answer})
        st.write(answer)