Created August 8, 2024 06:38
-
-
Save justinhennessy/eacc33b19f5cddb7a3d115a2753a20bf to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough  # kept for import stability; no longer used below
from langchain.memory import ConversationBufferWindowMemory

# Demo: a chat loop where the chain does NOT manage history itself.
# The windowed memory buffer is injected into the prompt manually on
# every turn, and updated manually after each model response.

# Initialize the LLM client.
# NOTE(review): hard-coded placeholder API key — replace with a real key
# or rely on the OPENAI_API_KEY environment variable.
llm = ChatOpenAI(api_key="xxx")

# Prompt template: the recent history is spliced into the system message
# via the {buffer} slot; the current user message fills {input}.
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant. Here's the recent conversation history:\n{buffer}"),
    ("human", "{input}"),
])

# Create the chain (prompt -> model).
chain = prompt | llm

# Keep only the last k=2 exchanges in the rolling history window.
conversation_memory = ConversationBufferWindowMemory(k=2)

# Originally `RunnablePassthrough() | chain`; the passthrough forwarded its
# input unchanged, so it was a no-op — the chain alone behaves identically.
conversation_chain = chain

# Example usage: interactive REPL; type 'exit' to quit.
while True:
    user_input = input("\n\nUser: ")
    if user_input.lower() == 'exit':
        break
    # History is supplied explicitly because the chain is stateless.
    result = conversation_chain.invoke({
        "input": user_input,
        "buffer": conversation_memory.buffer,
    })
    print("AI:", result.content)
    # Manually persist this turn so the next prompt's {buffer} includes it.
    conversation_memory.save_context({"input": user_input}, {"output": result.content})
    # Debug dump of the raw chat memory object and the rendered buffer.
    print(f"\n\n******\n{conversation_memory.chat_memory}\n\n{conversation_memory.buffer}\n\n******")
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.