Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
"""Minimal conversational CLI: an LLMChain with buffer memory that replays the
chat transcript into the prompt and answers questions read from stdin."""

import os  # NOTE(review): unused in this snippet — presumably kept for OPENAI_API_KEY setup; confirm before removing

from langchain.chains.conversation.memory import ConversationBufferMemory
from langchain import LLMChain, PromptTemplate
# BUG FIX: the original also imported `OpenAI` from `langchain` above, which was
# immediately shadowed by this community-package import. Keep only this one.
from langchain_community.llms import OpenAI

# Prompt layout: the accumulated transcript first, then the new user turn.
template = """
{chat_history}
Human: {question}
AI:
"""
prompt_template = PromptTemplate(
    input_variables=["chat_history", "question"],
    template=template,
)

# Buffer memory stores the whole transcript under `chat_history`; `input_key`
# tells it which prompt variable carries the user's input.
memory2 = ConversationBufferMemory(memory_key="chat_history", input_key="question")

llm_chain = LLMChain(
    llm=OpenAI(temperature=0.9),  # 0.9: fairly creative sampling
    prompt=prompt_template,
    verbose=True,
    memory=memory2,
)

# REPL loop. Exit cleanly on Ctrl-D (EOF) or Ctrl-C instead of a traceback.
while True:
    try:
        question = input("Ask me question: ")
    except (EOFError, KeyboardInterrupt):
        break
    result = llm_chain.predict(question=question)
    print(result)
Advertisement
Add Comment
Please sign in to add a comment
Advertisement