from langchain import PromptTemplate, LLMChain
from langchain.chat_models import ChatOpenAI
import chainlit as cl
import os

# Supply your own OpenAI API key; never commit real keys to source control.
os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"

template = """Question: {question}

Answer: Let's think step by step."""


@cl.langchain_factory(use_async=False)
def factory():
    # Build a simple chain: a step-by-step prompt fed to a streaming chat model,
    # so tokens appear incrementally in the Chainlit UI.
    prompt = PromptTemplate(template=template, input_variables=["question"])
    llm_chain = LLMChain(prompt=prompt, llm=ChatOpenAI(temperature=0, streaming=True))

    return llm_chain
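
# Usage sketch: Chainlit serves this module as a chat app. The file name
# "app.py" is an assumption; substitute whatever this script is saved as.
#
#   chainlit run app.py -w
#
# Each user message is substituted into {question}, run through the LLMChain
# returned by factory(), and the streamed answer is rendered in the browser.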