from llama_index import (
    GPTVectorStoreIndex,
    LLMPredictor,
    PromptHelper,
    ServiceContext,
    SimpleDirectoryReader,
    StorageContext,
    load_index_from_storage,
)
from langchain.chat_models import ChatOpenAI
import gradio as gr
import os
import tkinter as tk
import openai
from tkinter import filedialog

os.environ["OPENAI_API_KEY"] = '<YOUR-API-KEY>'
# sometimes, we need to explicitly set this as well
openai.api_key = '<YOUR-API-KEY>'
def construct_index(directory_path):
    # prompt / chunking settings
    max_input_size = 4096      # model context window
    num_outputs = 512          # max tokens reserved for the answer
    max_chunk_overlap = 0.2    # chunk overlap ratio
    chunk_size_limit = 600     # max tokens per chunk

    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap,
                                 chunk_size_limit=chunk_size_limit)
    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7,
                                                model_name="gpt-4-32k",
                                                max_tokens=num_outputs))
    service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor,
                                                   prompt_helper=prompt_helper)

    # read every file in the selected folder and build a vector index over it
    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTVectorStoreIndex.from_documents(documents,
                                               service_context=service_context,
                                               show_progress=True,
                                               store_nodes_override=True)

    # persist the index to the "storage" sub-folder chosen below
    index.storage_context.persist(persist_dir=storage_folder)
    return index
def chatbot(input_text):
    # query the loaded index and return the plain-text answer
    response = qry.query(input_text)
    return response.response
iface = gr.Interface(fn=chatbot,
                     inputs=gr.components.Textbox(lines=7, label="Hello, how may I help you today?"),
                     outputs="text",
                     title="Fiverr Demo Chatbot")
# ask the user to pick the folder holding the documents to index
root = tk.Tk()
root.withdraw()  # hide the main tkinter window
folder_selected = filedialog.askdirectory()
if folder_selected == "":
    exit()

storage_folder = os.path.join(folder_selected, "storage")

# build and persist the index, then reload it from disk before querying
index = construct_index(folder_selected)
index = load_index_from_storage(StorageContext.from_defaults(persist_dir=storage_folder))
qry = index.as_query_engine()

iface.launch()
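
# Usage note (not part of the original paste; a minimal sketch assuming the same
# llama_index 0.6/0.7-era API used above): after a first run has persisted the
# index, a later run could skip re-embedding by loading it straight from the
# "storage" folder and querying the engine directly, without the Gradio UI:
#
#     storage_context = StorageContext.from_defaults(persist_dir=storage_folder)
#     index = load_index_from_storage(storage_context)
#     qry = index.as_query_engine()
#     print(qry.query("Summarize the indexed documents in one paragraph.").response)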