Commit c8f429c · Parent: 149c993 · Create main.py
main.py (ADDED, 102 lines):
import sys
import logging

import gradio as gr

from llm import *
from utils import *
from presets import *
from overwrites import *

logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))

# Use the customized chunk compaction (imported from overwrites) for llama_index's PromptHelper.
PromptHelper.compact_text_chunks = compact_text_chunks

with gr.Blocks(css="") as demo:
    with gr.Box():
        gr.Markdown("<h1 style='font-size: 48px; text-align: center;'>LLaMa Difu</h1>")
        gr.Markdown("<h3 style='text-align: center;'>LLaMa Do It For U 🦙</h3>")

    chat_context = gr.State([])
    new_google_chat_context = gr.State([])

    # Index selector
    with gr.Row():
        with gr.Column(scale=3):
            with gr.Box():
                gr.Markdown("**Indices**")
                with gr.Row():
                    with gr.Column(scale=12):
                        index_select = gr.Dropdown(choices=refresh_json_list(plain=True), value="index_select", show_label=False, multiselect=False).style(container=False)
                    with gr.Column(min_width=30, scale=1):
                        index_refresh_btn = gr.Button("🔄").style()

    # Chat tab
    with gr.Tab("Search"):
        with gr.Row():
            with gr.Column(scale=1):
                chat_tone = gr.Radio(["smart", "concise", "creative"], label="chat_tone", type="index", value="concise")
            with gr.Column(scale=3):
                search_options_checkbox = gr.CheckboxGroup(label="APIs", choices=["🔍 Google", "Manual"])
        chatbot = gr.Chatbot()
        with gr.Row():
            with gr.Column(min_width=50, scale=1):
                chat_empty_btn = gr.Button("🧹", variant="secondary")
            with gr.Column(scale=12):
                chat_input = gr.Textbox(show_label=False, placeholder="Enter text...").style(container=False)
            with gr.Column(min_width=50, scale=1):
                chat_submit_btn = gr.Button("🚀", variant="primary")

    # Query settings tab
    with gr.Tab("Setting"):
        with gr.Row():
            sim_k = gr.Slider(1, 10, 3, step=1, label="similarity_topk", interactive=True, show_label=True)
            temperature = gr.Slider(0, 2, 0.5, step=0.1, label="temperature", interactive=True, show_label=True)
        with gr.Row():
            with gr.Column():
                tmpl_select = gr.Radio(list(prompt_tmpl_dict.keys()), value="Default", label="Prompt", interactive=True)
                prompt_tmpl = gr.Textbox(value=prompt_tmpl_dict["Default"], lines=10, max_lines=40, show_label=False)
            with gr.Column():
                refine_select = gr.Radio(list(refine_tmpl_dict.keys()), value="Default", label="Refine", interactive=True)
                refine_tmpl = gr.Textbox(value=refine_tmpl_dict["Default"], lines=10, max_lines=40, show_label=False)

    # Upload / index-construction tab
    with gr.Tab("Upload"):
        with gr.Row():
            with gr.Column():
                index_type = gr.Dropdown(choices=["GPTListIndex", "GPTVectorStoreIndex"], label="index_type", value="GPTVectorStoreIndex")
                upload_file = gr.Files(label="upload_file (.txt, .pdf, .epub)")
                new_index_name = gr.Textbox(placeholder="new_index_name: ", show_label=False).style(container=False)
                construct_btn = gr.Button("⚒️ Index", variant="primary")
        with gr.Row():
            with gr.Column():
                with gr.Row():
                    max_input_size = gr.Slider(256, 4096, 4096, step=1, label="max_input_size", interactive=True, show_label=True)
                    num_outputs = gr.Slider(256, 4096, 512, step=1, label="num_outputs", interactive=True, show_label=True)
                with gr.Row():
                    max_chunk_overlap = gr.Slider(0, 100, 20, step=1, label="max_chunk_overlap", interactive=True, show_label=True)
                    chunk_size_limit = gr.Slider(0, 4096, 0, step=1, label="chunk_size_limit", interactive=True, show_label=True)
                with gr.Row():
                    embedding_limit = gr.Slider(0, 100, 0, step=1, label="embedding_limit", interactive=True, show_label=True)
                    separator = gr.Textbox(show_label=False, label="separator", placeholder=",", value="", interactive=True)
                with gr.Row():
                    num_children = gr.Slider(2, 100, 10, step=1, label="num_children", interactive=False, show_label=True)
                    max_keywords_per_chunk = gr.Slider(1, 100, 10, step=1, label="max_keywords_per_chunk", interactive=False, show_label=True)

    # Event wiring
    index_refresh_btn.click(refresh_json_list, None, [index_select])

    chat_input.submit(chat_ai, [index_select, chat_input, prompt_tmpl, refine_tmpl, sim_k, chat_tone, chat_context, chatbot, search_options_checkbox], [chat_context, chatbot])
    chat_input.submit(reset_textbox, [], [chat_input])
    chat_submit_btn.click(chat_ai, [index_select, chat_input, prompt_tmpl, refine_tmpl, sim_k, chat_tone, chat_context, chatbot, search_options_checkbox], [chat_context, chatbot])
    chat_submit_btn.click(reset_textbox, [], [chat_input])
    chat_empty_btn.click(lambda: ([], []), None, [chat_context, chatbot])

    tmpl_select.change(change_prompt_tmpl, [tmpl_select], [prompt_tmpl])
    refine_select.change(change_refine_tmpl, [refine_select], [refine_tmpl])

    index_type.change(lock_params, [index_type], [num_children, max_keywords_per_chunk])
    construct_btn.click(construct_index, [upload_file, new_index_name, index_type, max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit, embedding_limit, separator, num_children], [index_select])


if __name__ == "__main__":
    demo.title = "LLaMa Do It For U"
    demo.queue().launch()
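
Note that main.py only wires the UI: the callbacks it references (refresh_json_list, reset_textbox, chat_ai, change_prompt_tmpl, change_refine_tmpl, lock_params, construct_index) come from the sibling modules pulled in by the star imports, which are not part of this commit. For orientation only, the call sites above imply roughly the shapes sketched below; the bodies and return types are placeholder assumptions inferred from how main.py calls them, not the project's actual code.

# Hypothetical signatures inferred from the call sites in main.py above.
# The real implementations live in llm.py / utils.py / overwrites.py,
# which this commit does not include.
import gradio as gr

def refresh_json_list(plain: bool = False):
    # Assumed: list saved index names; return the raw list when plain=True
    # (used at build time), otherwise a Dropdown update (used as a callback).
    names = []  # e.g. scan a local folder for saved index JSON files
    return names if plain else gr.Dropdown.update(choices=names)

def reset_textbox():
    # Assumed: clear the chat input after a message is submitted.
    return gr.Textbox.update(value="")

def chat_ai(index_name, question, prompt_tmpl, refine_tmpl, sim_k, chat_tone,
            context, chatbot, search_options):
    # Assumed: answer `question` against the selected index and append the
    # exchange to both the stored context and the visible chat history.
    answer = "..."  # query the index here
    context.append((question, answer))
    chatbot.append((question, answer))
    return context, chatbot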