Update app.py
app.py CHANGED
@@ -183,6 +183,17 @@ css = """
 }
 """
 
+# Function to get just the model name from the dropdown selection
+def get_model_name(full_selection):
+    return full_selection.split(" - ")[0]
+
+# Function to handle message submission
+def submit_message(msg, history, prompt, model, tok, temp, k, p, rp):
+    return chat_response(
+        msg, history, prompt,
+        get_model_name(model), tok, temp, k, p, rp
+    ), ""
+
 # ------------------------------
 # Gradio UI
 # ------------------------------
@@ -235,45 +246,26 @@ with gr.Blocks(title="Qwen3 Chat", css=css) as demo:
         <p>Qwen3 models developed by Alibaba Cloud. Interface powered by Gradio and ZeroGPU.</p>
     </div>
     """)
-
-    #
-    def get_model_name(full_selection):
-        return full_selection.split(" - ")[0]
-
+
+    # Event handlers
     clr.click(fn=lambda: ([], ""), outputs=[chat, txt])
     cnl.click(fn=cancel_generation)
 
-    #
-    def submit_message(msg, history, prompt, model, tok, temp, k, p, rp):
-        return chat_response(
-            msg, history, prompt,
-            get_model_name(model), tok, temp, k, p, rp
-        ), ""
-
+    # Handle submission from Enter key
     txt.submit(
         fn=submit_message,
-        inputs=[txt, chat, sys_prompt,
-                model_dd, max_tok, temp, k, p, rp],
+        inputs=[txt, chat, sys_prompt, model_dd, max_tok, temp, k, p, rp],
         outputs=[chat, txt],
         show_progress=True
     )
 
-    #
-    def send_message(msg, history, prompt, model, tok, temp, k, p, rp):
-        return chat_response(
-            msg, history, prompt,
-            get_model_name(model), tok, temp, k, p, rp
-        ), ""
-
+    # Handle submission from Send button
     send_btn.click(
-        fn=
-        inputs=[txt, chat, sys_prompt,
-                model_dd, max_tok, temp, k, p, rp],
+        fn=submit_message,
+        inputs=[txt, chat, sys_prompt, model_dd, max_tok, temp, k, p, rp],
        outputs=[chat, txt],
         show_progress=True
     )
-        show_progress=True
-    )
 
 if __name__ == "__main__":
     demo.launch()
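The net effect of the change is easier to see outside the diff: the helpers now live at module level, and a single submit_message handler is shared by the Enter-key submit and the Send button (replacing the truncated fn= wiring and the duplicate send_message definition). Below is a minimal, self-contained sketch of that pattern; the chat_response stub, the model list, and the slider ranges are placeholders standing in for the real definitions elsewhere in app.py, and the clear/cancel handlers and ZeroGPU streaming are omitted.

```python
import gradio as gr

# Stand-in for the real streaming generation handler defined elsewhere in app.py.
def chat_response(msg, history, prompt, model_name, tok, temp, k, p, rp):
    return (history or []) + [(msg, f"[{model_name}] echo: {msg}")]

# Keep only the model id from a dropdown entry like "Qwen3-4B - description".
def get_model_name(full_selection):
    return full_selection.split(" - ")[0]

# Shared handler for both events; returning "" as the second output clears the textbox.
def submit_message(msg, history, prompt, model, tok, temp, k, p, rp):
    return chat_response(
        msg, history, prompt,
        get_model_name(model), tok, temp, k, p, rp
    ), ""

with gr.Blocks(title="Qwen3 Chat") as demo:
    chat = gr.Chatbot()
    with gr.Row():
        txt = gr.Textbox(placeholder="Type a message...", scale=4)
        send_btn = gr.Button("Send", scale=1)
    sys_prompt = gr.Textbox(label="System prompt", value="You are a helpful assistant.")
    model_dd = gr.Dropdown(
        label="Model",
        choices=["Qwen3-0.6B - smallest", "Qwen3-4B - balanced"],  # placeholder list
        value="Qwen3-0.6B - smallest",
    )
    max_tok = gr.Slider(64, 4096, value=1024, label="Max new tokens")
    temp = gr.Slider(0.0, 2.0, value=0.7, label="Temperature")
    k = gr.Slider(1, 100, step=1, value=20, label="Top-k")
    p = gr.Slider(0.0, 1.0, value=0.9, label="Top-p")
    rp = gr.Slider(1.0, 2.0, value=1.1, label="Repetition penalty")

    # Both events reuse the same handler and the same input/output lists.
    handler = dict(
        fn=submit_message,
        inputs=[txt, chat, sys_prompt, model_dd, max_tok, temp, k, p, rp],
        outputs=[chat, txt],
        show_progress="full",  # the commit passes show_progress=True; "full" is the string form
    )
    txt.submit(**handler)      # Enter key
    send_btn.click(**handler)  # Send button

if __name__ == "__main__":
    demo.launch()
```

Funneling both events through one handler mirrors what the commit does: the duplicated send_message block is deleted and send_btn.click points at the same submit_message used by txt.submit.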