Update app.py (#1)
Update app.py (891e9151abb3a9b549567218a34a7f87cbdf90ad)
Co-authored-by: Ritesh Mehrotra <ritzy88@users.noreply.huggingface.co>
app.py
CHANGED
@@ -4,8 +4,11 @@ from huggingface_hub import InferenceClient
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
+# client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+# client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
+# client = InferenceClient("Qwen/Qwen3-Coder-480B-A35B-Instruct")
+client = InferenceClient("openai/gpt-oss-120b")
+
 
 def respond(
     message,
@@ -34,19 +37,33 @@ def respond(
         temperature=temperature,
         top_p=top_p,
     ):
+
         if message.choices and message.choices[0].delta.content is not None:
             token = message.choices[0].delta.content
             response += token
             yield response
 
 
+instruction = '''You are a song information assistant with a single, strict purpose. You only provide the artist, album, and year released when the user explicitly provides a specific song name.
+For every user input, you will follow this precise decision process:
+Analyze: Determine if the user's input is a specific, valid song name.
+Act:
+If Yes: Provide the artist, album, and year released for that song.
+If No: Politely state that you can only provide the artist, album, and year released for a specific song name. Do not ask for clarification or engage in any other conversation.
+Keep your response concise.
+Example User Inputs and Your Expected Responses:
+User: "Bohemian Rhapsody"
+Your Response: "Artist: Queen, Album: A Night at the Opera, Year Released: 1975"
+User: "What's the best album of 2023?"
+Your Response: "I'm sorry, I can only provide the artist, album, and year released for a specific song name'''
+
 """
 For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
 """
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
+        gr.Textbox(value=instruction, label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
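
For reference, here is a minimal sketch of how the updated pieces fit together in the full app.py. Only the lines shown in the hunks above are confirmed by this commit; the imports, the history handling inside respond(), the values of the final top_p slider, and the demo.launch() call are assumed from the stock Gradio ChatInterface streaming template this Space follows, and the instruction string is abbreviated.

# Sketch of the full app.py after this change (not part of the diff).
# Parts outside the hunks above are assumed from the default Gradio
# ChatInterface template, not confirmed by this commit.
import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("openai/gpt-oss-120b")

# Abbreviated; the full song-information system prompt is in the diff above.
instruction = '''You are a song information assistant with a single, strict purpose. ...'''


def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Assemble an OpenAI-style message list: system prompt, prior turns, new input.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:  # tuple-style history, as in the older template
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    # Stream the completion; the loop variable shadows the `message` parameter,
    # matching the naming used in the diff context.
    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        if message.choices and message.choices[0].delta.content is not None:
            token = message.choices[0].delta.content
            response += token
            yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value=instruction, label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        # The diff cuts off at gr.Slider(; these are the template's default top_p values.
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()

Swapping inference models only requires editing the single uncommented InferenceClient(...) line; the commented-out alternatives in the diff keep the other model IDs at hand for a one-line change.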