Spaces:
Runtime error
Runtime error
Commit
·
8af24d4
1
Parent(s):
f309e2f
Added top_p
Browse files
app.py
CHANGED
@@ -7,11 +7,12 @@ import requests
|
|
7 |
title = "A conversation with Gandalf (GPTJ-6B) 🧙"
|
8 |
description = ""
|
9 |
article = """
|
10 |
-
<p> To reset you need to reload the page </p>
|
11 |
<h2> Parameters: </h2>
|
12 |
<ul>
|
13 |
-
<li>
|
14 |
-
<li>
|
|
|
15 |
</ul>
|
16 |
<img src='http://www.simoninithomas.com/test/gandalf.jpg', alt="Gandalf"/>"""
|
17 |
theme="huggingface"
|
@@ -45,7 +46,7 @@ def clean_chat_output(txt, prompt):
|
|
45 |
return output
|
46 |
|
47 |
|
48 |
-
def chat(top_p, max_new_tokens, message):
|
49 |
history = gr.get_state() or []
|
50 |
history.append((message, ""))
|
51 |
gr.set_state(history)
|
@@ -56,6 +57,7 @@ def chat(top_p, max_new_tokens, message):
|
|
56 |
json_ = {"inputs": prompt,
|
57 |
"parameters":
|
58 |
{
|
|
|
59 |
"temperature": temperature,
|
60 |
"max_new_tokens": max_new_tokens,
|
61 |
"return_full_text": False
|
@@ -77,6 +79,7 @@ def chat(top_p, max_new_tokens, message):
|
|
77 |
iface = gr.Interface(
|
78 |
chat,
|
79 |
[
|
|
|
80 |
gr.inputs.Slider(minimum=0.5, maximum=1.5, step=0.1, default=1.1, label="temperature"),
|
81 |
gr.inputs.Slider(minimum=20, maximum=250, step=10, default=50, label="max_new_tokens"),
|
82 |
"text",
|
|
|
7 |
title = "A conversation with Gandalf (GPTJ-6B) 🧙"
|
8 |
description = ""
|
9 |
article = """
|
10 |
+
<p> To reset you <b>need to reload the page.</b> </p>
|
11 |
<h2> Parameters: </h2>
|
12 |
<ul>
|
13 |
+
<li><i>top_p</i>: controls how deterministic the model is in generating a response.</li>
|
14 |
+
<li><i>temperature</i>: (sampling temperature) higher values mean the model will take more risks.</li>
|
15 |
+
<li><i>max_new_tokens</i>: Max number of tokens in generation.</li>
|
16 |
</ul>
|
17 |
<img src='http://www.simoninithomas.com/test/gandalf.jpg', alt="Gandalf"/>"""
|
18 |
theme="huggingface"
|
|
|
46 |
return output
|
47 |
|
48 |
|
49 |
+
def chat(top_p, temperature, max_new_tokens, message):
|
50 |
history = gr.get_state() or []
|
51 |
history.append((message, ""))
|
52 |
gr.set_state(history)
|
|
|
57 |
json_ = {"inputs": prompt,
|
58 |
"parameters":
|
59 |
{
|
60 |
+
"top_p": top_p,
|
61 |
"temperature": temperature,
|
62 |
"max_new_tokens": max_new_tokens,
|
63 |
"return_full_text": False
|
|
|
79 |
iface = gr.Interface(
|
80 |
chat,
|
81 |
[
|
82 |
+
gr.inputs.Slider(minimum=0.5, maximum=1, step=0.05, default=0.9, label="top_p"),
|
83 |
gr.inputs.Slider(minimum=0.5, maximum=1.5, step=0.1, default=1.1, label="temperature"),
|
84 |
gr.inputs.Slider(minimum=20, maximum=250, step=10, default=50, label="max_new_tokens"),
|
85 |
"text",
|