Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -1,11 +1,11 @@
 from huggingface_hub import InferenceClient
 import gradio as gr
+import random.randint as rand
 import prompts
 client = InferenceClient(
     "mistralai/Mixtral-8x7B-Instruct-v0.1"
 )
 
-
 def format_prompt(message, history):
     prompt = "<s>"
     for user_prompt, bot_response in history:
@@ -21,6 +21,8 @@ agents =[
 def generate(
     prompt, history, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
 ):
+    seed = rand(1,1111111111111111)
+
     agent=prompts.WEB_DEV
     if agent_name == "WEB_DEV":
         agent = prompts.WEB_DEV
@@ -40,7 +42,7 @@ def generate(
         top_p=top_p,
         repetition_penalty=repetition_penalty,
         do_sample=True,
-        seed=
+        seed=seed,
     )
 
     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
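Note on this change: it completes the dangling seed= left in the previous revision (a syntax error) and draws a fresh seed on every call, but the added line import random.randint as rand is itself invalid Python. random is a module, not a package, so import random.randint raises ModuleNotFoundError at startup, which is consistent with the Space's "Runtime error" status. Below is a minimal working sketch of the intended import and per-call seed, assuming the rest of app.py stays exactly as shown in the diff:

from random import randint as rand  # working form; "import random.randint as rand" raises ModuleNotFoundError

# Draw a fresh seed in the same range the commit uses; forwarding it to the
# generation kwargs as seed=seed keeps a single call reproducible while letting
# repeated prompts sample differently.
seed = rand(1, 1111111111111111)

Equivalently, a plain import random followed by random.randint(1, 1111111111111111) avoids the alias altogether. The context lines also suggest one thing to verify: generate takes a sys_prompt="" parameter while the body formats f"{system_prompt}, {prompt}", so system_prompt must be assigned somewhere in the elided lines or that call will raise a NameError.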