fix caching and change model
- app.py +1 -1
- utils/haystack.py +2 -2
app.py
CHANGED

@@ -25,7 +25,7 @@ if st.session_state.get("model") == None:
 mistral, openai = st.columns([2,1], gap="small")
 
 with mistral:
-    st.button("
+    st.button("Mistral-7B-Instruct-v0.2", on_click=mistral_pressed, type="primary")
 
 with openai:
     st.button("GPT-4", on_click=openai_pressed, type="primary")
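For context: `mistral_pressed` and `openai_pressed` are defined outside this hunk, and the hunk header shows the buttons are only rendered while `st.session_state.get("model") == None`. A minimal sketch of how the picker presumably fits together — the callback bodies are an assumption, not shown in the diff, but the values "Mistral" and "GPT-4" match the branches in utils/haystack.py below:

import streamlit as st

# Callbacks named in the diff; their bodies are assumed here. Presumably each
# records the chosen model in session state, so on rerun a model is set and
# the picker buttons are no longer drawn.
def mistral_pressed():
    st.session_state["model"] = "Mistral"

def openai_pressed():
    st.session_state["model"] = "GPT-4"

if st.session_state.get("model") == None:
    # Two-column layout from the diff: wide Mistral button, narrow GPT-4 button.
    mistral, openai = st.columns([2, 1], gap="small")
    with mistral:
        st.button("Mistral-7B-Instruct-v0.2", on_click=mistral_pressed, type="primary")
    with openai:
        st.button("GPT-4", on_click=openai_pressed, type="primary")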
utils/haystack.py
CHANGED

@@ -4,6 +4,7 @@ from haystack.components.builders.prompt_builder import PromptBuilder
 from haystack.components.generators import HuggingFaceTGIGenerator, OpenAIGenerator
 from .hackernews_fetcher import HackernewsFetcher
 
+@st.cache_resource
 def start_haystack(key, model):
     prompt_template = """
 You will be provided one or more top HackerNews posts, followed by their URL.

@@ -19,7 +20,7 @@ Summaries:
 
     prompt_builder = PromptBuilder(template=prompt_template)
     if model == "Mistral":
-        llm = HuggingFaceTGIGenerator("mistralai/
+        llm = HuggingFaceTGIGenerator("mistralai/Mistral-7B-Instruct-v0.2", token=key)
     elif model == "GPT-4":
         llm = OpenAIGenerator(api_key=key, model="gpt-4")
     fetcher = HackernewsFetcher()

@@ -34,7 +35,6 @@ Summaries:
     return pipe
 
 
-@st.cache_data(show_spinner=True)
 def query(top_k, _pipeline):
     try:
         run_args = {"hackernews_fetcher": {"top_k": top_k}}
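Why the caching change: `st.cache_data` is meant for serializable return values and hashes its inputs, which is why `query` takes the pipeline as `_pipeline` — the leading underscore tells Streamlit not to hash that argument. Caching `query`'s output would also pin stale HackerNews results across reruns. The commit instead caches pipeline construction with `st.cache_resource`, Streamlit's cache for unserializable objects like a live Haystack pipeline. A minimal sketch of the resulting pattern, assuming the pipeline is run with `data=run_args` as in the surrounding code:

import streamlit as st
from haystack import Pipeline

@st.cache_resource  # build the pipeline once per (key, model) combination
def start_haystack(key, model):
    pipe = Pipeline()
    # ... add and connect the fetcher, prompt builder, and LLM as in the diff ...
    return pipe

# Uncached on purpose: each call should fetch the current top_k HackerNews posts.
def query(top_k, _pipeline):
    run_args = {"hackernews_fetcher": {"top_k": top_k}}
    return _pipeline.run(data=run_args)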