import os

import gradio as gr
from groq import Groq

# Read the API key from the environment (note the non-standard variable name)
api_key = os.getenv("GROQ_API_KEY2")

# Initialize the Groq client with the API key
client = Groq(api_key=api_key)

# System message prepended to every conversation
system_prompt = {
    "role": "system",
    "content": "You are a useful and human assistant. You reply with efficient answers."
}
# Chat handler: rebuilds the message list from Gradio's history and streams the reply
async def chat_groq(message, history):
    messages = [system_prompt]

    # history is a list of (user, assistant) tuples from gr.ChatInterface
    for msg in history:
        messages.append({"role": "user", "content": str(msg[0])})
        messages.append({"role": "assistant", "content": str(msg[1])})

    messages.append({"role": "user", "content": str(message)})

    response_content = ''

    # Streamed call to the `llama-3.2-90b-text-preview` model
    stream = client.chat.completions.create(
        model="llama-3.2-90b-text-preview",
        messages=messages,
        max_tokens=1024,
        temperature=1.3,
        stream=True
    )

    # Accumulate the streamed deltas and yield the partial response after each chunk
    for chunk in stream:
        content = chunk.choices[0].delta.content
        if content:
            response_content += content
            yield response_content
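# Because chat_groq yields progressively longer strings, Gradio renders the
# reply incrementally in the UI; the queue (enabled via demo.queue() below)
# is required for generator-based handlers to stream.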
# Gradio interface (Monochrome theme; hide the clear/undo/retry buttons)
with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.ChatInterface(chat_groq,
                     clear_btn=None,
                     undo_btn=None,
                     retry_btn=None)
demo.queue()
demo.launch()
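For a quick smoke test without the browser UI, a minimal sketch of driving chat_groq directly is shown below. It assumes the script above is saved as app.py with demo.launch() commented out or guarded under if __name__ == "__main__" (it blocks on import), that GROQ_API_KEY2 is exported, and the history pair is illustrative:

import asyncio
from app import chat_groq  # hypothetical module name for the script above

async def main():
    # Illustrative prior exchange in Gradio's (user, assistant) tuple format
    history = [("Hello", "Hi! How can I help you today?")]
    final = ""
    # chat_groq is an async generator; each yield is the accumulated reply so far
    async for partial in chat_groq("Give me one productivity tip.", history):
        final = partial
    print(final)

asyncio.run(main())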