johnbridges committed
Commit f0122f7 · 1 Parent(s): 4625807

fixed requirements

Files changed (2)
  1. oa_server.py +5 -5
  2. requirements.txt +8 -5
oa_server.py CHANGED
@@ -4,7 +4,6 @@ import json, time, uuid, logging
 from typing import Any, Dict, List, AsyncIterable, Optional

 from rabbit_repo import RabbitRepo
-from openai_backend import OpenAICompatChatBackend, OpenAIImagesBackend

 logger = logging.getLogger(__name__)

@@ -32,12 +31,13 @@ class OpenAIServers:
       - 'oaImagesGenerate' -> handle_images_generate
     Uses RabbitRepo.publish(...) to emit CloudEvent-wrapped OpenAI JSON.
     """
+
     def __init__(self, publisher: RabbitRepo,
-                 *, chat_backend: Optional[ChatBackend] = None,
-                 images_backend: Optional[ImagesBackend] = None):
+                 *, chat_backend=None, images_backend=None):
         self._pub = publisher
-        self._chat = chat_backend or OpenAICompatChatBackend()
-        self._img = images_backend or OpenAIImagesBackend()
+        self._chat = chat_backend
+        self._img = images_backend
+

     # -------- Chat Completions --------
     async def handle_chat_create(self, data: Dict[str, Any]) -> None:
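
With the defaults gone, OpenAIServers no longer builds OpenAICompatChatBackend / OpenAIImagesBackend itself; whatever backend the Space should use must now be injected by the caller (or left as None). A minimal wiring sketch, assuming a hypothetical VllmChatBackend like the one outlined after the requirements diff below; RabbitRepo construction details are omitted and are not part of this commit:

    # Sketch only: wiring OpenAIServers after this change.
    # VllmChatBackend is hypothetical (not part of this commit); passing None
    # simply leaves that capability unbacked.
    from rabbit_repo import RabbitRepo
    from oa_server import OpenAIServers

    publisher = RabbitRepo()                    # connection/exchange setup omitted
    servers = OpenAIServers(
        publisher,
        chat_backend=VllmChatBackend(),         # hypothetical local vLLM backend
        images_backend=None,                    # no images backend wired in
    )
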
requirements.txt CHANGED
@@ -1,7 +1,10 @@
 gradio==5.42.0
-fastapi==0.116.1
-uvicorn==0.35.0
+fastapi==0.115.5          # latest stable FastAPI
+uvicorn==0.30.6           # modern uvicorn, compatible
 aio-pika==9.5.7
-pydantic==2.11.1
-pydantic-settings==2.10.1
-spaces
+pydantic==2.10.5
+pydantic-settings==2.3.4
+spaces                    # HF Spaces decorator
+vllm>=0.6.3               # local LLM runner
+torch>=2.5.0              # GPU-enabled if space has CUDA
+transformers>=4.44.0      # to support HF safetensors + trust_remote_code
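
The new vllm/torch/transformers pins suggest the injected chat backend is meant to run a local model on the Space's GPU. A rough sketch of what such a backend could look like; the method name complete, the OpenAI-style message format, and the model id are assumptions (the real ChatBackend protocol is defined elsewhere in the repo), only the vllm calls themselves are actual library API:

    # Hypothetical vLLM-backed chat backend for OpenAIServers(chat_backend=...).
    from typing import Any, Dict, List

    from vllm import LLM, SamplingParams


    class VllmChatBackend:
        def __init__(self, model: str = "Qwen/Qwen2.5-7B-Instruct"):  # placeholder model id
            # Loads the model once at startup; uses CUDA if the Space has a GPU.
            self._llm = LLM(model=model)

        def complete(self, messages: List[Dict[str, Any]], **params: Any) -> str:
            # LLM.chat() applies the model's chat template to OpenAI-style messages.
            sampling = SamplingParams(
                temperature=params.get("temperature", 0.7),
                max_tokens=params.get("max_tokens", 512),
            )
            outputs = self._llm.chat(messages, sampling)
            return outputs[0].outputs[0].text
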