Speedofmastery commited on
Commit
e4b3d6c
Β·
verified Β·
1 Parent(s): 5817bba

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +12 -0
  2. README.md +40 -10
  3. app.py +391 -0
  4. requirements.txt +5 -0
Dockerfile ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.10-slim
2
+
3
+ WORKDIR /app
4
+
5
+ COPY requirements.txt .
6
+ RUN pip install --no-cache-dir -r requirements.txt
7
+
8
+ COPY app.py .
9
+
10
+ EXPOSE 7860
11
+
12
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
README.md CHANGED
@@ -1,10 +1,40 @@
1
- ---
2
- title: Orynxml Api
3
- emoji: πŸ“š
4
- colorFrom: red
5
- colorTo: purple
6
- sdk: docker
7
- pinned: false
8
- ---
9
-
10
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: ORYNXML REST API Backend
3
+ emoji: πŸš€
4
+ colorFrom: blue
5
+ colorTo: purple
6
+ sdk: docker
7
+ pinned: false
8
+ ---
9
+
10
+ # ORYNXML REST API Backend
11
+
12
+ FastAPI-based REST API backend for ORYNXML AI Platform.
13
+
14
+ ## Features
15
+ - User Authentication (Mobile + Password)
16
+ - 211 AI Models via HuggingFace Inference API
17
+ - Cloudflare Integration (R2, D1, KV, Durable Objects)
18
+ - CORS enabled for frontend access
19
+
20
+ ## API Endpoints
21
+
22
+ ### Authentication
23
+ - `POST /auth/signup` - Register new user
24
+ - `POST /auth/login` - Login user
25
+
26
+ ### AI Operations
27
+ - `POST /ai/chat` - AI chat interface
28
+ - `POST /ai/generate` - Generic AI generation
29
+ - `GET /models/list` - List all models
30
+
31
+ ### Status
32
+ - `GET /health` - Health check
33
+ - `GET /cloudflare/status` - Cloudflare services status
34
+
35
+ ## Frontend
36
+ https://orynxml-ai.pages.dev
37
+
38
+ ## Architecture
39
+ This backend uses HuggingFace Inference API (no local GPU needed).
40
+ All AI models run on HuggingFace's infrastructure.
app.py ADDED
@@ -0,0 +1,391 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ORYNXML REST API Backend - FastAPI with 211 AI Models
3
+ Provides REST API endpoints for HTML frontend at orynxml-ai.pages.dev
4
+ """
5
+
6
+ from fastapi import FastAPI, HTTPException, Depends
7
+ from fastapi.middleware.cors import CORSMiddleware
8
+ from fastapi.responses import JSONResponse
9
+ from pydantic import BaseModel
10
+ from typing import Optional, List, Dict, Any
11
+ import os
12
+ import sqlite3
13
+ import hashlib
14
+ from datetime import datetime, timedelta
15
+ from huggingface_hub import InferenceClient
16
+ import uvicorn
17
+
18
+ # HuggingFace Inference Client
19
+ HF_TOKEN = os.getenv("HF_TOKEN", "")
20
+ inference_client = InferenceClient(token=HF_TOKEN if HF_TOKEN else None)
21
+
22
+ # Cloudflare Configuration
23
+ CLOUDFLARE_CONFIG = {
24
+ "api_token": os.getenv("CLOUDFLARE_API_TOKEN", ""),
25
+ "account_id": os.getenv("CLOUDFLARE_ACCOUNT_ID", "62af59a7ac82b29543577ee6800735ee"),
26
+ "d1_database_id": os.getenv("CLOUDFLARE_D1_DATABASE_ID", "6d887f74-98ac-4db7-bfed-8061903d1f6c"),
27
+ "r2_bucket_name": os.getenv("CLOUDFLARE_R2_BUCKET_NAME", "openmanus-storage"),
28
+ "kv_namespace_id": os.getenv("CLOUDFLARE_KV_NAMESPACE_ID", "87f4aa01410d4fb19821f61006f94441"),
29
+ "kv_namespace_cache": os.getenv("CLOUDFLARE_KV_CACHE_ID", "7b58c88292c847d1a82c8e0dd5129f37"),
30
+ "durable_objects_sessions": "AGENT_SESSIONS",
31
+ "durable_objects_chatrooms": "CHAT_ROOMS",
32
+ }
33
+
34
+ # AI Models Dictionary (211 models)
35
+ AI_MODELS = {
36
+ "Text Generation": {
37
+ "Qwen Models": [
38
+ "Qwen/Qwen2.5-72B-Instruct",
39
+ "Qwen/Qwen2.5-Coder-32B-Instruct",
40
+ "Qwen/Qwen2.5-Math-72B-Instruct",
41
+ # ... (add all 35 Qwen models)
42
+ ],
43
+ "DeepSeek Models": [
44
+ "deepseek-ai/deepseek-llm-67b-chat",
45
+ "deepseek-ai/DeepSeek-V2-Chat",
46
+ # ... (add all 17 DeepSeek models)
47
+ ],
48
+ },
49
+ "Image Generation": [
50
+ "black-forest-labs/FLUX.1-dev",
51
+ "black-forest-labs/FLUX.1-schnell",
52
+ "stabilityai/stable-diffusion-xl-base-1.0",
53
+ # ... (add all image gen models)
54
+ ],
55
+ "Image Editing": [
56
+ "timbrooks/instruct-pix2pix",
57
+ "lllyasviel/control_v11p_sd15_canny",
58
+ # ... (add all editing models)
59
+ ],
60
+ "Video Generation": {
61
+ "Text-to-Video": [
62
+ "ali-vilab/text-to-video-ms-1.7b",
63
+ # ...
64
+ ],
65
+ "Image-to-Video": [
66
+ "stabilityai/stable-video-diffusion-img2vid",
67
+ # ...
68
+ ],
69
+ },
70
+ "Audio": {
71
+ "TTS": ["suno/bark", "microsoft/speecht5_tts"],
72
+ "STT": ["openai/whisper-large-v3"],
73
+ },
74
+ "Translation": {
75
+ "Arabic-English": [
76
+ "Helsinki-NLP/opus-mt-ar-en",
77
+ "Helsinki-NLP/opus-mt-en-ar",
78
+ ]
79
+ },
80
+ }
81
+
82
+ # Initialize FastAPI
83
+ app = FastAPI(
84
+ title="ORYNXML AI Platform API",
85
+ description="REST API for 211 AI models with authentication and Cloudflare integration",
86
+ version="1.0.0",
87
+ )
88
+
89
+ # CORS Configuration
90
+ app.add_middleware(
91
+ CORSMiddleware,
92
+ allow_origins=["*"], # In production, restrict to your domain
93
+ allow_credentials=True,
94
+ allow_methods=["*"],
95
+ allow_headers=["*"],
96
+ )
97
+
98
+ # Database initialization
99
+ def init_database():
100
+ """Initialize SQLite database for user authentication"""
101
+ conn = sqlite3.connect("openmanus.db")
102
+ cursor = conn.cursor()
103
+ cursor.execute("""
104
+ CREATE TABLE IF NOT EXISTS users (
105
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
106
+ mobile TEXT UNIQUE NOT NULL,
107
+ name TEXT NOT NULL,
108
+ password_hash TEXT NOT NULL,
109
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
110
+ )
111
+ """)
112
+ conn.commit()
113
+ conn.close()
114
+
115
+ init_database()
116
+
117
+ # Pydantic Models
118
+ class SignupRequest(BaseModel):
119
+ mobile: str
120
+ name: str
121
+ password: str
122
+
123
+ class LoginRequest(BaseModel):
124
+ mobile: str
125
+ password: str
126
+
127
+ class AIRequest(BaseModel):
128
+ model: str
129
+ prompt: str
130
+ max_tokens: Optional[int] = 2000
131
+ temperature: Optional[float] = 0.7
132
+
133
+ class ChatRequest(BaseModel):
134
+ message: str
135
+ model: Optional[str] = "Qwen/Qwen2.5-72B-Instruct"
136
+ history: Optional[List[Dict[str, str]]] = []
137
+
138
+ # Helper Functions
139
+ def hash_password(password: str) -> str:
140
+ """Hash password using SHA-256"""
141
+ return hashlib.sha256(password.encode()).hexdigest()
142
+
143
+ def verify_password(password: str, password_hash: str) -> bool:
144
+ """Verify password against hash"""
145
+ return hash_password(password) == password_hash
146
+
147
+ # API Endpoints
148
+
149
+ @app.get("/")
150
+ async def root():
151
+ """Root endpoint"""
152
+ return {
153
+ "message": "ORYNXML AI Platform API",
154
+ "version": "1.0.0",
155
+ "status": "running",
156
+ "models": 211,
157
+ "endpoints": {
158
+ "health": "/health",
159
+ "auth": "/auth/signup, /auth/login",
160
+ "ai": "/ai/chat, /ai/generate",
161
+ "models": "/models/list",
162
+ }
163
+ }
164
+
165
+ @app.get("/health")
166
+ async def health_check():
167
+ """Health check endpoint"""
168
+ return {
169
+ "status": "healthy",
170
+ "timestamp": datetime.now().isoformat(),
171
+ "gpu_available": False, # We're using HF API, not local GPU
172
+ "backend": "HuggingFace Inference API",
173
+ "models_available": 211,
174
+ "cloudflare_configured": bool(CLOUDFLARE_CONFIG["api_token"]),
175
+ }
176
+
177
+ @app.post("/auth/signup")
178
+ async def signup(request: SignupRequest):
179
+ """User registration endpoint"""
180
+ try:
181
+ if len(request.password) < 6:
182
+ raise HTTPException(status_code=400, detail="Password must be at least 6 characters")
183
+
184
+ conn = sqlite3.connect("openmanus.db")
185
+ cursor = conn.cursor()
186
+
187
+ # Check if user exists
188
+ cursor.execute("SELECT mobile FROM users WHERE mobile = ?", (request.mobile,))
189
+ if cursor.fetchone():
190
+ conn.close()
191
+ raise HTTPException(status_code=400, detail="Mobile number already registered")
192
+
193
+ # Insert new user
194
+ password_hash = hash_password(request.password)
195
+ cursor.execute(
196
+ "INSERT INTO users (mobile, name, password_hash) VALUES (?, ?, ?)",
197
+ (request.mobile, request.name, password_hash)
198
+ )
199
+ conn.commit()
200
+ conn.close()
201
+
202
+ return {
203
+ "success": True,
204
+ "message": "Account created successfully",
205
+ "mobile": request.mobile,
206
+ "name": request.name
207
+ }
208
+
209
+ except HTTPException:
210
+ raise
211
+ except Exception as e:
212
+ raise HTTPException(status_code=500, detail=f"Registration failed: {str(e)}")
213
+
214
+ @app.post("/auth/login")
215
+ async def login(request: LoginRequest):
216
+ """User login endpoint"""
217
+ try:
218
+ conn = sqlite3.connect("openmanus.db")
219
+ cursor = conn.cursor()
220
+
221
+ cursor.execute(
222
+ "SELECT name, password_hash FROM users WHERE mobile = ?",
223
+ (request.mobile,)
224
+ )
225
+ result = cursor.fetchone()
226
+ conn.close()
227
+
228
+ if not result:
229
+ raise HTTPException(status_code=401, detail="Invalid mobile number or password")
230
+
231
+ name, password_hash = result
232
+
233
+ if not verify_password(request.password, password_hash):
234
+ raise HTTPException(status_code=401, detail="Invalid mobile number or password")
235
+
236
+ return {
237
+ "success": True,
238
+ "message": "Login successful",
239
+ "user": {
240
+ "mobile": request.mobile,
241
+ "name": name
242
+ },
243
+ "token": f"session_{hash_password(request.mobile + str(datetime.now()))[:32]}"
244
+ }
245
+
246
+ except HTTPException:
247
+ raise
248
+ except Exception as e:
249
+ raise HTTPException(status_code=500, detail=f"Login failed: {str(e)}")
250
+
251
+ @app.post("/ai/chat")
252
+ async def ai_chat(request: ChatRequest):
253
+ """AI chat endpoint - main endpoint for AI interactions"""
254
+ try:
255
+ # Prepare messages for chat completion
256
+ messages = []
257
+
258
+ # Add history
259
+ for msg in request.history:
260
+ messages.append({
261
+ "role": msg.get("role", "user"),
262
+ "content": msg.get("content", "")
263
+ })
264
+
265
+ # Add current message
266
+ messages.append({
267
+ "role": "user",
268
+ "content": request.message
269
+ })
270
+
271
+ # Call HuggingFace Inference API
272
+ response_text = ""
273
+ for message in inference_client.chat_completion(
274
+ model=request.model,
275
+ messages=messages,
276
+ max_tokens=2000,
277
+ temperature=0.7,
278
+ stream=True
279
+ ):
280
+ if hasattr(message, 'choices') and len(message.choices) > 0:
281
+ delta = message.choices[0].delta
282
+ if hasattr(delta, 'content') and delta.content:
283
+ response_text += delta.content
284
+
285
+ return {
286
+ "success": True,
287
+ "response": response_text,
288
+ "model": request.model,
289
+ "timestamp": datetime.now().isoformat()
290
+ }
291
+
292
+ except Exception as e:
293
+ raise HTTPException(status_code=500, detail=f"AI generation failed: {str(e)}")
294
+
295
+ @app.post("/ai/generate")
296
+ async def ai_generate(request: AIRequest):
297
+ """Generic AI generation endpoint"""
298
+ try:
299
+ # Determine task type based on model
300
+ model_lower = request.model.lower()
301
+
302
+ if "flux" in model_lower or "stable-diffusion" in model_lower:
303
+ # Image generation
304
+ return {
305
+ "success": True,
306
+ "type": "image",
307
+ "message": f"Image generation with {request.model}",
308
+ "prompt": request.prompt,
309
+ "note": "Image will be generated using HuggingFace Inference API"
310
+ }
311
+
312
+ elif "video" in model_lower:
313
+ # Video generation
314
+ return {
315
+ "success": True,
316
+ "type": "video",
317
+ "message": f"Video generation with {request.model}",
318
+ "prompt": request.prompt,
319
+ "note": "Video will be generated using HuggingFace Inference API"
320
+ }
321
+
322
+ else:
323
+ # Text generation (default)
324
+ messages = [{"role": "user", "content": request.prompt}]
325
+ response_text = ""
326
+
327
+ for message in inference_client.chat_completion(
328
+ model=request.model,
329
+ messages=messages,
330
+ max_tokens=request.max_tokens,
331
+ temperature=request.temperature,
332
+ stream=True
333
+ ):
334
+ if hasattr(message, 'choices') and len(message.choices) > 0:
335
+ delta = message.choices[0].delta
336
+ if hasattr(delta, 'content') and delta.content:
337
+ response_text += delta.content
338
+
339
+ return {
340
+ "success": True,
341
+ "type": "text",
342
+ "response": response_text,
343
+ "model": request.model,
344
+ "timestamp": datetime.now().isoformat()
345
+ }
346
+
347
+ except Exception as e:
348
+ raise HTTPException(status_code=500, detail=f"Generation failed: {str(e)}")
349
+
350
+ @app.get("/models/list")
351
+ async def list_models():
352
+ """List all available AI models"""
353
+ return {
354
+ "total": 211,
355
+ "categories": AI_MODELS,
356
+ "note": "All models are accessed via HuggingFace Inference API"
357
+ }
358
+
359
+ @app.get("/cloudflare/status")
360
+ async def cloudflare_status():
361
+ """Cloudflare services status"""
362
+ services = []
363
+
364
+ if CLOUDFLARE_CONFIG["api_token"]:
365
+ services.append("βœ… API Token Configured")
366
+ if CLOUDFLARE_CONFIG["d1_database_id"]:
367
+ services.append("βœ… D1 Database Connected")
368
+ if CLOUDFLARE_CONFIG["r2_bucket_name"]:
369
+ services.append("βœ… R2 Storage Connected")
370
+ if CLOUDFLARE_CONFIG["kv_namespace_id"]:
371
+ services.append("βœ… KV Sessions Connected")
372
+ if CLOUDFLARE_CONFIG["kv_namespace_cache"]:
373
+ services.append("βœ… KV Cache Connected")
374
+ if CLOUDFLARE_CONFIG["durable_objects_sessions"]:
375
+ services.append("βœ… Durable Objects (Agent Sessions)")
376
+ if CLOUDFLARE_CONFIG["durable_objects_chatrooms"]:
377
+ services.append("βœ… Durable Objects (Chat Rooms)")
378
+
379
+ return {
380
+ "configured": len(services) > 0,
381
+ "services": services,
382
+ "account_id": CLOUDFLARE_CONFIG["account_id"]
383
+ }
384
+
385
+ if __name__ == "__main__":
386
+ uvicorn.run(
387
+ app,
388
+ host="0.0.0.0",
389
+ port=7860,
390
+ log_level="info"
391
+ )
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ fastapi==0.115.0
2
+ uvicorn[standard]==0.30.6
3
+ pydantic==2.9.2
4
+ huggingface-hub==0.26.2
5
+ python-multipart==0.0.12