Mark-Lasfar committed
Commit 8ad4420 · 1 Parent(s): c050ce0
Update Model

Files changed:
- api/auth.py +5 -4
- api/endpoints.py +2 -1
- main.py +2 -1
- requirements.txt +5 -6
- utils/generation.py +4 -3
api/auth.py
CHANGED
@@ -14,12 +14,13 @@ import os
 import logging
 from api.models import UserRead, UserCreate, UserUpdate
 
-
 # Setup logging
 logger = logging.getLogger(__name__)
 
+# Cookie transport for JWT
 cookie_transport = CookieTransport(cookie_max_age=3600)
 
+# JWT Secret
 SECRET = os.getenv("JWT_SECRET")
 if not SECRET or len(SECRET) < 32:
     logger.error("JWT_SECRET is not set or too short.")
@@ -110,7 +111,7 @@ google_oauth_router = get_oauth_router(
     google_oauth_client,
     auth_backend,
     get_user_manager,
-    state_secret=SECRET,
+    state_secret=SECRET,
     associate_by_email=True,
     redirect_url="https://mgzon-mgzon-app.hf.space/auth/google/callback",
 )
@@ -119,13 +120,13 @@ github_oauth_router = get_oauth_router(
     github_oauth_client,
     auth_backend,
     get_user_manager,
-    state_secret=SECRET,
+    state_secret=SECRET,
     associate_by_email=True,
     redirect_url="https://mgzon-mgzon-app.hf.space/auth/github/callback",
 )
 
 fastapi_users = FastAPIUsers[User, int](
-    get_user_db
+    get_user_manager,  # dependency corrected from get_user_db to get_user_manager
     [auth_backend],
 )
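
For context, here is a minimal sketch of the dependency chain that the fix above relies on: FastAPIUsers must be given the user-manager dependency, not the raw database adapter, which is why the commit swaps get_user_db for get_user_manager. This follows the standard fastapi-users pattern; api.database.get_async_session and the UserManager body are assumptions for illustration, not code taken from this repo.

import os

from fastapi import Depends
from fastapi_users import BaseUserManager, FastAPIUsers, IntegerIDMixin
from fastapi_users.authentication import AuthenticationBackend, CookieTransport, JWTStrategy
from fastapi_users.db import SQLAlchemyUserDatabase

from api.database import get_async_session  # assumed async-session dependency (illustrative)
from api.models import User

SECRET = os.getenv("JWT_SECRET", "change-me-to-a-32-plus-char-secret")

# Cookie transport and JWT strategy, as in the hunk above.
cookie_transport = CookieTransport(cookie_max_age=3600)

def get_jwt_strategy() -> JWTStrategy:
    return JWTStrategy(secret=SECRET, lifetime_seconds=3600)

auth_backend = AuthenticationBackend(
    name="jwt",
    transport=cookie_transport,
    get_strategy=get_jwt_strategy,
)

async def get_user_db(session=Depends(get_async_session)):
    # Wraps the SQLAlchemy session in the adapter fastapi-users expects.
    yield SQLAlchemyUserDatabase(session, User)

class UserManager(IntegerIDMixin, BaseUserManager[User, int]):
    reset_password_token_secret = SECRET
    verification_token_secret = SECRET

async def get_user_manager(user_db=Depends(get_user_db)):
    # FastAPIUsers consumes the user *manager* dependency, not the DB adapter,
    # which is why the diff replaces get_user_db with get_user_manager.
    yield UserManager(user_db)

fastapi_users = FastAPIUsers[User, int](get_user_manager, [auth_backend])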
api/endpoints.py
CHANGED
@@ -1,3 +1,4 @@
+# api/endpoints.py
 import os
 import uuid
 from fastapi import APIRouter, Depends, HTTPException, Request, status, UploadFile, File
@@ -31,7 +32,7 @@ if not BACKUP_HF_TOKEN:
 ROUTER_API_URL = os.getenv("ROUTER_API_URL", "https://router.huggingface.co")
 API_ENDPOINT = os.getenv("API_ENDPOINT", "https://api-inference.huggingface.co")
 FALLBACK_API_ENDPOINT = os.getenv("FALLBACK_API_ENDPOINT", "https://api-inference.huggingface.co")
-MODEL_NAME = os.getenv("MODEL_NAME", "openai/gpt-oss-120b")  #
+MODEL_NAME = os.getenv("MODEL_NAME", "openai/gpt-oss-120b")  # without the :cerebras suffix
 SECONDARY_MODEL_NAME = os.getenv("SECONDARY_MODEL_NAME", "mistralai/Mixtral-8x7B-Instruct-v0.1")
 TERTIARY_MODEL_NAME = os.getenv("TERTIARY_MODEL_NAME", "gpt2")
 CLIP_BASE_MODEL = os.getenv("CLIP_BASE_MODEL", "Salesforce/blip-image-captioning-large")
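
The "without the :cerebras suffix" comment reflects that the provider qualifier is applied at request time rather than baked into MODEL_NAME. A minimal sketch of that idea (not this repo's exact code), assuming the Hugging Face router accepts "org/model:provider" ids while the plain inference API does not:

import os

ROUTER_API_URL = os.getenv("ROUTER_API_URL", "https://router.huggingface.co")
API_ENDPOINT = os.getenv("API_ENDPOINT", "https://api-inference.huggingface.co")
MODEL_NAME = os.getenv("MODEL_NAME", "openai/gpt-oss-120b")  # no provider suffix

def qualified_model(model: str, provider: str | None) -> str:
    # Append ":provider" only when routing through a provider-aware endpoint.
    return f"{model}:{provider}" if provider else model

print(qualified_model(MODEL_NAME, "cerebras"))  # openai/gpt-oss-120b:cerebras
print(qualified_model(MODEL_NAME, None))        # openai/gpt-oss-120b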
main.py
CHANGED
@@ -15,6 +15,7 @@ from fastapi.middleware.cors import CORSMiddleware
 from api.endpoints import router as api_router
 from api.auth import fastapi_users, auth_backend, current_active_user, get_auth_router
 from api.database import get_db, engine, Base
+from api.models import User, UserRead, UserCreate, Conversation, UserUpdate  # import User
 from motor.motor_asyncio import AsyncIOMotorClient
 from pydantic import BaseModel
 from typing import List
@@ -26,7 +27,7 @@ from pathlib import Path
 from hashlib import md5
 from datetime import datetime
 from httpx_oauth.exceptions import GetIdEmailError
-
+import re
 
 # Setup logging
 logging.basicConfig(level=logging.INFO)
requirements.txt
CHANGED
@@ -1,6 +1,6 @@
 fastapi==0.115.2
 packaging>=23.0
-uvicorn==0.30.6
+uvicorn==0.32.0  # updated from 0.30.6 to 0.32.0 for compatibility
 gradio>=4.44.1
 openai==1.42.0
 httpx==0.27.0
@@ -25,15 +25,14 @@ Pillow==10.4.0
 urllib3==2.0.7
 itsdangerous
 protobuf==3.19.6
-fastapi-users[sqlalchemy,
-
-sqlalchemy>=2.0.0
+fastapi-users[sqlalchemy,oauth2]>=14.0.0  # updated from 13.0.0 to 14.0.0 for compatibility with api/auth.py
+flash-attn>=2.0.0
+sqlalchemy>=2.0.35  # updated from 2.0.0 to 2.0.35 for compatibility with fastapi-users
 python-jose[cryptography]>=3.3.0
 passlib[bcrypt]>=1.7.4
 httpx-oauth
 python-multipart
 aiofiles
-motor
+motor>=3.6.0
 redis
 markdown2
-
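
A small, illustrative check (not part of the repo) that an installed environment meets or exceeds the versions this commit moves to; packaging is already a dependency above:

from importlib.metadata import version

from packaging.version import Version

# Minimum versions introduced by this commit.
minimums = {
    "fastapi-users": "14.0.0",
    "sqlalchemy": "2.0.35",
    "uvicorn": "0.32.0",
    "motor": "3.6.0",
}

for package, minimum in minimums.items():
    installed = Version(version(package))
    status = "OK" if installed >= Version(minimum) else "TOO OLD"
    print(f"{package}: {installed} (needs >= {minimum}) -> {status}")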
utils/generation.py
CHANGED
@@ -1,3 +1,4 @@
+# utils/generation.py
 import os
 import re
 import json
@@ -36,7 +37,7 @@ BACKUP_HF_TOKEN = os.getenv("BACKUP_HF_TOKEN")
 ROUTER_API_URL = os.getenv("ROUTER_API_URL", "https://router.huggingface.co")
 API_ENDPOINT = os.getenv("API_ENDPOINT", "https://api-inference.huggingface.co")
 FALLBACK_API_ENDPOINT = os.getenv("FALLBACK_API_ENDPOINT", "https://api-inference.huggingface.co")
-MODEL_NAME = os.getenv("MODEL_NAME", "openai/gpt-oss-120b")  #
+MODEL_NAME = os.getenv("MODEL_NAME", "openai/gpt-oss-120b")  # without the :cerebras suffix
 SECONDARY_MODEL_NAME = os.getenv("SECONDARY_MODEL_NAME", "mistralai/Mixtral-8x7B-Instruct-v0.1")
 TERTIARY_MODEL_NAME = os.getenv("TERTIARY_MODEL_NAME", "gpt2")
 CLIP_BASE_MODEL = os.getenv("CLIP_BASE_MODEL", "Salesforce/blip-image-captioning-large")
@@ -77,7 +78,7 @@ def check_model_availability(model_name: str, api_key: str) -> tuple[bool, str,
     if response.status_code == 200:
         data = response.json().get("data", {})
         providers = data.get("providers", [])
-        # Select the first available provider (e.g., '
+        # Select the first available provider (e.g., 'cerebras')
         for provider in providers:
             if provider.get("status") == "live":
                 provider_name = provider.get("provider")
@@ -682,7 +683,7 @@ def generate(message, history, system_prompt, temperature, reasoning_effort, ena
 
     def make_raw_preview() -> str:
         return (
-            "```text\n"
+            "```text\n"
            "Analysis (live):\n"
            f"{raw_analysis}\n\n"
            "Response (draft):\n"
|