#!/usr/bin/env python3
"""
Deployment script for Petite Elle L'Aime 3 Gradio Application
"""
import os
import sys
import subprocess
import argparse
import yaml
from pathlib import Path
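
# Example invocations, based on the flags defined in main() below; the file
# name "deploy.py" is assumed here and may differ in the actual Space:
#   python deploy.py                         # full run: install, check, test, start
#   python deploy.py --check                 # only verify dependencies and hardware
#   python deploy.py --start --port 7860 --host 0.0.0.0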

def load_config():
    """Load configuration from config.yaml"""
    config_path = Path("config.yaml")
    if config_path.exists():
        with open(config_path, 'r') as f:
            return yaml.safe_load(f)
    return {}
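
# A minimal config.yaml sketch. Only the 'ui' keys below are read by this
# script (in start_application); any other sections are hypothetical:
#
#   ui:
#     server_name: "0.0.0.0"
#     server_port: 7860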

def check_dependencies():
    """Check if all dependencies are installed"""
    print("🔍 Checking dependencies...")
    required_packages = [
        'gradio',
        'torch',
        'transformers',
        'accelerate'
    ]
    missing_packages = []
    for package in required_packages:
        try:
            __import__(package)
            print(f"✅ {package}")
        except ImportError:
            missing_packages.append(package)
            print(f"❌ {package}")
    if missing_packages:
        print(f"\n⚠️ Missing packages: {', '.join(missing_packages)}")
        print("Run: pip install -r requirements.txt")
        return False
    return True

def check_hardware():
    """Check hardware requirements"""
    print("\n🔍 Checking hardware...")
    import psutil
    # Check RAM
    ram_gb = psutil.virtual_memory().total / (1024**3)
    print(f"RAM: {ram_gb:.1f} GB")
    if ram_gb < 8:
        print("⚠️ Warning: Less than 8GB RAM detected")
        print("   The application may run slowly or fail to load the model")
    else:
        print("✅ RAM requirements met")
    # Check GPU
    try:
        import torch
        if torch.cuda.is_available():
            gpu_name = torch.cuda.get_device_name(0)
            gpu_memory = torch.cuda.get_device_properties(0).total_memory / (1024**3)
            print(f"GPU: {gpu_name} ({gpu_memory:.1f} GB)")
        else:
            print("GPU: Not available (will use CPU)")
    except Exception:  # torch may be missing or CUDA probing may fail
        print("GPU: Unable to detect")
    return True
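
# Note: check_hardware() imports psutil, which is not in the required_packages
# list above; it is assumed to be provided via requirements.txt.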

def install_dependencies():
    """Install dependencies from requirements.txt"""
    print("\n📦 Installing dependencies...")
    if not os.path.exists("requirements.txt"):
        print("❌ requirements.txt not found")
        return False
    try:
        subprocess.run([sys.executable, "-m", "pip", "install", "-r", "requirements.txt"],
                       check=True, capture_output=True, text=True)
        print("✅ Dependencies installed successfully")
        return True
    except subprocess.CalledProcessError as e:
        print(f"❌ Failed to install dependencies: {e}")
        return False

def run_tests():
    """Run the test suite"""
    print("\n🧪 Running tests...")
    if not os.path.exists("test_app.py"):
        print("❌ test_app.py not found")
        return False
    try:
        result = subprocess.run([sys.executable, "test_app.py"],
                                capture_output=True, text=True)
        print(result.stdout)
        if result.stderr:
            print(result.stderr)
        return result.returncode == 0
    except Exception as e:
        print(f"❌ Failed to run tests: {e}")
        return False
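
# Note: the return-code check above assumes test_app.py exits with a non-zero
# status when a test fails (the default behavior of unittest.main() and pytest).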

def start_application(port=None, host=None):
    """Start the Gradio application"""
    print("\n🚀 Starting application...")
    config = load_config()
    ui_config = config.get('ui', {})
    # Use provided arguments or config defaults
    port = port or ui_config.get('server_port', 7860)
    host = host or ui_config.get('server_name', '0.0.0.0')
    print(f"🌐 Application will be available at: http://{host}:{port}")
    print("📝 Press Ctrl+C to stop the application")
    # Pass host/port through Gradio's standard environment variables so the
    # selected values actually reach app.py (assumes app.py's launch() does not
    # override them explicitly).
    env = os.environ.copy()
    env["GRADIO_SERVER_NAME"] = str(host)
    env["GRADIO_SERVER_PORT"] = str(port)
    try:
        subprocess.run([sys.executable, "app.py"], check=True, env=env)
    except KeyboardInterrupt:
        print("\n👋 Application stopped by user")
    except subprocess.CalledProcessError as e:
        print(f"❌ Failed to start application: {e}")
        return False
    return True

def main():
    """Main deployment function"""
    parser = argparse.ArgumentParser(description="Deploy Petite Elle L'Aime 3 Gradio Application")
    parser.add_argument("--install", action="store_true", help="Install dependencies")
    parser.add_argument("--test", action="store_true", help="Run tests")
    parser.add_argument("--check", action="store_true", help="Check system requirements")
    parser.add_argument("--port", type=int, help="Port to run the application on")
    parser.add_argument("--host", type=str, help="Host to bind the application to")
    parser.add_argument("--start", action="store_true", help="Start the application")
    args = parser.parse_args()

    print("🤖 Petite Elle L'Aime 3 - Deployment Script\n")

    # If no arguments provided, run full deployment
    if not any([args.install, args.test, args.check, args.start]):
        args.install = True
        args.test = True
        args.check = True
        args.start = True

    success = True
    if args.install:
        success &= install_dependencies()
    if args.check:
        success &= check_dependencies()
        success &= check_hardware()
    if args.test:
        success &= run_tests()
    if args.start and success:
        start_application(args.port, args.host)

    if not success:
        print("\n❌ Deployment failed. Please fix the issues above.")
        sys.exit(1)
    else:
        print("\n✅ Deployment completed successfully!")


if __name__ == "__main__":
    main()