#!/usr/bin/env python3
"""
Verify GPU setup for Hugging Face Spaces deployment.
This script follows the official HuggingFace documentation for GPU verification.
"""
import sys
import os

print("=== GPU Setup Verification for HuggingFace Spaces ===")
print()

# Test 1: Check PyTorch installation and CUDA availability
print("1. PyTorch/CUDA Check:")
try:
    import torch
    print(f"   ✓ PyTorch installed: version {torch.__version__}")
    print(f"   Is CUDA available: {torch.cuda.is_available()}")
    if torch.cuda.is_available():
        print(f"   CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
        print(f"   CUDA version: {torch.version.cuda}")
    else:
        print("   Running on CPU")
except ImportError as e:
    print(f"   ✗ PyTorch not installed or import error: {e}")
except Exception as e:
    print(f"   ✗ Error checking PyTorch: {e}")
print()

# Test 2: Check spaCy GPU configuration
print("2. spaCy GPU Check:")
try:
    import spacy
    # prefer_gpu() returns True if a GPU was activated, False otherwise
    gpu_enabled = spacy.prefer_gpu()
    if gpu_enabled:
        print("   ✓ spaCy GPU enabled")
    else:
        print("   ✗ spaCy could not enable GPU (will use CPU)")
    print(f"   spaCy version: {spacy.__version__}")
except ImportError:
    print("   ✗ spaCy not installed")
except Exception as e:
    print(f"   ✗ Error checking spaCy: {e}")
print()

# Test 3: Check transformer packages
print("3. Transformer Packages Check:")
packages_found = []
try:
    import spacy_transformers
    packages_found.append("spacy-transformers")
    print("   ✓ spacy-transformers installed")
except ImportError:
    print("   ✗ spacy-transformers not installed")
try:
    import spacy_curated_transformers
    packages_found.append("spacy-curated-transformers")
    print("   ✓ spacy-curated-transformers installed")
except ImportError:
    print("   ✗ spacy-curated-transformers not installed")
if not packages_found:
    print("   ⚠️ No transformer packages found - transformer models won't work!")
else:
    print(f"   Found packages: {', '.join(packages_found)}")
print()

# Test 4: Test loading a transformer model
print("4. Transformer Model Loading Test:")
try:
    import spacy
    # Try to load the English transformer model
    print("   Testing en_core_web_trf...")
    nlp = spacy.load("en_core_web_trf")
    # Process a test sentence
    doc = nlp("This is a test sentence.")
    print(f"   ✓ Successfully loaded and processed text with {len(doc)} tokens")
    # Check if model is on GPU
    if hasattr(nlp, 'pipe'):
        for name, component in nlp.pipeline:
            if hasattr(component, 'model') and hasattr(component.model, 'device'):
                print(f"   Component '{name}' device: {component.model.device}")
except Exception as e:
    print(f"   ✗ Error loading transformer model: {e}")
print()

# Test 5: Environment information
print("5. Environment Information:")
print(f"   Platform: {sys.platform}")
print(f"   Python: {sys.version}")
print(f"   Working directory: {os.getcwd()}")
# Check for the HuggingFace Spaces environment
if os.environ.get('SPACES'):
    print("   ✓ Running in HuggingFace Spaces")
    print(f"   Space ID: {os.environ.get('SPACE_ID', 'N/A')}")
    print(f"   Space Host: {os.environ.get('SPACE_HOST', 'N/A')}")
else:
    print("   ✗ Not running in HuggingFace Spaces (local environment)")
print()
print("=== Verification Complete ===")

# Summary
print("\nSummary:")
if 'torch' in sys.modules and torch.cuda.is_available():
    print("✓ GPU support is properly configured for HuggingFace Spaces")
else:
    print("⚠️ GPU not available - will fall back to CPU processing")
    print("   This is normal for local development on Mac")
    print("   GPU will be available when deployed to HuggingFace Spaces with GPU hardware")