# simple-text-analyzer / verify_gpu_setup.py
# (HuggingFace file-page residue: author "egumasa", commit "trigger GPU", 3f10400)
#!/usr/bin/env python3
"""
Verify GPU setup for Hugging Face Spaces deployment.
This script follows the official HuggingFace documentation for GPU verification.
"""
import sys
import os
print("=== GPU Setup Verification for HuggingFace Spaces ===")
print()
# Test 1: Check PyTorch installation and CUDA availability
print("1. PyTorch/CUDA Check:")
try:
import torch
print(f" βœ“ PyTorch installed: version {torch.__version__}")
print(f" Is CUDA available: {torch.cuda.is_available()}")
if torch.cuda.is_available():
print(f" CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
print(f" CUDA version: {torch.version.cuda}")
else:
print(" Running on CPU")
except ImportError as e:
print(f" βœ— PyTorch not installed or import error: {e}")
except Exception as e:
print(f" βœ— Error checking PyTorch: {e}")
print()
# Test 2: Check spaCy GPU configuration
print("2. spaCy GPU Check:")
try:
import spacy
# Try to enable GPU
gpu_id = spacy.prefer_gpu()
if gpu_id is not False:
print(f" βœ“ spaCy GPU enabled on device {gpu_id}")
else:
print(" βœ— spaCy could not enable GPU (will use CPU)")
print(f" spaCy version: {spacy.__version__}")
except ImportError:
print(" βœ— spaCy not installed")
except Exception as e:
print(f" βœ— Error checking spaCy: {e}")
print()
# Test 3: Check transformer packages
print("3. Transformer Packages Check:")
packages_found = []
try:
import spacy_transformers
packages_found.append("spacy-transformers")
print(" βœ“ spacy-transformers installed")
except ImportError:
print(" βœ— spacy-transformers not installed")
try:
import spacy_curated_transformers
packages_found.append("spacy-curated-transformers")
print(" βœ“ spacy-curated-transformers installed")
except ImportError:
print(" βœ— spacy-curated-transformers not installed")
if not packages_found:
print(" ⚠️ No transformer packages found - transformer models won't work!")
else:
print(f" Found packages: {', '.join(packages_found)}")
print()
# Test 4: Test loading a transformer model
print("4. Transformer Model Loading Test:")
try:
import spacy
# Try to load English transformer model
print(" Testing en_core_web_trf...")
nlp = spacy.load("en_core_web_trf")
# Process a test sentence
doc = nlp("This is a test sentence.")
print(f" βœ“ Successfully loaded and processed text with {len(doc)} tokens")
# Check if model is on GPU
if hasattr(nlp, 'pipe'):
for name, component in nlp.pipeline:
if hasattr(component, 'model') and hasattr(component.model, 'device'):
print(f" Component '{name}' device: {component.model.device}")
except Exception as e:
print(f" βœ— Error loading transformer model: {e}")
print()
# Test 5: Environment information
print("5. Environment Information:")
print(f" Platform: {sys.platform}")
print(f" Python: {sys.version}")
print(f" Working directory: {os.getcwd()}")
# Check for HuggingFace Spaces environment
if os.environ.get('SPACES'):
print(" βœ“ Running in HuggingFace Spaces")
print(f" Space ID: {os.environ.get('SPACE_ID', 'N/A')}")
print(f" Space Host: {os.environ.get('SPACE_HOST', 'N/A')}")
else:
print(" βœ— Not running in HuggingFace Spaces (local environment)")
print()
print("=== Verification Complete ===")
# Summary: overall GPU verdict. Look torch up via sys.modules instead of the
# bare name: if `import torch` raised after registering the module (partial
# import), `'torch' in sys.modules` would be True while the local name `torch`
# is unbound, and the original expression would raise NameError here.
print("\nSummary:")
_torch = sys.modules.get('torch')
if _torch is not None and _torch.cuda.is_available():
    print("βœ… GPU support is properly configured for HuggingFace Spaces")
else:
    print("⚠️ GPU not available - will fall back to CPU processing")
    print(" This is normal for local development on Mac")
    print(" GPU will be available when deployed to HuggingFace Spaces with GPU hardware")