|
#!/bin/bash
# Build and smoke-test the DeepEthogram docker images (headless, gui, full),
# verifying GPU access inside the containers when an NVIDIA GPU is present.

# Exit on any error; pipefail makes a pipeline fail if ANY stage fails
# (without it, a docker failure piped into grep would be masked).
set -eo pipefail

# ANSI color codes for terminal output; readonly since they are constants.
readonly GREEN='\033[0;32m'
readonly RED='\033[0;31m'
readonly NC='\033[0m' # No Color
readonly BLUE='\033[0;34m'
readonly YELLOW='\033[1;33m'
| 11 | + |
# Print a blank-line-padded, colored section banner to stdout.
# Arguments: $1 - header text
print_header() {
  # %b expands the escape sequences stored in the color variables
  # (and in the title), matching the original `echo -e` behavior.
  printf '\n%b=== %b ===%b\n\n' "$BLUE" "$1" "$NC"
}
| 16 | + |
# Print a highlighted "Running: ..." line, then execute the command verbatim.
# Arguments: the command and its arguments, e.g. `echo_run docker ps`
echo_run() {
  # "$*" joins the arguments with single spaces for display; %b keeps the
  # escape-interpreting behavior of the original `echo -e`.
  printf '%bRunning: %b%b\n' "$YELLOW" "$*" "$NC"
  # "$@" preserves each argument as its own word when executing.
  "$@"
}
| 22 | + |
# Build one docker image variant.
# Arguments:
#   $1 - image type ("headless", "gui", or "full"); selects
#        docker/Dockerfile-$1 and produces the tag deepethogram:$1
# Outputs: docker build progress on stdout/stderr
build_image() {
    local type=$1
    print_header "Building $type image"
    # Quote the expansions so an unexpected value cannot word-split or
    # glob (ShellCheck SC2086).
    echo_run docker build -t "deepethogram:$type" -f "docker/Dockerfile-$type" .
}
| 29 | + |
# Function to verify GPU in container
# Check that a built image can actually reach the host GPU, first via
# nvidia-smi and then through PyTorch's CUDA runtime.
# Arguments:
#   $1 - docker GPU flag ("--gpus all" or ""); deliberately expanded
#        UNQUOTED below so it word-splits into separate docker arguments
#        (quoting it would pass "--gpus all" as one broken token)
#   $2 - image tag suffix, e.g. "headless" or "full"
# Returns: 0 if both checks pass, 1 on the first failure
verify_gpu() {
    local gpu_flag=$1
    local type=$2
    echo "Verifying GPU access in container..."
    echo -e "${YELLOW}Running: docker run $gpu_flag --rm deepethogram:$type nvidia-smi${NC}"
    # nvidia-smi failing inside the container means the runtime is not
    # exposing the GPU (e.g. nvidia-container-toolkit missing).
    if ! docker run $gpu_flag --rm deepethogram:$type nvidia-smi; then
        echo -e "${RED}Failed to access GPU in container${NC}"
        return 1
    fi
    echo -e "${YELLOW}Running: docker run $gpu_flag --rm deepethogram:$type python -c \"import torch; print('CUDA available:', torch.cuda.is_available())\"${NC}"
    # grep -q succeeds only if PyTorch itself reports CUDA as usable;
    # a working nvidia-smi alone does not guarantee the CUDA runtime works.
    if ! docker run $gpu_flag --rm deepethogram:$type python -c "import torch; print('CUDA available:', torch.cuda.is_available())" | grep -q "CUDA available: True"; then
        echo -e "${RED}Failed to access GPU through PyTorch${NC}"
        return 1
    fi
    return 0
}
| 47 | + |
# Run smoke tests inside a built image: Python import, CPU/GPU pytest
# suites (headless/full only), and GUI import (gui/full only).
# Arguments:
#   $1 - image type ("headless", "gui", or "full")
#   $2 - docker GPU flag ("--gpus all" or ""); intentionally expanded
#        unquoted below so an empty value contributes no argument
#   $3 - "true" if a usable GPU was detected on the host
# Exits the script with status 1 on the first failing test.
#
# NOTE: the previous `cmd && ok || (echo fail && exit 1)` pattern had two
# defects: `exit 1` ran inside a ( ... ) subshell, so it only exited the
# subshell (the script stopped purely as a side effect of `set -e`), and
# the `A && B || C` form would run the failure branch if the success echo
# itself failed. Explicit if/else with a direct `exit 1` fixes both.
test_container() {
    local type=$1
    local gpu_flag=$2
    local has_gpu=$3

    print_header "Testing $type container"

    # Test basic import
    echo "Testing Python import..."
    echo -e "${YELLOW}Running: docker run $gpu_flag -it deepethogram:$type python -c \"import deepethogram\"${NC}"
    if docker run $gpu_flag -it "deepethogram:$type" python -c "import deepethogram"; then
        echo -e "${GREEN}✓ Import test passed${NC}"
    else
        echo -e "${RED}✗ Import test failed${NC}"
        exit 1
    fi

    # For containers that should support tests
    if [ "$type" = "full" ] || [ "$type" = "headless" ]; then
        echo "Running CPU tests..."
        echo -e "${YELLOW}Running: docker run $gpu_flag -it deepethogram:$type pytest -v -m \"not gpu\" tests/${NC}"
        if docker run $gpu_flag -it "deepethogram:$type" pytest -v -m "not gpu" tests/; then
            echo -e "${GREEN}✓ CPU tests passed${NC}"
        else
            echo -e "${RED}✗ CPU tests failed${NC}"
            exit 1
        fi

        # Run GPU tests if GPU is available
        if [ "$has_gpu" = true ] && [ "$type" != "gui" ]; then
            echo "Running GPU tests..."
            # First verify CUDA is accessible; under set -e a failure here
            # aborts the script before the (slow) GPU test suite runs.
            echo -e "${YELLOW}Running: docker run $gpu_flag -it deepethogram:$type python -c \"import torch; assert torch.cuda.is_available(), 'CUDA not available'; print('CUDA is available')\"${NC}"
            docker run $gpu_flag -it "deepethogram:$type" python -c "import torch; assert torch.cuda.is_available(), 'CUDA not available'; print('CUDA is available')"
            # Run the actual GPU tests
            echo -e "${YELLOW}Running: docker run $gpu_flag -it deepethogram:$type bash -c \"export CUDA_VISIBLE_DEVICES=0 && pytest -v -m gpu tests/\"${NC}"
            if docker run $gpu_flag -it "deepethogram:$type" \
                bash -c "export CUDA_VISIBLE_DEVICES=0 && pytest -v -m gpu tests/"; then
                echo -e "${GREEN}✓ GPU tests passed${NC}"
            else
                echo -e "${RED}✗ GPU tests failed${NC}"
                exit 1
            fi
        fi
    fi

    # For containers that should support GUI
    if [ "$type" = "full" ] || [ "$type" = "gui" ]; then
        echo "Testing GUI import..."
        # X11 socket + DISPLAY are forwarded so Qt can initialize.
        # NOTE(review): assumes the host runs X11 with $DISPLAY set — an
        # unset DISPLAY will likely make the import fail; confirm on CI.
        echo -e "${YELLOW}Running: docker run $gpu_flag -e DISPLAY=$DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix:rw -it deepethogram:$type python -c \"from deepethogram.gui import main\"${NC}"
        if docker run $gpu_flag -e DISPLAY=$DISPLAY \
            -v /tmp/.X11-unix:/tmp/.X11-unix:rw \
            -it "deepethogram:$type" python -c "from deepethogram.gui import main"; then
            echo -e "${GREEN}✓ GUI import test passed${NC}"
        else
            echo -e "${RED}✗ GUI import test failed${NC}"
            exit 1
        fi
    fi
}
| 97 | + |
# Main execution: detect GPU, then build and test every image variant.
# Must be run from the project root (where pyproject.toml lives).
# Exits non-zero on the first build/test failure (via set -e or explicit exit).
main() {
    # Ensure we're in the project root
    if [[ ! -f "pyproject.toml" ]]; then
        echo -e "${RED}Error: Must run from project root directory (where pyproject.toml is located)${NC}"
        exit 1
    fi

    # Check if GPU is available: nvidia-smi must both exist AND succeed
    # (it exits non-zero when the driver is not loaded).
    local has_gpu=false
    local gpu_flag=""  # kept local instead of leaking a GPU_FLAG global
    if command -v nvidia-smi &> /dev/null && nvidia-smi &> /dev/null; then
        gpu_flag="--gpus all"
        has_gpu=true
        echo -e "${GREEN}NVIDIA GPU detected, will use GPUs and run GPU tests${NC}"
    else
        echo -e "${RED}No NVIDIA GPU detected, running without GPU${NC}"
    fi

    # Build and test each image type; arguments are quoted (SC2086).
    local type
    for type in headless gui full; do
        build_image "$type"
        # Verify GPU access after building if we have a GPU; the gui
        # variant is skipped, matching the GPU-test logic in test_container.
        if [ "$has_gpu" = true ] && [ "$type" != "gui" ]; then
            if ! verify_gpu "$gpu_flag" "$type"; then
                echo -e "${RED}GPU detected on host but not accessible in container. Please check nvidia-docker installation.${NC}"
                exit 1
            fi
        fi
        test_container "$type" "$gpu_flag" "$has_gpu"
    done

    print_header "All builds and tests completed successfully!"
}
| 132 | + |
# Execute main function, forwarding any script arguments (currently
# unused by main, but "$@" is the conventional, future-proof form).
main "$@"
0 commit comments