# Phony targets: command names, not files — always considered out of date.
# NOTE: names here must match the actual rule names below (docker-up /
# docker-down, not docker-compose-up / docker-compose-down).
.PHONY: check-poetry install test lint format help system-deps \
        coverage coverage-html \
        download-model download-model-small download-model-medium download-model-large \
        serve serve-prod \
        docker-build docker-run docker-up docker-down

# Directory where translation models are stored.
# Override from the environment or the command line:
#   make MODEL_PATH=/path/to/models download-model
MODEL_PATH ?= ./models
25
36# Extract target descriptions from comments
47help : # # Show this help message
@@ -40,11 +43,10 @@ install: check-poetry system-deps ## Install project dependencies
# Run the unit test suite (integration tests, if any, are run separately).
test: check-poetry ## Run tests
	@echo "Running tests..."
	@poetry run pytest tests/unit
# Check-only mode: fails if formatting/import order is wrong, changes nothing.
lint: check-poetry ## Run linters
	@echo "Running linters..."
	@poetry run isort --check babeltron
	@poetry run black --check babeltron
# Terminal coverage summary for the unit test suite.
coverage: check-poetry ## Run tests with coverage report
	@echo "Running tests with coverage..."
	@poetry run pytest tests/unit --cov=babeltron --cov-report=term
# Browsable coverage report; written to htmlcov/ by pytest-cov.
coverage-html: check-poetry ## Generate HTML coverage report
	@echo "Generating HTML coverage report..."
	@poetry run pytest tests/unit --cov=babeltron --cov-report=html
	@echo "HTML coverage report generated in htmlcov/ directory"
	@echo "Open htmlcov/index.html in your browser to view the report"
# Model download commands — sizes refer to the translation model variants
# handled by babeltron.scripts.download_models.
download-model: download-model-small ## Download the default (small) translation model

download-model-small: check-poetry ## Download small translation model (418M parameters, ~1GB)
	@echo "Downloading small translation model (418M parameters)..."
	@poetry run python -m babeltron.scripts.download_models --size 418M --output-dir $(MODEL_PATH)
download-model-medium: check-poetry ## Download medium translation model (1.2B parameters, ~2.5GB)
	@echo "Downloading medium translation model (1.2B parameters)..."
	@poetry run python -m babeltron.scripts.download_models --size 1.2B --output-dir $(MODEL_PATH)
download-model-large: check-poetry ## Download large translation model (12B parameters, ~24GB)
	@echo "Downloading large translation model (12B parameters)..."
	@poetry run python -m babeltron.scripts.download_models --size 12B --output-dir $(MODEL_PATH)
# Local development server: --reload restarts uvicorn on code changes.
serve: check-poetry ## Run the API server locally
	@echo "Starting API server on http://localhost:8000..."
	@poetry run uvicorn babeltron.app.main:app --reload --host 0.0.0.0 --port 8000
serve-prod: check-poetry ## Run the API server in production mode (no reload)
	@echo "Starting API server in production mode on http://localhost:8000..."
	@poetry run uvicorn babeltron.app.main:app --host 0.0.0.0 --port 8000
# Docker commands
docker-build: ## Build Docker image
	@echo "Building Docker image..."
	@docker build -t babeltron:latest .
# Run the image with the local model directory bind-mounted at /models.
# The emptiness check uses shell command substitution ($$(...)), NOT make's
# $(shell ...): $(shell ...) is expanded by make before the recipe runs, so
# it would not observe files downloaded by this very recipe and runs at the
# wrong time.  `read -p` makes this target interactive — TODO confirm that
# is intended for CI use.
docker-run: ## Run Docker container with model volume mount
	@echo "Checking for model files..."
	@if [ ! -d "$(MODEL_PATH)" ] || [ -z "$$(ls -A "$(MODEL_PATH)" 2>/dev/null)" ]; then \
		echo "No model files found in $(MODEL_PATH) directory."; \
		read -p "Do you want to download the small model now? (y/n) " answer; \
		if [ "$$answer" = "y" ]; then \
			mkdir -p "$(MODEL_PATH)"; \
			echo "Downloading small model..."; \
			poetry run python -m babeltron.scripts.download_models --size 418M --output-dir $(MODEL_PATH); \
		else \
			echo "Model download skipped. Container may not work properly."; \
		fi; \
	fi
	@echo "Running Docker container..."
	@docker run -p 8000:8000 -v "$(CURDIR)/$(MODEL_PATH)":/models babeltron:latest
# Build and start the compose stack, offering to fetch the small model first.
# Same caveat as docker-run: the emptiness test must be shell substitution
# ($$(...)), because make's $(shell ...) is expanded before the recipe runs.
docker-up: ## Build and start services with docker-compose
	@echo "Checking for model files..."
	@if [ ! -d "$(MODEL_PATH)" ] || [ -z "$$(ls -A "$(MODEL_PATH)" 2>/dev/null)" ]; then \
		echo "No model files found in $(MODEL_PATH) directory."; \
		read -p "Do you want to download the small model now? (y/n) " answer; \
		if [ "$$answer" = "y" ]; then \
			mkdir -p "$(MODEL_PATH)"; \
			echo "Downloading small model..."; \
			poetry run python -m babeltron.scripts.download_models --size 418M --output-dir $(MODEL_PATH); \
		else \
			echo "Model download skipped. Container may not work properly."; \
		fi; \
	fi
	@echo "Building and starting services with docker-compose..."
	@docker-compose up -d --build
	@echo "Services started successfully. API available at http://localhost:8000"
	@echo "API documentation available at http://localhost:8000/docs"
docker-down: ## Stop docker-compose services
	@echo "Stopping docker-compose services..."
	@docker-compose down
# end of Makefile