diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..478ea62
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,17 @@
+.git
+.gitignore
+.venv
+.pytest_cache
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.coverage
+htmlcov/
+.mypy_cache
+.DS_Store
+Thumbs.db
+README.md
+*.md
+.env
+.env.*
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
new file mode 100644
index 0000000..9488c6e
--- /dev/null
+++ b/.github/workflows/ci-cd.yml
@@ -0,0 +1,93 @@
+name: CI/CD Pipeline
+
+on:
+ push:
+ branches: [ main, develop ]
+ pull_request:
+ branches: [ main ]
+
+env:
+ AWS_REGION: eu-west-1
+ ECR_REPOSITORY: neurobank-fastapi
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.11'
+
+ - name: Install Poetry
+ uses: snok/install-poetry@v1
+ with:
+ version: 1.8.2
+
+ - name: Install dependencies
+ run: poetry install
+
+ - name: Run tests with coverage
+ run: |
+ poetry run pytest --cov=app --cov-report=xml --cov-report=html
+
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v3
+ with:
+ files: ./coverage.xml
+
+ security:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.11'
+
+ - name: Install dependencies
+ run: |
+ pip install bandit safety
+
+ - name: Run security checks
+ run: |
+ bandit -r app/ -f json -o bandit-report.json
+          safety check -r requirements.txt
+
+ build-and-deploy:
+ needs: [test, security]
+ runs-on: ubuntu-latest
+ if: github.ref == 'refs/heads/main'
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: ${{ env.AWS_REGION }}
+
+ - name: Login to Amazon ECR
+ id: login-ecr
+ uses: aws-actions/amazon-ecr-login@v2
+
+ - name: Build, tag, and push image to Amazon ECR
+ env:
+ ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
+ IMAGE_TAG: ${{ github.sha }}
+ run: |
+ docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG .
+ docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
+ docker tag $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:latest
+ docker push $ECR_REGISTRY/$ECR_REPOSITORY:latest
+
+ - name: Deploy to AWS Lambda
+ run: |
+ sam build
+ sam deploy --no-confirm-changeset --no-fail-on-empty-changeset --stack-name neurobank-api --capabilities CAPABILITY_IAM --parameter-overrides ApiKey=${{ secrets.API_KEY }}
diff --git a/.gitignore b/.gitignore
index b7faf40..0b0af7d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,207 +1,51 @@
-# Byte-compiled / optimized / DLL files
+# --- Basado en la plantilla oficial de Python ---
__pycache__/
-*.py[codz]
+*.py[cod]
*$py.class
-# C extensions
-*.so
+# Entornos virtuales
+.venv/
+venv/
+ENV/
+env/
+env.bak/
+venv.bak/
-# Distribution / packaging
-.Python
+# Paquetes/compilados
build/
-develop-eggs/
dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-share/python-wheels/
*.egg-info/
-.installed.cfg
+.eggs/
*.egg
-MANIFEST
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-*.py.cover
-.hypothesis/
-.pytest_cache/
-cover/
-
-# Translations
-*.mo
-*.pot
+wheels/
+pip-wheel-metadata/
-# Django stuff:
+# Archivos de logs
*.log
-local_settings.py
-db.sqlite3
-db.sqlite3-journal
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-.pybuilder/
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# IPython
-profile_default/
-ipython_config.py
-
-# pyenv
-# For a library or package, you might want to ignore these files since the code is
-# intended to run in multiple environments; otherwise, check them in:
-# .python-version
-
-# pipenv
-# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
-# However, in case of collaboration, if having platform-specific dependencies or dependencies
-# having no cross-platform support, pipenv may install dependencies that don't work, or not
-# install all needed dependencies.
-#Pipfile.lock
-
-# UV
-# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
-# This is especially recommended for binary packages to ensure reproducibility, and is more
-# commonly ignored for libraries.
-#uv.lock
-# poetry
-# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
-# This is especially recommended for binary packages to ensure reproducibility, and is more
-# commonly ignored for libraries.
-# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
-#poetry.lock
-#poetry.toml
-
-# pdm
-# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
-# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
-# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
-#pdm.lock
-#pdm.toml
-.pdm-python
-.pdm-build/
-
-# pixi
-# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
-#pixi.lock
-# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
-# in the .venv directory. It is recommended not to include this directory in version control.
-.pixi
-
-# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
-__pypackages__/
-
-# Celery stuff
-celerybeat-schedule
-celerybeat.pid
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.envrc
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
+# Cachés de pruebas y cobertura
+.pytest_cache/
+.coverage
+htmlcov/
.mypy_cache/
-.dmypy.json
-dmypy.json
-
-# Pyre type checker
.pyre/
-# pytype static type analyzer
-.pytype/
+# Configuración de IDEs
+.vscode/
+.idea/
-# Cython debug symbols
-cython_debug/
-
-# PyCharm
-# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
-# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-# and can be added to the global gitignore or merged into this file. For a more nuclear
-# option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
-
-# Abstra
-# Abstra is an AI-powered process automation framework.
-# Ignore directories containing user credentials, local state, and settings.
-# Learn more at https://abstra.io/docs
-.abstra/
-
-# Visual Studio Code
-# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
-# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
-# and can be added to the global gitignore or merged into this file. However, if you prefer,
-# you could uncomment the following to ignore the entire vscode folder
-# .vscode/
-
-# Ruff stuff:
-.ruff_cache/
+# Variables de entorno (dotenv)
+.env
+.env.*
-# PyPI configuration file
-.pypirc
+# Documentación generada
+docs/_build/
-# Cursor
-# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
-# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
-# refer to https://docs.cursor.com/context/ignore-files
-.cursorignore
-.cursorindexingignore
+# Docker (si lo usas)
+*.docker
+docker-compose.override.yml
-# Marimo
-marimo/_static/
-marimo/_lsp/
-__marimo__/
+# Sistema operativo
+.DS_Store
+Thumbs.db
+.aws-sam/
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..3136fee
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,27 @@
+FROM python:3.11-slim
+
+# Variables
+ENV PYTHONDONTWRITEBYTECODE=1 \
+ PYTHONUNBUFFERED=1 \
+ POETRY_VERSION=1.8.2
+
+# Sistema
+RUN apt-get update && apt-get install -y build-essential curl && rm -rf /var/lib/apt/lists/*
+
+# Poetry
+RUN curl -sSL https://install.python-poetry.org | python3 -
+ENV PATH="/root/.local/bin:$PATH"
+
+# Copiar proyecto
+WORKDIR /app
+COPY pyproject.toml poetry.lock* /app/
+RUN poetry install --no-root --only main
+
+# Copiar código
+COPY . /app
+
+# Exponer puerto
+EXPOSE 8000
+
+# Comando por defecto
+CMD ["poetry", "run", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/Jenkinsfile b/Jenkinsfile
new file mode 100644
index 0000000..e2e95ad
--- /dev/null
+++ b/Jenkinsfile
@@ -0,0 +1,48 @@
+pipeline {
+ agent any
+ stages {
+ stage('Checkout') {
+ steps { checkout scm }
+ }
+ stage('Test') {
+ steps {
+ sh 'poetry install'
+ sh 'poetry run pytest --cov=app --cov-report=xml'
+ }
+ }
+ stage('SonarQube') {
+ environment {
+ SONAR_TOKEN = credentials('sonar-token')
+ }
+ steps {
+ sh "sonar-scanner -Dsonar.projectKey=neurobank \
+ -Dsonar.python.coverage.reportPaths=coverage.xml"
+ }
+ }
+ stage('Build & Push') {
+ steps {
+ sh 'docker build -t myrepo/neurobank:${BUILD_NUMBER} .'
+ sh 'docker push myrepo/neurobank:${BUILD_NUMBER}'
+ }
+ }
+ stage('Deploy AppRunner') {
+ steps {
+ // AWS CLI / CDK / Terraform script here
+ script {
+ sh '''
+ aws apprunner update-service \
+ --service-arn ${APP_RUNNER_SERVICE_ARN} \
+                        --source-configuration "{
+                            \"ImageRepository\": {
+                                \"ImageIdentifier\": \"myrepo/neurobank:${BUILD_NUMBER}\",
+                                \"ImageConfiguration\": {
+                                    \"Port\": \"8000\"
+                                }
+                            }
+                        }"
+ '''
+ }
+ }
+ }
+ }
+}
diff --git a/README.md b/README.md
index 49e740f..5b2f3ff 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,73 @@
-# NeuroBank-FastAPI-Toolkit
+# Operator API (FASE 2)
+
+Backend en FastAPI para consultar estados de pedidos y generar facturas.
+
+## Requisitos
+
+- Python 3.10+
+- macOS Sonoma 14.7.6 (compatible)
+- Poetry o pip + virtualenv
+
+## Instalación
+
+```bash
+python -m venv .venv
+source .venv/bin/activate
+pip install -r requirements.txt
+cp .env.example .env # Ajusta la API_KEY si lo deseas
+```
+
+## Ejecución
+
+```bash
+uvicorn app.main:app --reload
+# Visita http://localhost:8000/docs
+```
+
+## Tests
+
+```bash
+pytest
+```
+
+## Despliegue
+
+- Contenedor Docker (opcional)
+- AWS ECS/Fargate o EC2 + Systemd
+- Observabilidad: CloudWatch, Prometheus + Grafana, etc.
+
+---
+
+## Cómo levantar el proyecto
+
+1. **Crear entorno virtual**
+ ```bash
+ python -m venv .venv
+ source .venv/bin/activate
+ ```
+
+2. **Instalar dependencias**
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. **Configurar .env**
+ ```bash
+ cp .env.example .env
+ # Edita API_KEY si lo deseas
+ ```
+
+4. **Ejecutar servidor**
+ ```bash
+ uvicorn app.main:app --reload
+ ```
+
+5. **Probar endpoints** (con la API Key en cabecera X-API-Key)
+ - `GET /operator/order_status/123`
+ - `POST /operator/generate_invoice` con body `{"order_id": "123"}`
+
+6. **Ejecutar tests**
+ ```bash
+ pytest
+   ```
Senior‑grade FastAPI microservice blueprint for AI‑driven banking. Python 3.10+, Pydantic v2, Docker & AWS stack (Lambda, AppRunner, CloudWatch, X‑Ray) with CI/CD via GitHub Actions. Incluye clean code, tests completos, observabilidad y módulos listos para estado de pedidos, facturación y analítica.
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000..8a66036
--- /dev/null
+++ b/app/__init__.py
@@ -0,0 +1 @@
+# Permite que 'app' sea un paquete importable.
diff --git a/app/auth/__init__.py b/app/auth/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/auth/dependencies.py b/app/auth/dependencies.py
new file mode 100644
index 0000000..271f016
--- /dev/null
+++ b/app/auth/dependencies.py
@@ -0,0 +1,17 @@
+import os
+from fastapi import Header, HTTPException, status
+from dotenv import load_dotenv
+
+load_dotenv() # Carga variables de .env
+
+API_KEY = os.getenv("API_KEY", "secret")
+
+async def verify_api_key(x_api_key: str = Header(...)):
+ """
+ Verifica la API Key enviada en la cabecera 'X-API-Key'.
+ """
+ if x_api_key != API_KEY:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="API key inválida",
+ )
diff --git a/app/main.py b/app/main.py
new file mode 100644
index 0000000..0c45bf5
--- /dev/null
+++ b/app/main.py
@@ -0,0 +1,28 @@
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from .routers import operator
+from .utils.logging import init_logging
+
+init_logging()
+
+app = FastAPI(
+ title="Operator API",
+ version="0.1.0",
+ description="Backend de operadores reales (FASE 2)"
+)
+
+# CORS (ajusta origins según tu caso)
+app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+    allow_credentials=False,  # browsers reject credentials with wildcard "*" origins
+ allow_methods=["*"],
+ allow_headers=["*"],
+)
+
+# Rutas
+app.include_router(operator.router, prefix="/operator", tags=["Operator"])
+
+@app.get("/health", tags=["Health"])
+async def health():
+ return {"status": "ok"}
diff --git a/app/routers/__init__.py b/app/routers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/routers/operator.py b/app/routers/operator.py
new file mode 100644
index 0000000..7a443f5
--- /dev/null
+++ b/app/routers/operator.py
@@ -0,0 +1,48 @@
+from fastapi import APIRouter, Depends
+from pydantic import BaseModel
+from ..services.order_service import get_order_status
+from ..services.invoice_service import generate_invoice
+from ..auth.dependencies import verify_api_key
+
+router = APIRouter()
+
+# ----- Modelos Pydantic -----
+class OrderStatusResponse(BaseModel):
+ order_id: str
+ status: str
+ carrier: str
+ eta: str
+ last_updated: str
+ tracking_number: str
+ estimated_delivery_time: str | None
+
+class InvoiceRequest(BaseModel):
+ order_id: str
+
+class InvoiceResponse(BaseModel):
+ invoice_id: str
+ order_id: str
+ amount: float
+ currency: str
+ issued_at: str
+ status: str
+ tax_rate: float
+ subtotal: float
+ tax_amount: float
+
+# ----- Endpoints -----
+@router.get(
+ "/order_status/{order_id}",
+ response_model=OrderStatusResponse,
+ dependencies=[Depends(verify_api_key)]
+)
+async def order_status(order_id: str):
+ return get_order_status(order_id)
+
+@router.post(
+ "/generate_invoice",
+ response_model=InvoiceResponse,
+ dependencies=[Depends(verify_api_key)]
+)
+async def invoice(data: InvoiceRequest):
+ return generate_invoice(data.order_id)
diff --git a/app/services/__init__.py b/app/services/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/services/invoice_service.py b/app/services/invoice_service.py
new file mode 100644
index 0000000..0fd3ffa
--- /dev/null
+++ b/app/services/invoice_service.py
@@ -0,0 +1,53 @@
+import uuid
+from datetime import datetime
+from typing import Dict, Any
+from loguru import logger
+
+
+def generate_invoice(order_id: str) -> Dict[str, Any]:
+ """
+ Genera la factura de un pedido.
+
+ Args:
+ order_id (str): ID del pedido para generar la factura
+
+ Returns:
+ Dict[str, Any]: Datos de la factura generada
+
+ Raises:
+ ValueError: Si el order_id no es válido
+ """
+ if not order_id or not order_id.strip():
+ logger.error("Order ID cannot be empty or None")
+ raise ValueError("Order ID is required")
+
+ # Generar ID único de factura
+ invoice_id = f"INV-{datetime.now().strftime('%Y-%m')}-{str(uuid.uuid4())[:8].upper()}"
+
+ # Fecha actual en formato ISO
+ issued_at = datetime.now().strftime('%Y-%m-%d')
+
+ # Simular cálculo del monto basado en el order_id
+ base_amount = hash(order_id) % 100 + 50.0 # Entre 50-149
+ amount = round(base_amount + (base_amount * 0.21), 2) # IVA incluido
+
+ invoice_data = {
+ "invoice_id": invoice_id,
+ "order_id": order_id,
+ "amount": amount,
+ "currency": "EUR",
+ "issued_at": issued_at,
+ "status": "issued",
+ "tax_rate": 0.21,
+ "subtotal": base_amount,
+ "tax_amount": round(base_amount * 0.21, 2)
+ }
+
+ logger.info(f"Invoice generated: {invoice_id} for order: {order_id}")
+
+ # FUTURO: Aquí se integrará con:
+ # - Base de datos para persistir la factura
+ # - S3 para almacenar PDF de la factura
+ # - Sistema de contabilidad externo
+
+ return invoice_data
diff --git a/app/services/order_service.py b/app/services/order_service.py
new file mode 100644
index 0000000..9ffcaaf
--- /dev/null
+++ b/app/services/order_service.py
@@ -0,0 +1,61 @@
+from datetime import datetime, timedelta
+from typing import Dict, Any
+from loguru import logger
+
+
+def get_order_status(order_id: str) -> Dict[str, Any]:
+ """
+ Obtiene el estado actual de un pedido.
+
+ Args:
+ order_id (str): ID único del pedido a consultar
+
+ Returns:
+ Dict[str, Any]: Estado completo del pedido
+
+ Raises:
+ ValueError: Si el order_id no es válido
+ """
+ if not order_id or not order_id.strip():
+ logger.error("Order ID cannot be empty or None")
+ raise ValueError("Order ID is required")
+
+ # Simular diferentes estados basados en el ID
+ status_options = ["Pendiente", "En preparación", "En tránsito", "Entregado", "Cancelado"]
+ carriers = ["Correos Express", "SEUR", "MRW", "DHL", "Amazon Logistics"]
+
+ # Generar estado pseudo-aleatorio basado en el order_id
+ status_index = hash(order_id) % len(status_options)
+ carrier_index = hash(f"{order_id}_carrier") % len(carriers)
+
+ status = status_options[status_index]
+ carrier = carriers[carrier_index]
+
+ # Calcular ETA dinámicamente
+ if status == "Entregado":
+ eta = "Entregado"
+ elif status == "Cancelado":
+ eta = "N/A"
+ else:
+ days_to_add = (hash(order_id) % 5) + 1 # 1-5 días
+ eta_date = datetime.now() + timedelta(days=days_to_add)
+ eta = eta_date.strftime('%Y-%m-%d')
+
+ order_data = {
+ "order_id": order_id,
+ "status": status,
+ "carrier": carrier,
+ "eta": eta,
+ "last_updated": datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+ "tracking_number": f"TRK{hash(order_id) % 1000000:06d}",
+ "estimated_delivery_time": "08:00-18:00" if status not in ["Entregado", "Cancelado"] else None
+ }
+
+ logger.info(f"Order status retrieved: {order_id} - Status: {status}")
+
+ # FUTURO: Aquí se integrará con:
+ # - Base de datos de pedidos
+ # - APIs de transportistas reales
+ # - Sistema de tracking en tiempo real
+
+ return order_data
diff --git a/app/tests/__init__.py b/app/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/tests/test_operator.py b/app/tests/test_operator.py
new file mode 100644
index 0000000..edd0fc9
--- /dev/null
+++ b/app/tests/test_operator.py
@@ -0,0 +1,28 @@
+import pytest
+from httpx import AsyncClient
+from app.main import app
+
+API_KEY = "secret" # Debe coincidir con tu .env
+
+@pytest.mark.asyncio
+async def test_order_status():
+ async with AsyncClient(app=app, base_url="http://test") as ac:
+ resp = await ac.get(
+ "/operator/order_status/123",
+ headers={"X-API-Key": API_KEY}
+ )
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data["order_id"] == "123"
+
+@pytest.mark.asyncio
+async def test_generate_invoice():
+ async with AsyncClient(app=app, base_url="http://test") as ac:
+ resp = await ac.post(
+ "/operator/generate_invoice",
+ json={"order_id": "123"},
+ headers={"X-API-Key": API_KEY}
+ )
+ assert resp.status_code == 200
+ data = resp.json()
+ assert data["order_id"] == "123"
diff --git a/app/utils/__init__.py b/app/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/utils/logging.py b/app/utils/logging.py
new file mode 100644
index 0000000..28040c1
--- /dev/null
+++ b/app/utils/logging.py
@@ -0,0 +1,39 @@
+from loguru import logger
+import sys, os
+
+def init_logging():
+ """
+ Configura Loguru para logging estructurado con CloudWatch.
+ """
+ logger.remove()
+
+ # Consola
+ logger.add(
+ sys.stdout,
+ level="INFO",
+ format="{time:YYYY-MM-DD HH:mm:ss} | "
+ "{level} | {message}"
+ )
+
+ # CloudWatch (solo si AWS_REGION está configurado)
+ if os.getenv("AWS_REGION"):
+ try:
+ import watchtower
+ logger.add(
+ watchtower.CloudWatchLogHandler(log_group="neurobank-api"),
+ level="INFO",
+ serialize=True
+ )
+ logger.info("CloudWatch logging configured")
+ except Exception as e:
+ logger.warning(f"Could not configure CloudWatch logging: {e}")
+
+ # X-Ray (solo si está en AWS Lambda)
+ if os.getenv("AWS_LAMBDA_FUNCTION_NAME"):
+ try:
+ from aws_xray_sdk.core import patch_all, xray_recorder
+ patch_all()
+ xray_recorder.configure(service='NeuroBankAPI')
+ logger.info("X-Ray tracing configured")
+ except Exception as e:
+ logger.warning(f"Could not configure X-Ray: {e}")
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..5cb17c9
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,9 @@
+version: "3.9"
+services:
+ api:
+ build: .
+ ports:
+ - "8000:8000"
+ environment:
+ API_KEY: secret
+ AWS_REGION: eu-west-1
diff --git a/lambda_handler.py b/lambda_handler.py
new file mode 100644
index 0000000..4cedaba
--- /dev/null
+++ b/lambda_handler.py
@@ -0,0 +1,4 @@
+from mangum import Mangum
+from app.main import app
+
+handler = Mangum(app)
diff --git a/neurobank-fastapi.code-workspace b/neurobank-fastapi.code-workspace
new file mode 100644
index 0000000..84589de
--- /dev/null
+++ b/neurobank-fastapi.code-workspace
@@ -0,0 +1,31 @@
+{
+ "folders": [
+ {
+ "path": "."
+ }
+ ],
+ "settings": {
+ "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
+ "python.venvPath": "${workspaceFolder}/.venv",
+ "python.formatting.provider": "black",
+ "editor.formatOnSave": true,
+ "editor.codeActionsOnSave": {
+ "source.organizeImports": "explicit"
+ },
+ "files.exclude": {
+ "**/__pycache__": true,
+ "**/*.pyc": true,
+ ".venv": true
+ },
+ "git.ignoreLimitWarning": true
+ },
+ "extensions": {
+ "recommendations": [
+ "ms-python.python",
+ "ms-python.vscode-pylance",
+ "ms-azuretools.vscode-docker",
+ "github.copilot",
+ "github.copilot-chat"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..69de536
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,38 @@
+[tool.poetry]
+name = "neurobank-fastapi-toolkit"
+version = "0.1.0"
+description = "Backend de operadores reales (FASE 2)"
+authors = ["NeuroBank Team "]
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.11"
+fastapi = "0.111.0"
+uvicorn = {extras = ["standard"], version = "0.29.0"}
+pydantic = "2.7.0"
+python-dotenv = "1.0.1"
+loguru = "0.7.2"
+watchtower = "3.0.0"
+aws-xray-sdk = "2.13.0"
+mangum = "^0.17.0"
+
+[tool.poetry.group.dev.dependencies]
+pytest = "8.2.0"
+pytest-asyncio = "1.1.0"
+httpx = "0.27.0"
+pytest-cov = "^4.0.0"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.pytest.ini_options]
+testpaths = ["app/tests"]
+python_files = ["test_*.py"]
+python_classes = ["Test*"]
+python_functions = ["test_*"]
+asyncio_mode = "auto"
+
+[tool.coverage.run]
+source = ["app"]
+omit = ["app/tests/*"]
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..6265c2b
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,11 @@
+fastapi==0.111.0
+uvicorn[standard]==0.29.0
+pydantic==2.7.0
+python-dotenv==1.0.1
+loguru==0.7.2
+pytest==8.2.0
+pytest-asyncio==1.1.0
+httpx==0.27.0
+watchtower==3.0.0
+aws-xray-sdk==2.13.0
+mangum==0.17.0
diff --git a/sonar-project.properties b/sonar-project.properties
new file mode 100644
index 0000000..0a2c361
--- /dev/null
+++ b/sonar-project.properties
@@ -0,0 +1,16 @@
+sonar.projectKey=neurobank-fastapi-toolkit
+sonar.projectName=NeuroBank FastAPI Toolkit
+sonar.projectVersion=0.1.0
+
+# Código fuente
+sonar.sources=app
+sonar.tests=app/tests
+sonar.python.coverage.reportPaths=coverage.xml
+
+# Exclusiones
+sonar.exclusions=**/__pycache__/**,**/*.pyc,**/tests/**
+sonar.test.exclusions=app/tests/**
+
+# Python específico
+sonar.python.pylint.reportPaths=pylint-report.txt
+sonar.python.bandit.reportPaths=bandit-report.json
diff --git a/template.yaml b/template.yaml
new file mode 100644
index 0000000..6c5afd0
--- /dev/null
+++ b/template.yaml
@@ -0,0 +1,74 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: 'NeuroBank FastAPI Toolkit - Serverless deployment'
+
+Globals:
+ Function:
+ Timeout: 30
+ Environment:
+ Variables:
+ API_KEY: !Ref ApiKey
+ AWS_XRAY_TRACING_NAME: NeuroBankAPI
+
+Parameters:
+ Stage:
+ Type: String
+ Default: prod
+ AllowedValues:
+ - dev
+ - staging
+ - prod
+
+ ApiKey:
+ Type: String
+ Default: mi-super-secret-key-2025
+ NoEcho: true
+
+Resources:
+ NeuroBankFunction:
+ Type: AWS::Serverless::Function
+ Properties:
+ FunctionName: !Sub 'neurobank-api-${Stage}'
+ CodeUri: .
+ Handler: lambda_handler.handler
+ Runtime: python3.11
+ MemorySize: 512
+ Timeout: 30
+ Tracing: Active
+ Environment:
+ Variables:
+ API_KEY: !Ref ApiKey
+ Events:
+ ApiGateway:
+ Type: Api
+ Properties:
+ Path: /{proxy+}
+ Method: ANY
+ RestApiId: !Ref NeuroBankApi
+
+ NeuroBankApi:
+ Type: AWS::Serverless::Api
+ Properties:
+ Name: !Sub 'neurobank-api-${Stage}'
+ StageName: !Ref Stage
+ TracingEnabled: true
+ Cors:
+ AllowMethods: "'*'"
+ AllowHeaders: "'*'"
+ AllowOrigin: "'*'"
+
+ # CloudWatch Log Group
+ NeuroBankLogGroup:
+ Type: AWS::Logs::LogGroup
+ Properties:
+ LogGroupName: !Sub '/aws/lambda/neurobank-api-${Stage}'
+ RetentionInDays: 14
+
+Outputs:
+ NeuroBankApiUrl:
+ Description: 'API Gateway endpoint URL'
+ Value: !Sub 'https://${NeuroBankApi}.execute-api.${AWS::Region}.amazonaws.com/${Stage}/'
+
+ NeuroBankFunctionArn:
+ Description: 'Lambda Function ARN'
+ Value: !GetAtt NeuroBankFunction.Arn
\ No newline at end of file
diff --git a/test-event.json b/test-event.json
new file mode 100644
index 0000000..f2fa86e
--- /dev/null
+++ b/test-event.json
@@ -0,0 +1,62 @@
+{
+ "body": "",
+ "resource": "/{proxy+}",
+ "path": "/health",
+ "httpMethod": "GET",
+ "isBase64Encoded": false,
+ "queryStringParameters": {},
+ "multiValueQueryStringParameters": {},
+ "pathParameters": {
+ "proxy": "health"
+ },
+ "stageVariables": {},
+ "headers": {
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
+ "Accept-Encoding": "gzip, deflate, sdch",
+ "Accept-Language": "en-US,en;q=0.8",
+ "Cache-Control": "max-age=0",
+ "CloudFront-Forwarded-Proto": "https",
+ "CloudFront-Is-Desktop-Viewer": "true",
+ "CloudFront-Is-Mobile-Viewer": "false",
+ "CloudFront-Is-SmartTV-Viewer": "false",
+ "CloudFront-Is-Tablet-Viewer": "false",
+ "CloudFront-Viewer-Country": "US",
+ "Host": "1234567890.execute-api.us-east-1.amazonaws.com",
+ "Upgrade-Insecure-Requests": "1",
+ "User-Agent": "Custom User Agent String",
+ "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
+ "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==",
+ "X-Amzn-Trace-Id": "Root=1-58b75890-12345678901234567890",
+ "X-Forwarded-For": "127.0.0.1, 127.0.0.2",
+ "X-Forwarded-Port": "443",
+ "X-Forwarded-Proto": "https"
+ },
+ "multiValueHeaders": {},
+ "requestContext": {
+ "path": "/prod/{proxy+}",
+ "accountId": "123456789012",
+ "resourceId": "123456",
+ "stage": "prod",
+ "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
+ "requestTime": "09/Apr/2015:12:34:56 +0000",
+ "requestTimeEpoch": 1428582896000,
+ "identity": {
+ "cognitoIdentityPoolId": null,
+ "accountId": null,
+ "cognitoIdentityId": null,
+ "caller": null,
+ "accessKey": null,
+ "sourceIp": "127.0.0.1",
+ "cognitoAuthenticationType": null,
+ "cognitoAuthenticationProvider": null,
+ "userArn": null,
+ "userAgent": "Custom User Agent String",
+ "user": null
+ },
+ "protocol": "HTTP/1.1",
+ "resourcePath": "/{proxy+}",
+ "httpMethod": "GET",
+ "apiId": "1234567890",
+ "extendedRequestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef"
+ }
+}