diff --git a/python-test-samples/README.md b/python-test-samples/README.md
index e25b76f2..38649bec 100644
--- a/python-test-samples/README.md
+++ b/python-test-samples/README.md
@@ -12,6 +12,7 @@ This portion of the repository contains code samples for testing serverless appl
 |[Lambda local testing with Mocks](./lambda-mock)|This project contains unit tests for Lambda using mocks.|
 |[Lambda Layers with Mocks](./apigw-lambda-layer)|This project contains unit tests for Lambda layers using mocks.|
 |[API Gateway with Lambda and DynamoDB](./apigw-lambda-dynamodb)|This project contains unit and integration tests for a pattern using API Gateway, AWS Lambda and Amazon DynamoDB.|
+|[API Gateway mock local](./apigw-mock-local)|This project contains unit tests for a pattern that runs API Gateway locally, using pytest and mocks.|
 |[Schema and Contract Testing](./schema-and-contract-testing)|This project contains sample schema and contract tests for an event driven architecture.|
 |[Kinesis with Lambda and DynamoDB](./kinesis-lambda-dynamodb)|This project contains a example of testing an application with an Amazon Kinesis Data Stream.|
 |[SQS with Lambda](./apigw-sqs-lambda-sqs)|This project demonstrates testing SQS as a source and destination in an integration test|
diff --git a/python-test-samples/apigw-mock-local/README.md b/python-test-samples/apigw-mock-local/README.md
new file mode 100644
index 00000000..9063f7c3
--- /dev/null
+++ b/python-test-samples/apigw-mock-local/README.md
@@ -0,0 +1,389 @@
+[![python: 3.10](https://img.shields.io/badge/Python-3.10-green)](https://img.shields.io/badge/Python-3.10-green)
+[![AWS: API Gateway](https://img.shields.io/badge/AWS-API%20Gateway-blue)](https://img.shields.io/badge/AWS-API%20Gateway-blue)
+[![AWS: Lambda](https://img.shields.io/badge/AWS-Lambda-orange)](https://img.shields.io/badge/AWS-Lambda-orange)
+[![test: local](https://img.shields.io/badge/Test-Local-red)](https://img.shields.io/badge/Test-Local-red)
+
+# Local: Amazon API Gateway Mock Testing
+
+## Introduction
+
+This project demonstrates how to test Amazon API Gateway endpoints locally with the AWS SAM CLI. It showcases a simple mock implementation in Python 3.10 that returns predefined responses without requiring AWS credentials or external dependencies, making it ideal for rapid development and testing cycles.
+
+---
+
+## Contents
+
+- [Local: Amazon API Gateway Mock Testing](#local-amazon-api-gateway-mock-testing)
+  - [Introduction](#introduction)
+  - [Contents](#contents)
+  - [Architecture Overview](#architecture-overview)
+  - [Project Structure](#project-structure)
+  - [Prerequisites](#prerequisites)
+  - [Test Scenarios](#test-scenarios)
+  - [About the Test Process](#about-the-test-process)
+  - [Testing Workflows](#testing-workflows)
+  - [API Documentation](#api-documentation)
+  - [Additional Resources](#additional-resources)
+
+---
+
+## Architecture Overview
+
+<p align="center">
+  <img src="img/apigateway-mock.png" alt="API Gateway Mock Testing" />
+</p>
+
+Components:
+
+- API Gateway local emulator via SAM CLI
+- Python Lambda function returning JSON mock responses
+- Docker containers, managed by SAM CLI, that run the Lambda function locally
+- PyTest for automated testing
+
+---
+
+## Project Structure
+```
+├── events                                  # folder containing JSON event files
+├── img/apigateway-mock.png                 # visual architecture diagram
+├── lambda_mock_src/app.py                  # Python Lambda function source code
+├── tests/
+│   ├── unit/src/test_apigateway_local.py   # PyTest test definitions
+│   └── requirements.txt                    # pip dependencies for the tests
+├── template.yaml                           # SAM template defining the API Gateway and Lambda resources
+└── README.md                               # this instructions file
+```
+---
+
+## Prerequisites
+
+- Docker
+- AWS SAM CLI
+- Python 3.10 or newer
+- curl (for API testing)
+- Basic understanding of SAM, API Gateway and Lambda
+
+---
+
+## Test Scenarios
+
+### 1. Mock Response Validation
+
+- Tests the basic mock endpoint functionality
+- Verifies that API Gateway correctly routes requests to the Lambda function
+- Validates the JSON response format and content
+- Ensures proper HTTP status codes are returned
+
+### 2. Local API Gateway Behavior
+
+- Tests the local emulation of API Gateway routing
+- Verifies that the `/MOCK` endpoint is accessible via the GET method
+- Validates that the Lambda integration works correctly in the local environment
+
+### 3. PyTest Integration Tests (end-to-end Python tests)
+
+- **Basic API Gateway Test**: Validates API Gateway routing and Lambda integration
+- **Response Format Validation**: Tests proper JSON response structure
+- **Error Handling Test**: Validates behavior with invalid requests and methods
+- **Performance Metrics**: Measures API response times and consistency
+- **Concurrent Request Test**: Tests API behavior under concurrent load
+- **Input Validation**: Tests the API with various input scenarios
+
+---
+
+## About the Test Process
+
+The test process leverages SAM CLI to provide local emulation of AWS services:
+
+1. **SAM Local Setup**: SAM CLI starts a local API Gateway emulator that listens on port 3000 by default.
+
+2. **Lambda Function Loading**: The local emulator loads the Python Lambda function code from `lambda_mock_src/app.py` and creates a containerized runtime environment using Python 3.10.
+
+3. **API Route Mapping**: Based on the `template.yaml` configuration, SAM maps the `/MOCK` path with the GET method to the Lambda function.
+
+4. **Request Processing**: When a request is made to `http://127.0.0.1:3000/MOCK`, the local API Gateway:
+   - Receives the HTTP request
+   - Routes it to the appropriate Lambda function
+   - Executes the Python function in a Docker container
+   - Returns the response to the client
+
+5. **Response Validation**: Tests verify that the mock response is correctly formatted and contains the expected content.
+
+---
+
+## Testing Workflows
+
+### Setup Docker Environment
+
+> Make sure the Docker engine is running before you run the tests.
+
+```shell
+apigw-mock-local$ docker version
+Client: Docker Engine - Community
+ Version:           24.0.6
+ API version:       1.43
+(...)
+```
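+
+Optionally, verify that the emulator port is reachable before launching the test suite. Below is a minimal sketch of the same TCP port probe that the session fixture in `tests/unit/src/test_apigateway_local.py` performs; the host and port are the SAM CLI defaults used throughout this README:
+
+```python
+import socket
+
+def is_port_open(host: str, port: int, timeout: float = 5.0) -> bool:
+    """Return True when a TCP connection to host:port succeeds."""
+    try:
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+            s.settimeout(timeout)
+            return s.connect_ex((host, port)) == 0
+    except OSError:
+        return False
+
+# 127.0.0.1:3000 matches the `sam local start-api --port 3000` command used below
+print("SAM local reachable:", is_port_open("127.0.0.1", 3000))
+```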
+
+### Run the Unit Tests - End-to-End Python Test
+
+> Start the API Gateway emulator in a separate terminal:
+
+```shell
+apigw-mock-local$
+sam local start-api --port 3000 --docker-network host &
+```
+
+> Set up the Python environment:
+
+```shell
+apigw-mock-local$ cd tests
+python3 -m venv venv
+source venv/bin/activate
+pip install --upgrade pip
+pip install -r requirements.txt
+```
+
+#### Run the Unit Tests
+
+```shell
+apigw-mock-local/tests$
+python3 -m pytest -s unit/src/test_apigateway_local.py
+```
+
+Expected output:
+
+```shell
+========================================================= test session starts =========================================================
+platform linux -- Python 3.10.12, pytest-8.4.1, pluggy-1.6.0
+rootdir: /home/ubuntu/environment/python-test-samples/apigw-mock-local/tests
+plugins: timeout-2.4.0, xdist-3.8.0
+collected 9 items
+unit/src/test_apigateway_local.py SAM Local API Gateway is running on port 3000
+API Gateway endpoint is responding correctly
+API Gateway response: {'StatusCode': 200, 'Response': 'This is mock response', 'ResponseTime': 496ms}
+.API Gateway response format validation passed - all headers and format requirements met
+.Error handling test passed: Invalid endpoint returned status 403
+Error handling test passed: Wrong HTTP method returned status 403
+Error handling test passed: Unsupported HTTP method PUT returned status 403
+Error handling test passed: Unsupported HTTP method DELETE returned status 403
+API Gateway error handling test passed - all error scenarios handled appropriately
+.Performance metrics:
+  Average: 490ms
+  Min: 484ms
+  Max: 496ms
+  Consistency: All responses within acceptable range
+Performance test completed: avg=490ms, min=484ms, max=496ms
+.Concurrent requests test passed
+Results: Success_Rate=100.0%, Avg_Response_Time=1642ms, Successful=5/5
+.Input validation test 1 passed: Basic request
+Input validation test 2 passed: Request with query parameters
+Input validation test 3 passed: Request with custom headers
+Input validation test 4 passed: Request with Accept header
+Input validation test 5 passed: Combined request with params and headers
+Input validation test passed - 5 scenarios handled correctly
+.Server header present: Werkzeug/3.0.1 Python/3.11.3
+Response headers validation passed - all required headers present and properly formatted
+.Timeout test passed: Normal timeout - 478ms
+Timeout test passed: Standard timeout - 505ms
+Timeout test passed: Short timeout - 484ms
+Timeout handling test passed - all timeout scenarios handled correctly
+.Connection resilience test passed
+Results: Success_Rate=100.0%, Avg_Response_Time=499ms, Successful=10/10
+.
+
+========================================================== 9 passed in 18.17s ==========================================================
+```
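+
+The suite is easy to extend with table-driven cases. A hypothetical sketch of an extra test using `pytest.mark.parametrize` (the `api_client` fixture comes from `test_apigateway_local.py`; the expected status codes follow the sample run above, where unknown routes return 403 locally):
+
+```python
+import pytest
+import requests
+
+@pytest.mark.parametrize("path,expected_status", [
+    ("/MOCK", 200),     # the only route defined in template.yaml
+    ("/INVALID", 403),  # unknown routes surface as 403 in the local emulator
+])
+def test_route_status(api_client, path, expected_status):
+    # api_client resolves to http://127.0.0.1:3000 (see the fixtures in the test module)
+    response = requests.get(f"{api_client}{path}", timeout=10)
+    assert response.status_code == expected_status
+```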
+
+#### Clean up section
+
+> Clean up the Python virtual environment:
+
+```sh
+apigw-mock-local/tests$
+deactivate
+rm -rf venv/
+```
+
+> Stop the SAM local process:
+
+```sh
+apigw-mock-local$
+ps -axuf | grep '[s]am local start-api' | awk '{print $2}' | xargs -r kill
+```
+
+#### PyTest Debugging
+
+For more detailed debugging in pytest:
+
+```sh
+# Run with verbose output
+python -m pytest -s -v unit/src/test_apigateway_local.py
+
+# Run with debug logging
+python -m pytest -s -v unit/src/test_apigateway_local.py --log-cli-level=DEBUG
+
+# List the available individual tests
+python3 -m pytest unit/src/test_apigateway_local.py --collect-only
+
+# Run a specific pytest test
+python3 -m pytest -s unit/src/test_apigateway_local.py::test_api_basic_mock_response -v
+```
+
+---
+
+### Run Local API Testing
+
+> Start the API Gateway emulator:
+
+```shell
+apigw-mock-local$
+sam local start-api --port 3000 --docker-network host &
+```
+
+Expected output:
+```shell
+apigw-mock-local$
+Mounting LambdaMockFunction at http://127.0.0.1:3000/MOCK [GET]
+
+You can now browse to the above endpoints to invoke your functions. You do not need to restart/reload SAM CLI while working on your functions, changes will be reflected instantly/automatically. You only need to restart SAM CLI if you update your AWS SAM template
+2024-08-05 10:30:15  * Running on http://127.0.0.1:3000/ (Press CTRL+C to quit)
+```
+
+#### Test the Mock Endpoint
+
+```shell
+apigw-mock-local$
+curl -X GET http://127.0.0.1:3000/MOCK
+```
+
+Expected response:
+```json
+"This is mock response"
+```
+
+#### Test with Verbose Output
+
+```shell
+apigw-mock-local$
+curl -v -X GET http://127.0.0.1:3000/MOCK
+```
+
+Expected verbose output:
+```shell
+* Trying 127.0.0.1:3000...
+* Connected to 127.0.0.1 (127.0.0.1) port 3000 (#0)
+> GET /MOCK HTTP/1.1
+> Host: 127.0.0.1:3000
+> User-Agent: curl/8.0.1
+> Accept: */*
+>
+< HTTP/1.1 200 OK
+< Content-Type: application/json
+< Content-Length: 23
+< Server: Werkzeug/2.3.6 Python/3.9.18
+< Date: Mon, 05 Aug 2024 14:30:15 GMT
+<
+"This is mock response"
+```
+
+#### Test Invalid Endpoints
+
+```shell
+# Test a non-existent endpoint
+apigw-mock-local$
+curl -X GET http://127.0.0.1:3000/INVALID
+
+# Test a wrong HTTP method
+curl -X POST http://127.0.0.1:3000/MOCK
+```
+
+#### Clean up section
+
+> Stop the SAM local process:
+
+```sh
+apigw-mock-local$
+ps -axuf | grep '[s]am local start-api' | awk '{print $2}' | xargs -r kill
+```
+
+---
+
+### Fast Local Development for API Gateway
+
+#### Manual Lambda Function Testing
+
+You can test the Python Lambda function directly, without API Gateway:
+
+```sh
+# Invoke the Python Lambda function locally
+apigw-mock-local$
+sam local invoke LambdaMockFunction --event events/test-event.json
+```
+
+#### Debug API Gateway Routes
+
+```sh
+# Start the API emulator with debug logging to inspect route mappings
+apigw-mock-local$
+sam local start-api --port 3000 --docker-network host --debug &
+
+# Check the template syntax
+apigw-mock-local$
+sam validate --lint
+
+# Generate sample events for testing
+apigw-mock-local$
+sam local generate-event apigateway aws-proxy > events/sample-event.json
+
+# Build the application (Python dependencies)
+apigw-mock-local$
+sam build
+```
+
+---
+
+## API Documentation
+
+### Endpoints
+
+| Endpoint | Method | Response Type | Status Code | Description |
+|----------|--------|---------------|-------------|-------------|
+| `/MOCK`  | GET    | JSON string   | 200         | Returns a simple mock response message |
+
+### Response Examples
+
+**Successful Response (200):**
+```json
+"This is mock response"
+```
+
+**Invalid Endpoint (403):**
+```json
+{
+  "message": "Missing Authentication Token"
+}
+```
+
+### Request/Response Flow
+
+1. **Request**: `GET /MOCK`
+2. **API Gateway Processing**: Routes the request to the Python Lambda function
+3. **Lambda Execution**: The Python function returns the mock response with a 200 status code
+4. **Response**: JSON string containing the mock message
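+
+### Scripted Example (Python)
+
+The same flow can be exercised from Python. Below is a minimal sketch using the `requests` library; the base URL assumes the default `sam local start-api --port 3000` setup used throughout this README:
+
+```python
+import requests
+
+BASE_URL = "http://127.0.0.1:3000"  # default `sam local start-api` address
+
+# Happy path: GET /MOCK returns the JSON-encoded mock string
+response = requests.get(f"{BASE_URL}/MOCK", timeout=10)
+assert response.status_code == 200
+assert response.json() == "This is mock response"
+
+# Unknown route: returns 403 with a "Missing Authentication Token" message
+error = requests.get(f"{BASE_URL}/INVALID", timeout=10)
+print(error.status_code, error.json().get("message"))
+```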
+
+---
+
+## Additional Resources
+
+- [AWS SAM CLI Installation Guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
+- [SAM Local API Testing Guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-using-start-api.html)
+- [AWS API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/welcome.html)
+- [AWS Lambda Python Developer Guide](https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html)
+- [Python 3.10 Runtime Guide](https://docs.aws.amazon.com/lambda/latest/dg/python-runtime.html)
+- [SAM Template Specification](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification.html)
+- [Local Testing with SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-using-invoke.html)
+
+[Top](#contents)
\ No newline at end of file
diff --git a/python-test-samples/apigw-mock-local/events/test-event.json b/python-test-samples/apigw-mock-local/events/test-event.json
new file mode 100644
index 00000000..b7a88f21
--- /dev/null
+++ b/python-test-samples/apigw-mock-local/events/test-event.json
@@ -0,0 +1,7 @@
+{
+  "httpMethod": "GET",
+  "path": "/MOCK",
+  "headers": {
+    "Accept": "application/json"
+  }
+}
diff --git a/python-test-samples/apigw-mock-local/img/apigateway-mock.png b/python-test-samples/apigw-mock-local/img/apigateway-mock.png
new file mode 100644
index 00000000..d88090d4
Binary files /dev/null and b/python-test-samples/apigw-mock-local/img/apigateway-mock.png differ
diff --git a/python-test-samples/apigw-mock-local/lambda_mock_src/app.py b/python-test-samples/apigw-mock-local/lambda_mock_src/app.py
new file mode 100644
index 00000000..3fc2df22
--- /dev/null
+++ b/python-test-samples/apigw-mock-local/lambda_mock_src/app.py
@@ -0,0 +1,8 @@
+import json
+
+def lambda_handler(event, context):
+    response = {
+        'statusCode': 200,
+        'body': json.dumps('This is mock response')
+    }
+    return response
\ No newline at end of file
diff --git a/python-test-samples/apigw-mock-local/template.yaml b/python-test-samples/apigw-mock-local/template.yaml
new file mode 100755
index 00000000..724d4b77
--- /dev/null
+++ b/python-test-samples/apigw-mock-local/template.yaml
@@ -0,0 +1,25 @@
+AWSTemplateFormatVersion: '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: AWS SAM Template for API Gateway with Mock and Lambda integration using Python
+
+Resources:
+  # API Gateway with MOCK integration
+  APIGatewayMock:
+    Type: AWS::Serverless::Api
+    Properties:
+      StageName: Prod
+
+  # Lambda function to run the MOCK
+  LambdaMockFunction:
+    Type: AWS::Serverless::Function
+    Properties:
+      Handler: app.lambda_handler
+      Runtime: python3.10
+      CodeUri: lambda_mock_src/
+      Events:
+        EventMock:
+          Type: Api
+          Properties:
+            Path: /MOCK
+            Method: get
+            RestApiId: !Ref APIGatewayMock
\ No newline at end of file
diff --git a/python-test-samples/apigw-mock-local/tests/requirements.txt b/python-test-samples/apigw-mock-local/tests/requirements.txt
new file mode 100644
index 00000000..b3017b62
--- /dev/null
+++ b/python-test-samples/apigw-mock-local/tests/requirements.txt
@@ -0,0 +1,14 @@
+# Testing framework
+pytest>=8.0.0
+pytest-xdist>=3.5.0
+pytest-timeout>=2.3.0
+
+# HTTP client for API testing
+requests>=2.31.0
+
+# AWS SDK (for potential future extensions)
+boto3>=1.34.0
+botocore>=1.34.0
+
+# Additional testing utilities
+urllib3>=2.0.0
\ No newline at end of file
diff --git a/python-test-samples/apigw-mock-local/tests/unit/src/test_apigateway_local.py b/python-test-samples/apigw-mock-local/tests/unit/src/test_apigateway_local.py
new file mode 100644
index 00000000..82c9d18d
--- /dev/null
+++ b/python-test-samples/apigw-mock-local/tests/unit/src/test_apigateway_local.py
@@ -0,0 +1,539 @@
+import pytest
+import requests
+import time
+import socket
+import threading
+import queue
+
+
+@pytest.fixture(scope="session")
+def api_container():
+    """
+    Fixture to verify the SAM Local API Gateway emulator is running.
+    This fixture assumes the emulator is already started externally.
+    """
+    # Check whether the API Gateway emulator is listening on port 3000
+    def is_port_open(host, port):
+        try:
+            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+                s.settimeout(5)
+                result = s.connect_ex((host, port))
+                return result == 0
+        except OSError:
+            return False
+
+    if not is_port_open("127.0.0.1", 3000):
+        pytest.skip("SAM Local API Gateway emulator is not running on port 3000. Please start it with 'sam local start-api --port 3000'")
+
+    print("SAM Local API Gateway is running on port 3000")
+    yield "http://127.0.0.1:3000"
+
+
+@pytest.fixture(scope="session")
+def api_client():
+    """
+    Fixture to provide the base URL for API testing.
+    """
+    return "http://127.0.0.1:3000"
+
+
+@pytest.fixture(scope="session")
+def health_check(api_container, api_client):
+    """
+    Fixture to perform an initial health check of the API Gateway endpoint.
+    """
+    try:
+        response = requests.get(f"{api_client}/MOCK", timeout=10)
+
+        if response.status_code == 200:
+            print("API Gateway endpoint is responding correctly")
+            return True
+        else:
+            pytest.fail(f"API Gateway health check failed with status: {response.status_code}")
+
+    except Exception as e:
+        pytest.fail(f"API Gateway health check failed: {str(e)}")
+
+
+def test_api_basic_mock_response(api_client, health_check):
+    """
+    Test the basic API Gateway mock endpoint.
+    Validates the default mock response functionality.
+    """
+    # Make a GET request to the mock endpoint
+    start_time = time.time()
+    response = requests.get(f"{api_client}/MOCK", timeout=10)
+    end_time = time.time()
+
+    response_time = int((end_time - start_time) * 1000)
+
+    # Validate the HTTP response
+    assert response.status_code == 200, f"API request failed with status: {response.status_code}"
+
+    # Validate the response headers
+    assert 'content-type' in response.headers, "Response should contain Content-Type header"
+    content_type = response.headers.get('content-type', '').lower()
+    assert 'json' in content_type, f"Expected JSON content type, got: {content_type}"
+
+    # Validate the response content
+    try:
+        response_data = response.json()
+    except ValueError:
+        pytest.fail(f"Response is not valid JSON: {response.text}")
+
+    # Validate the mock response content
+    expected_response = "This is mock response"
+    assert response_data == expected_response, f"Expected '{expected_response}', got '{response_data}'"
+
+    # Validate the response time is reasonable
+    assert response_time < 5000, f"Response time too slow: {response_time}ms"
+
+    print(f"API Gateway response: {{'StatusCode': {response.status_code}, 'Response': '{response_data}', 'ResponseTime': {response_time}ms}}")
+
+
+def test_api_response_format_validation(api_client, health_check):
+    """
+    Test that the API Gateway response format is correct.
+    Validates headers, content type, and JSON structure.
+ """ + # Make request to mock endpoint + response = requests.get(f"{api_client}/MOCK", timeout=10) + + # Validate HTTP status code + assert response.status_code == 200, f"API request failed with status: {response.status_code}" + + # Validate response headers + required_headers = ['content-type', 'content-length'] + for header in required_headers: + assert header in response.headers, f"Response missing required header: {header}" + + # Validate content type + content_type = response.headers.get('content-type', '') + assert 'application/json' in content_type or 'json' in content_type, \ + f"Expected JSON content type, got: {content_type}" + + # Validate content length + content_length = int(response.headers.get('content-length', 0)) + assert content_length > 0, "Content-Length should be greater than 0" + + # Validate response body + response_text = response.text + assert len(response_text) > 0, "Response body should not be empty" + + # Validate JSON parsing + try: + response_data = response.json() + assert response_data is not None, "Parsed JSON should not be None" + except ValueError as e: + pytest.fail(f"Response body is not valid JSON: {e}") + + # Validate response encoding + assert response.encoding is not None, "Response should have encoding information" + + print("API Gateway response format validation passed - all headers and format requirements met") + + +def test_api_error_handling(api_client, health_check): + """ + Test API Gateway error handling with invalid requests. + Validates proper error responses for various edge cases. + """ + # Test scenarios with expected error responses + test_scenarios = [ + # Invalid endpoint + { + "url": f"{api_client}/INVALID", + "method": "GET", + "expected_status": [403, 404], + "description": "Invalid endpoint" + }, + # Wrong HTTP method + { + "url": f"{api_client}/MOCK", + "method": "POST", + "expected_status": [403, 405], + "description": "Wrong HTTP method" + }, + # Wrong HTTP method - PUT + { + "url": f"{api_client}/MOCK", + "method": "PUT", + "expected_status": [403, 405], + "description": "Unsupported HTTP method PUT" + }, + # Wrong HTTP method - DELETE + { + "url": f"{api_client}/MOCK", + "method": "DELETE", + "expected_status": [403, 405], + "description": "Unsupported HTTP method DELETE" + } + ] + + for scenario in test_scenarios: + try: + if scenario["method"] == "GET": + response = requests.get(scenario["url"], timeout=10) + elif scenario["method"] == "POST": + response = requests.post(scenario["url"], timeout=10) + elif scenario["method"] == "PUT": + response = requests.put(scenario["url"], timeout=10) + elif scenario["method"] == "DELETE": + response = requests.delete(scenario["url"], timeout=10) + + # Validate error status codes + assert response.status_code in scenario["expected_status"], \ + f"{scenario['description']}: Expected status {scenario['expected_status']}, got {response.status_code}" + + # Validate that error responses are still properly formatted + assert 'content-type' in response.headers, f"{scenario['description']}: Error response should have content-type" + + print(f"Error handling test passed: {scenario['description']} returned status {response.status_code}") + + except requests.RequestException as e: + # Network errors are acceptable for invalid requests + print(f"Network error for {scenario['description']}: {str(e)} (acceptable)") + + print("API Gateway error handling test passed - all error scenarios handled appropriately") + + +def test_api_performance_metrics(api_client, health_check): + """ + Test API Gateway 
performance and measure response metrics. + """ + # Perform multiple requests to measure performance consistency + response_times = [] + responses = [] + + for i in range(5): + start_time = time.time() + + response = requests.get(f"{api_client}/MOCK", timeout=10) + + end_time = time.time() + response_time = int((end_time - start_time) * 1000) # Convert to milliseconds + response_times.append(response_time) + + # Validate each response + assert response.status_code == 200, f"Request {i+1} failed with status: {response.status_code}" + + response_data = response.json() + responses.append(response_data) + + # Small delay between requests + if i < 4: + time.sleep(0.2) + + # Analyze performance metrics + avg_response_time = sum(response_times) / len(response_times) + min_response_time = min(response_times) + max_response_time = max(response_times) + + # Performance assertions (reasonable for API Gateway + Lambda) + assert avg_response_time < 10000, f"Average response time too slow: {avg_response_time}ms" + assert min_response_time < 5000, f"Minimum response time too slow: {min_response_time}ms" + + # Validate response consistency + expected_response = "This is mock response" + for i, response_data in enumerate(responses): + assert response_data == expected_response, f"Response {i+1} inconsistent: {response_data}" + + # Check for performance consistency (no response should be significantly slower) + max_acceptable_time = avg_response_time * 3 + for i, response_time in enumerate(response_times): + assert response_time < max_acceptable_time, \ + f"Request {i+1} response time ({response_time}ms) significantly slower than average ({avg_response_time}ms)" + + print(f"Performance metrics:") + print(f" Average: {int(avg_response_time)}ms") + print(f" Min: {min_response_time}ms") + print(f" Max: {max_response_time}ms") + print(f" Consistency: All responses within acceptable range") + + print(f"Performance test completed: avg={int(avg_response_time)}ms, min={min_response_time}ms, max={max_response_time}ms") + + +def test_api_concurrent_requests(api_client, health_check): + """ + Test concurrent API requests to validate thread safety and load handling. 
+ """ + results = queue.Queue() + num_threads = 5 + + def make_api_request(thread_id): + """Helper function for concurrent API requests""" + try: + start_time = time.time() + + response = requests.get(f"{api_client}/MOCK", timeout=15) + + end_time = time.time() + response_time = int((end_time - start_time) * 1000) + + # Parse response + response_data = response.json() + + results.put({ + 'thread_id': thread_id, + 'success': response.status_code == 200, + 'response_time': response_time, + 'response_data': response_data, + 'status_code': response.status_code + }) + + except Exception as e: + results.put({ + 'thread_id': thread_id, + 'success': False, + 'error': str(e), + 'response_time': 0 + }) + + # Start concurrent threads + threads = [] + for i in range(num_threads): + thread = threading.Thread(target=make_api_request, args=(i,)) + threads.append(thread) + thread.start() + + # Wait for all threads to complete + for thread in threads: + thread.join(timeout=30) + + # Analyze results + successful_requests = 0 + total_response_time = 0 + expected_response = "This is mock response" + + while not results.empty(): + result = results.get() + if result['success']: + successful_requests += 1 + total_response_time += result['response_time'] + + # Validate response consistency + assert result['response_data'] == expected_response, \ + f"Thread {result['thread_id']} returned inconsistent response: {result['response_data']}" + else: + print(f"Thread {result['thread_id']} failed: {result.get('error', 'Unknown error')}") + + success_rate = successful_requests / num_threads * 100 + avg_response_time = total_response_time / successful_requests if successful_requests > 0 else 0 + + # Validate concurrent performance + assert success_rate >= 90, f"Concurrent request success rate too low: {success_rate}%" + assert successful_requests >= num_threads - 1, f"Too many failed concurrent requests" + assert avg_response_time < 15000, f"Average concurrent response time too slow: {avg_response_time}ms" + + print(f"Concurrent requests test passed") + print(f"Results: Success_Rate={success_rate}%, Avg_Response_Time={int(avg_response_time)}ms, Successful={successful_requests}/{num_threads}") + + +def test_api_input_validation(api_client, health_check): + """ + Test API Gateway with various input scenarios and query parameters. 
+ """ + # Test scenarios with different request variations + test_scenarios = [ + # Basic request + { + "url": f"{api_client}/MOCK", + "params": None, + "headers": None, + "description": "Basic request" + }, + # Request with query parameters (should still work) + { + "url": f"{api_client}/MOCK", + "params": {"param1": "value1", "param2": "value2"}, + "headers": None, + "description": "Request with query parameters" + }, + # Request with custom headers + { + "url": f"{api_client}/MOCK", + "params": None, + "headers": {"User-Agent": "pytest-test", "X-Custom-Header": "test-value"}, + "description": "Request with custom headers" + }, + # Request with Accept header + { + "url": f"{api_client}/MOCK", + "params": None, + "headers": {"Accept": "application/json"}, + "description": "Request with Accept header" + }, + # Combined request + { + "url": f"{api_client}/MOCK", + "params": {"test": "combined"}, + "headers": {"Accept": "application/json", "X-Test": "true"}, + "description": "Combined request with params and headers" + } + ] + + expected_response = "This is mock response" + + for i, scenario in enumerate(test_scenarios): + try: + response = requests.get( + scenario["url"], + params=scenario["params"], + headers=scenario["headers"], + timeout=10 + ) + + # Validate basic response + assert response.status_code == 200, \ + f"{scenario['description']}: Expected status 200, got {response.status_code}" + + # Validate response content consistency + response_data = response.json() + assert response_data == expected_response, \ + f"{scenario['description']}: Response should be consistent regardless of input" + + # Validate response format + assert 'content-type' in response.headers, \ + f"{scenario['description']}: Response should have content-type header" + + print(f"Input validation test {i+1} passed: {scenario['description']}") + + except requests.RequestException as e: + pytest.fail(f"Input validation test failed for {scenario['description']}: {str(e)}") + + print(f"Input validation test passed - {len(test_scenarios)} scenarios handled correctly") + + +def test_api_response_headers_validation(api_client, health_check): + """ + Test that API Gateway returns appropriate response headers. 
+ """ + response = requests.get(f"{api_client}/MOCK", timeout=10) + + # Validate basic response + assert response.status_code == 200, f"API request failed with status: {response.status_code}" + + # Check for essential headers + essential_headers = ['content-type', 'content-length'] + for header in essential_headers: + assert header in response.headers, f"Missing essential header: {header}" + + # Validate content-type header + content_type = response.headers.get('content-type', '').lower() + valid_content_types = ['application/json', 'text/json', 'json'] + assert any(ct in content_type for ct in valid_content_types), \ + f"Invalid content-type for JSON response: {content_type}" + + # Validate content-length header + content_length = response.headers.get('content-length') + if content_length: + assert int(content_length) > 0, "Content-Length should be greater than 0" + assert int(content_length) == len(response.content), \ + "Content-Length should match actual content length" + + # Check for server header (optional but informative) + server_header = response.headers.get('server', '') + if server_header: + print(f"Server header present: {server_header}") + + # Validate that headers are properly formatted + for header_name, header_value in response.headers.items(): + assert isinstance(header_name, str), f"Header name should be string: {header_name}" + assert isinstance(header_value, str), f"Header value should be string: {header_value}" + assert len(header_name) > 0, f"Header name should not be empty" + + print("Response headers validation passed - all required headers present and properly formatted") + + +def test_api_timeout_handling(api_client, health_check): + """ + Test API Gateway timeout handling and response time limits. + """ + # Test with various timeout scenarios + timeout_scenarios = [ + {"timeout": 30, "description": "Normal timeout"}, + {"timeout": 10, "description": "Standard timeout"}, + {"timeout": 5, "description": "Short timeout"} + ] + + for scenario in timeout_scenarios: + try: + start_time = time.time() + response = requests.get(f"{api_client}/MOCK", timeout=scenario["timeout"]) + end_time = time.time() + + response_time = int((end_time - start_time) * 1000) + + # Validate response + assert response.status_code == 200, \ + f"{scenario['description']}: Request failed with status {response.status_code}" + + # Validate response time is within timeout + timeout_ms = scenario["timeout"] * 1000 + assert response_time < timeout_ms, \ + f"{scenario['description']}: Response time ({response_time}ms) exceeded timeout ({timeout_ms}ms)" + + # Validate response content + response_data = response.json() + expected_response = "This is mock response" + assert response_data == expected_response, \ + f"{scenario['description']}: Response content should be consistent" + + print(f"Timeout test passed: {scenario['description']} - {response_time}ms") + + except requests.Timeout: + pytest.fail(f"Request timed out for {scenario['description']} - this shouldn't happen for a mock endpoint") + except requests.RequestException as e: + pytest.fail(f"Request failed for {scenario['description']}: {str(e)}") + + print("Timeout handling test passed - all timeout scenarios handled correctly") + + +def test_api_connection_resilience(api_client, health_check): + """ + Test API Gateway connection resilience with rapid sequential requests. 
+ """ + # Make rapid sequential requests to test connection handling + num_requests = 10 + successful_requests = 0 + response_times = [] + + for i in range(num_requests): + try: + start_time = time.time() + response = requests.get(f"{api_client}/MOCK", timeout=10) + end_time = time.time() + + response_time = int((end_time - start_time) * 1000) + response_times.append(response_time) + + if response.status_code == 200: + successful_requests += 1 + + # Validate response content + response_data = response.json() + expected_response = "This is mock response" + assert response_data == expected_response, \ + f"Request {i+1}: Response content inconsistent" + + # Very small delay to make rapid requests + time.sleep(0.05) + + except requests.RequestException as e: + print(f"Request {i+1} failed: {str(e)}") + + success_rate = successful_requests / num_requests * 100 + avg_response_time = sum(response_times) / len(response_times) if response_times else 0 + + # Validate connection resilience + assert success_rate >= 90, f"Connection resilience test failed: success rate {success_rate}%" + assert successful_requests >= num_requests - 2, f"Too many failed requests: {successful_requests}/{num_requests}" + + if response_times: + assert avg_response_time < 8000, f"Average response time too slow under load: {avg_response_time}ms" + + print(f"Connection resilience test passed") + print(f"Results: Success_Rate={success_rate}%, Avg_Response_Time={int(avg_response_time)}ms, Successful={successful_requests}/{num_requests}") \ No newline at end of file