diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b866f808..f78783ca 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -226,9 +226,86 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
           files: junit.xml
 
+  e2e:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v5
+
+      - name: Set up Python
+        uses: actions/setup-python@v6
+        with:
+          python-version: "3.13"
+
+      - name: Install uv
+        uses: astral-sh/setup-uv@v7
+        with:
+          enable-cache: true
+          cache-dependency-glob: pyproject.toml
+
+      - name: Install dependencies
+        run: make install-dev
+
+      - name: Start Kafka with devservices
+        run: |
+          .venv/bin/devservices up --mode default
+
+          echo "Waiting for Kafka to be ready..."
+          KAFKA_READY=false
+          for i in {1..30}; do
+            KAFKA_CONTAINER=$(docker ps -qf "name=kafka")
+            if [ -z "$KAFKA_CONTAINER" ]; then
+              echo "Waiting for Kafka container to start... attempt $i/30"
+              sleep 2
+              continue
+            fi
+
+            HEALTH_STATUS=$(docker inspect --format='{{.State.Health.Status}}' $KAFKA_CONTAINER 2>/dev/null || echo "none")
+            if [ "$HEALTH_STATUS" = "healthy" ]; then
+              echo "Kafka is ready!"
+              KAFKA_READY=true
+              break
+            fi
+            echo "Waiting for Kafka health check (status: $HEALTH_STATUS)... attempt $i/30"
+            sleep 2
+          done
+
+          if [ "$KAFKA_READY" = "false" ]; then
+            echo "ERROR: Kafka failed to become healthy after 60 seconds"
+            echo "=== Docker containers ==="
+            docker ps -a
+            echo "=== Kafka logs ==="
+            docker logs $(docker ps -aqf "name=kafka") --tail 100 || echo "Could not get Kafka logs"
+            exit 1
+          fi
+
+          docker ps
+
+      - name: Run E2E tests
+        run: |
+          # Start E2E stack (will connect to devservices Kafka)
+          docker compose -f docker-compose.e2e.yml up --build --abort-on-container-exit --exit-code-from e2e-tests
+        timeout-minutes: 15
+
+      - name: Show E2E logs on failure
+        if: failure()
+        run: |
+          echo "=== Launchpad logs ==="
+          docker compose -f docker-compose.e2e.yml logs launchpad
+          echo "=== Mock API logs ==="
+          docker compose -f docker-compose.e2e.yml logs mock-sentry-api
+          echo "=== E2E test logs ==="
+          docker compose -f docker-compose.e2e.yml logs e2e-tests
+          echo "=== Kafka logs ==="
+          docker logs $(docker ps -qf "name=kafka") --tail 100 || echo "Could not get Kafka logs"
+
+      - name: Cleanup E2E environment
+        if: always()
+        run: docker compose -f docker-compose.e2e.yml down -v
+
   build:
     runs-on: ubuntu-latest
-    needs: [check, test]
+    needs: [check, test, e2e]
 
     steps:
       - name: Checkout code
diff --git a/Makefile b/Makefile
index 9c620dbd..1d8307d9 100644
--- a/Makefile
+++ b/Makefile
@@ -29,6 +29,29 @@ test-unit:
 test-integration:
 	$(PYTHON_VENV) -m pytest -n auto tests/integration/ -v
 
+test-e2e: ## Run E2E tests with Docker Compose (requires devservices up)
+	@echo "Ensuring devservices Kafka is running..."
+	@if ! docker ps | grep -q kafka; then \
+		echo "Starting devservices..."; \
+		devservices up --mode default; \
+		sleep 10; \
+	else \
+		echo "Kafka already running"; \
+	fi
+	@echo "Starting E2E test environment..."
+	docker compose -f docker-compose.e2e.yml up --build --abort-on-container-exit --exit-code-from e2e-tests
+	@echo "Cleaning up E2E environment..."
+	docker compose -f docker-compose.e2e.yml down -v
+
+test-e2e-up: ## Start E2E environment (for debugging)
+	docker compose -f docker-compose.e2e.yml up --build
+
+test-e2e-down: ## Stop E2E environment
+	docker compose -f docker-compose.e2e.yml down -v
+
+test-e2e-logs: ## Show logs from E2E environment
+	docker compose -f docker-compose.e2e.yml logs -f
+
 coverage:
 	$(PYTHON_VENV) -m pytest tests/unit/ tests/integration/ -v --cov --cov-branch --cov-report=xml --junitxml=junit.xml
 
diff --git a/docker-compose.e2e.yml b/docker-compose.e2e.yml
new file mode 100644
index 00000000..b90dd41f
--- /dev/null
+++ b/docker-compose.e2e.yml
@@ -0,0 +1,123 @@
+version: '3.8'
+
+# Note: This E2E setup leverages your existing devservices Kafka
+# Run `devservices up` before starting these tests
+
+services:
+  # MinIO for ObjectStore (S3-compatible)
+  minio:
+    image: minio/minio:latest
+    ports:
+      - "9000:9000"
+      - "9001:9001"
+    environment:
+      MINIO_ROOT_USER: minioadmin
+      MINIO_ROOT_PASSWORD: minioadmin
+    command: server /data --console-address ":9001"
+    healthcheck:
+      test: ["CMD", "mc", "ready", "local"]
+      interval: 10s
+      timeout: 5s
+      retries: 3
+    volumes:
+      - minio-data:/data
+    networks:
+      - launchpad-e2e
+
+  # Mock Sentry API server
+  mock-sentry-api:
+    build:
+      context: .
+      dockerfile: tests/e2e/mock-sentry-api/Dockerfile
+    ports:
+      - "8000:8000"
+    environment:
+      PYTHONUNBUFFERED: "1"
+      MINIO_ENDPOINT: "http://minio:9000"
+      MINIO_ACCESS_KEY: "minioadmin"
+      MINIO_SECRET_KEY: "minioadmin"
+    depends_on:
+      minio:
+        condition: service_healthy
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 10s
+      timeout: 5s
+      retries: 3
+    volumes:
+      - mock-api-data:/app/data
+    networks:
+      - launchpad-e2e
+      - devservices
+
+  # Launchpad service
+  launchpad:
+    build:
+      context: .
+      dockerfile: Dockerfile
+      args:
+        TEST_BUILD: "true"  # Include test fixtures
+    ports:
+      - "2218:2218"
+    environment:
+      PYTHONUNBUFFERED: "1"
+      KAFKA_BOOTSTRAP_SERVERS: "kafka:9093"
+      KAFKA_GROUP_ID: "launchpad-e2e-test"
+      KAFKA_TOPICS: "preprod-artifact-events"
+      KAFKA_AUTO_OFFSET_RESET: "earliest"
+      LAUNCHPAD_HOST: "0.0.0.0"
+      LAUNCHPAD_PORT: "2218"
+      LAUNCHPAD_ENV: "e2e-test"
+      SENTRY_BASE_URL: "http://mock-sentry-api:8000"
+      OBJECTSTORE_URL: "http://minio:9000"
+      LAUNCHPAD_RPC_SHARED_SECRET: "test-secret-key-for-e2e"
+      SENTRY_DSN: ""  # Disable Sentry SDK in tests
+    depends_on:
+      mock-sentry-api:
+        condition: service_healthy
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:2218/health"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+      start_period: 30s
+    networks:
+      - launchpad-e2e
+      - devservices
+
+  # Test orchestrator
+  e2e-tests:
+    build:
+      context: .
+      dockerfile: tests/e2e/Dockerfile.test-runner
+    environment:
+      KAFKA_BOOTSTRAP_SERVERS: "kafka:9093"
+      MOCK_API_URL: "http://mock-sentry-api:8000"
+      LAUNCHPAD_URL: "http://launchpad:2218"
+      MINIO_ENDPOINT: "http://minio:9000"
+      MINIO_ACCESS_KEY: "minioadmin"
+      MINIO_SECRET_KEY: "minioadmin"
+      LAUNCHPAD_RPC_SHARED_SECRET: "test-secret-key-for-e2e"
+    depends_on:
+      launchpad:
+        condition: service_healthy
+      mock-sentry-api:
+        condition: service_healthy
+    volumes:
+      - ./tests:/app/tests
+      - ./tests/e2e/results:/app/results
+    command: pytest tests/e2e/test_e2e_flow.py -v --tb=short
+    networks:
+      - launchpad-e2e
+      - devservices
+
+volumes:
+  minio-data:
+  mock-api-data:
+
+networks:
+  launchpad-e2e:
+    name: launchpad-e2e
+  devservices:
+    name: devservices
+    external: true
diff --git a/tests/e2e/Dockerfile.test-runner b/tests/e2e/Dockerfile.test-runner
new file mode 100644
index 00000000..96a38c3d
--- /dev/null
+++ b/tests/e2e/Dockerfile.test-runner
@@ -0,0 +1,25 @@
+FROM python:3.13-slim
+
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Python test dependencies
+RUN pip install --no-cache-dir \
+    pytest==8.3.3 \
+    pytest-asyncio==0.24.0 \
+    confluent-kafka==2.5.3 \
+    requests==2.32.3 \
+    boto3==1.35.0
+
+# Copy test files
+COPY tests/e2e /app/tests/e2e
+COPY tests/_fixtures /app/tests/_fixtures
+
+# Create results directory
+RUN mkdir -p /app/results
+
+WORKDIR /app
diff --git a/tests/e2e/mock-sentry-api/Dockerfile b/tests/e2e/mock-sentry-api/Dockerfile
new file mode 100644
index 00000000..643a9406
--- /dev/null
+++ b/tests/e2e/mock-sentry-api/Dockerfile
@@ -0,0 +1,20 @@
+FROM python:3.13-slim
+
+WORKDIR /app
+
+# Install dependencies
+RUN pip install --no-cache-dir \
+    fastapi==0.115.0 \
+    uvicorn[standard]==0.32.0 \
+    pydantic==2.9.2 \
+    boto3==1.35.0
+
+# Copy mock API server code
+COPY tests/e2e/mock-sentry-api/server.py .
+
+# Create data directory for storing artifacts and results
+RUN mkdir -p /app/data/artifacts /app/data/results /app/data/chunks
+
+EXPOSE 8000
+
+CMD ["uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/tests/e2e/mock-sentry-api/server.py b/tests/e2e/mock-sentry-api/server.py
new file mode 100644
index 00000000..3f94ec54
--- /dev/null
+++ b/tests/e2e/mock-sentry-api/server.py
@@ -0,0 +1,329 @@
+"""Mock Sentry API server for E2E testing.
+
+This server simulates the Sentry monolith API endpoints that Launchpad interacts with:
+- Artifact download
+- Artifact updates
+- Size analysis uploads (chunked)
+- Chunk assembly
+"""
+
+import hashlib
+import hmac
+import json
+import os
+
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+from fastapi import FastAPI, Header, HTTPException, Request, Response, UploadFile
+from fastapi.responses import FileResponse, JSONResponse
+from pydantic import BaseModel
+
+app = FastAPI(title="Mock Sentry API for Launchpad E2E Tests")
+
+# Storage paths
+DATA_DIR = Path("/app/data")
+ARTIFACTS_DIR = DATA_DIR / "artifacts"
+RESULTS_DIR = DATA_DIR / "results"
+CHUNKS_DIR = DATA_DIR / "chunks"
+
+# Create directories
+for dir_path in [ARTIFACTS_DIR, RESULTS_DIR, CHUNKS_DIR]:
+    dir_path.mkdir(parents=True, exist_ok=True)
+
+# In-memory storage for test data
+artifacts_db: Dict[str, Dict[str, Any]] = {}
+size_analysis_db: Dict[str, Dict[str, Any]] = {}
+assembled_files: Dict[str, bytes] = {}
+
+# Expected RPC secret (should match docker-compose env var)
+RPC_SHARED_SECRET = os.getenv("LAUNCHPAD_RPC_SHARED_SECRET", "test-secret-key-for-e2e")
+
+
+def verify_rpc_signature(authorization: str, body: bytes) -> bool:
+    """Verify RPC signature from Authorization header."""
+    if not authorization or not authorization.startswith("rpcsignature rpc0:"):
+        return False
+
+    signature = authorization.replace("rpcsignature rpc0:", "")
+    expected_signature = hmac.new(RPC_SHARED_SECRET.encode("utf-8"), body, hashlib.sha256).hexdigest()
+
+    return signature == expected_signature
+
+
+@app.get("/health")
+async def health():
+    """Health check endpoint."""
+    return {"status": "ok", "service": "mock-sentry-api"}
+
+
+@app.head("/api/0/internal/{org}/{project}/files/preprodartifacts/{artifact_id}/")
+@app.get("/api/0/internal/{org}/{project}/files/preprodartifacts/{artifact_id}/")
+async def download_artifact(
+    org: str,
+    project: str,
+    artifact_id: str,
+    request: Request,
+    authorization: str = Header(None),
+):
+    """Download artifact file."""
+    # Check if artifact exists
+    artifact_path = ARTIFACTS_DIR / f"{artifact_id}.zip"
+
+    if not artifact_path.exists():
+        raise HTTPException(status_code=404, detail="Artifact not found")
+
+    # Handle HEAD request
+    if request.method == "HEAD":
+        file_size = artifact_path.stat().st_size
+        return Response(headers={"Content-Length": str(file_size)}, status_code=200)
+
+    # Handle Range requests for resumable downloads
+    range_header = request.headers.get("range")
+    if range_header:
+        # Parse range header (simplified implementation)
+        range_start = int(range_header.replace("bytes=", "").split("-")[0])
+        with open(artifact_path, "rb") as f:
+            f.seek(range_start)
+            content = f.read()
+        return Response(
+            content=content,
+            status_code=206,
+            headers={"Content-Range": f"bytes {range_start}-{len(content) - 1}/{artifact_path.stat().st_size}"},
+        )
+
+    return FileResponse(artifact_path)
+
+
+class UpdateRequest(BaseModel):
+    """Artifact update request model."""
+
+    pass  # Accept any fields
+
+
+@app.put("/api/0/internal/{org}/{project}/files/preprodartifacts/{artifact_id}/update/")
+async def update_artifact(
+    org: str,
+    project: str,
+    artifact_id: str,
+    request: Request,
+    authorization: str = Header(None),
+):
+    """Update artifact metadata."""
+    body = await request.body()
+
+    # Verify signature
+    if not verify_rpc_signature(authorization, body):
+        raise HTTPException(status_code=403, detail="Invalid signature")
+
+    data = json.loads(body)
+
+    # Store update in database
+    if artifact_id not in artifacts_db:
+        artifacts_db[artifact_id] = {}
+
+    artifacts_db[artifact_id].update(data)
+
+    # Track which fields were updated
+    updated_fields = list(data.keys())
+
+    return {"success": True, "artifactId": artifact_id, "updatedFields": updated_fields}
+
+
+class ChunkOptionsResponse(BaseModel):
+    """Chunk upload options response."""
+
+    url: str
+    chunkSize: int
+    chunksPerRequest: int
+    maxFileSize: int
+    maxRequestSize: int
+    concurrency: int
+    hashAlgorithm: str
+    compression: List[str]
+    accept: List[str]
+
+
+@app.get("/api/0/organizations/{org}/chunk-upload/")
+async def get_chunk_options(org: str):
+    """Get chunk upload configuration."""
+    return {
+        "url": f"/api/0/organizations/{org}/chunk-upload/",
+        "chunkSize": 8388608,  # 8MB
+        "chunksPerRequest": 64,
+        "maxFileSize": 2147483648,  # 2GB
+        "maxRequestSize": 33554432,  # 32MB
+        "concurrency": 8,
+        "hashAlgorithm": "sha1",
+        "compression": ["gzip"],
+        "accept": ["*"],
+    }
+
+
+@app.post("/api/0/organizations/{org}/chunk-upload/")
+async def upload_chunk(
+    org: str,
+    file: UploadFile,
+    authorization: str = Header(None),
+):
+    """Upload a file chunk."""
+    # Read chunk data
+    chunk_data = await file.read()
+
+    # Calculate checksum
+    checksum = hashlib.sha1(chunk_data).hexdigest()
+
+    # Store chunk
+    chunk_path = CHUNKS_DIR / checksum
+    chunk_path.write_bytes(chunk_data)
+
+    # Return 200 if successful, 409 if already exists
+    return JSONResponse({"checksum": checksum}, status_code=200)
+
+
+class AssembleRequest(BaseModel):
+    """Assembly request model."""
+
+    checksum: str
+    chunks: List[str]
+    assemble_type: str
+
+
+@app.post("/api/0/internal/{org}/{project}/files/preprodartifacts/{artifact_id}/assemble-generic/")
+async def assemble_file(
+    org: str,
+    project: str,
+    artifact_id: str,
+    request: Request,
+    authorization: str = Header(None),
+):
+    """Assemble uploaded chunks into complete file."""
+    body = await request.body()
+
+    # Verify signature
+    if not verify_rpc_signature(authorization, body):
+        raise HTTPException(status_code=403, detail="Invalid signature")
+
+    data = json.loads(body)
+    checksum = data["checksum"]
+    chunks = data["chunks"]
+    assemble_type = data["assemble_type"]
+
+    # Check which chunks are missing
+    missing_chunks = []
+    for chunk_checksum in chunks:
+        chunk_path = CHUNKS_DIR / chunk_checksum
+        if not chunk_path.exists():
+            missing_chunks.append(chunk_checksum)
+
+    if missing_chunks:
+        return {"state": "not_found", "missingChunks": missing_chunks}
+
+    # Assemble the file
+    file_data = b""
+    for chunk_checksum in chunks:
+        chunk_path = CHUNKS_DIR / chunk_checksum
+        file_data += chunk_path.read_bytes()
+
+    # Verify checksum
+    actual_checksum = hashlib.sha1(file_data).hexdigest()
+    if actual_checksum != checksum:
+        return {
+            "state": "error",
+            "missingChunks": [],
+            "detail": f"Checksum mismatch: expected {checksum}, got {actual_checksum}",
+        }
+
+    # Store assembled file
+    if assemble_type == "size_analysis":
+        result_path = RESULTS_DIR / f"{artifact_id}_size_analysis.json"
+        result_path.write_bytes(file_data)
+
+        # Parse and store in database
+        try:
+            size_analysis_db[artifact_id] = json.loads(file_data.decode("utf-8"))
+        except Exception as e:
+            print(f"Error parsing size analysis: {e}")
+
+    elif assemble_type == "installable_app":
+        app_path = RESULTS_DIR / f"{artifact_id}_app"
+        app_path.write_bytes(file_data)
+
+    return {"state": "ok", "missingChunks": []}
+
+
+class PutSizeRequest(BaseModel):
+    """Size analysis update request."""
+
+    pass  # Accept any fields
+
+
+@app.put("/api/0/internal/{org}/{project}/files/preprodartifacts/{artifact_id}/size/")
+@app.put("/api/0/internal/{org}/{project}/files/preprodartifacts/{artifact_id}/size/{identifier}/")
+async def update_size_analysis(
+    org: str,
+    project: str,
+    artifact_id: str,
+    request: Request,
+    identifier: Optional[str] = None,
+    authorization: str = Header(None),
+):
+    """Update size analysis metadata."""
+    body = await request.body()
+
+    # Verify signature
+    if not verify_rpc_signature(authorization, body):
+        raise HTTPException(status_code=403, detail="Invalid signature")
+
+    data = json.loads(body)
+
+    # Store in database
+    key = f"{artifact_id}:{identifier}" if identifier else artifact_id
+    if key not in size_analysis_db:
+        size_analysis_db[key] = {}
+    size_analysis_db[key].update(data)
+
+    return {"artifactId": artifact_id}
+
+
+# Test helper endpoints (not part of real Sentry API)
+
+
+@app.post("/test/upload-artifact/{artifact_id}")
+async def test_upload_artifact(artifact_id: str, file: UploadFile):
+    """Test helper: Upload an artifact file for testing."""
+    artifact_path = ARTIFACTS_DIR / f"{artifact_id}.zip"
+
+    with open(artifact_path, "wb") as f:
+        content = await file.read()
+        f.write(content)
+
+    return {"artifact_id": artifact_id, "size": len(content)}
+
+
+@app.get("/test/results/{artifact_id}")
+async def test_get_results(artifact_id: str):
+    """Test helper: Get analysis results for an artifact."""
+    return {
+        "artifact_metadata": artifacts_db.get(artifact_id, {}),
+        "size_analysis": size_analysis_db.get(artifact_id, {}),
+        "has_size_analysis_file": (RESULTS_DIR / f"{artifact_id}_size_analysis.json").exists(),
+        "has_installable_app": (RESULTS_DIR / f"{artifact_id}_app").exists(),
+    }
+
+
+@app.get("/test/results/{artifact_id}/size-analysis-raw")
+async def test_get_size_analysis_raw(artifact_id: str):
+    """Test helper: Get raw size analysis JSON."""
+    result_path = RESULTS_DIR / f"{artifact_id}_size_analysis.json"
+
+    if not result_path.exists():
+        raise HTTPException(status_code=404, detail="Size analysis not found")
+
+    return JSONResponse(json.loads(result_path.read_text()))
+
+
+if __name__ == "__main__":
+    import uvicorn
+
+    uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/tests/e2e/test_e2e_flow.py b/tests/e2e/test_e2e_flow.py
new file mode 100644
index 00000000..8422b053
--- /dev/null
+++ b/tests/e2e/test_e2e_flow.py
@@ -0,0 +1,269 @@
+"""End-to-end tests for Launchpad service.
+
+Tests the full flow:
+1. Upload test artifact to mock API
+2. Send Kafka message to trigger processing
+3. Wait for Launchpad to process
+4. Verify results via mock API
+"""
+
+import json
+import os
+import time
+
+from pathlib import Path
+from typing import Any, Dict
+
+import pytest
+import requests
+
+from confluent_kafka import Producer
+
+# Configuration from environment
+KAFKA_BOOTSTRAP_SERVERS = os.getenv("KAFKA_BOOTSTRAP_SERVERS", "kafka:9093")
+MOCK_API_URL = os.getenv("MOCK_API_URL", "http://mock-sentry-api:8000")
+LAUNCHPAD_URL = os.getenv("LAUNCHPAD_URL", "http://launchpad:2218")
+KAFKA_TOPIC = "preprod-artifact-events"
+
+# Test fixtures
+FIXTURES_DIR = Path(__file__).parent.parent / "_fixtures"
+IOS_FIXTURE = FIXTURES_DIR / "ios" / "HackerNews.xcarchive.zip"
+ANDROID_APK_FIXTURE = FIXTURES_DIR / "android" / "hn.apk"
+ANDROID_AAB_FIXTURE = FIXTURES_DIR / "android" / "hn.aab"
+
+
+def wait_for_service(url: str, timeout: int = 60, service_name: str = "service") -> None:
+    """Wait for a service to be healthy."""
+    start_time = time.time()
+    while time.time() - start_time < timeout:
+        try:
+            response = requests.get(f"{url}/health", timeout=5)
+            if response.status_code == 200:
+                print(f"✓ {service_name} is healthy")
+                return
+        except requests.exceptions.RequestException:
+            pass
+        time.sleep(2)
+    raise TimeoutError(f"{service_name} did not become healthy within {timeout}s")
+
+
+def upload_artifact_to_mock_api(artifact_id: str, file_path: Path) -> None:
+    """Upload an artifact file to the mock API."""
+    with open(file_path, "rb") as f:
+        files = {"file": (file_path.name, f, "application/zip")}
+        response = requests.post(f"{MOCK_API_URL}/test/upload-artifact/{artifact_id}", files=files, timeout=30)
+    response.raise_for_status()
+    print(f"✓ Uploaded artifact {artifact_id} ({file_path.name})")
+
+
+def send_kafka_message(artifact_id: str, org: str, project: str, features: list[str]) -> None:
+    """Send a Kafka message to trigger artifact processing."""
+    producer = Producer({"bootstrap.servers": KAFKA_BOOTSTRAP_SERVERS, "client.id": "e2e-test-producer"})
+
+    message = {
+        "artifact_id": artifact_id,
+        "organization_id": org,
+        "project_id": project,
+        "requested_features": features,
+    }
+
+    producer.produce(KAFKA_TOPIC, key=artifact_id.encode("utf-8"), value=json.dumps(message).encode("utf-8"))
+    producer.flush(timeout=10)
+    print(f"✓ Sent Kafka message for artifact {artifact_id}")
+
+
+def wait_for_processing(artifact_id: str, timeout: int = 120, check_interval: int = 3) -> Dict[str, Any]:
+    """Wait for artifact processing to complete and return results."""
+    start_time = time.time()
+    last_status = None
+
+    while time.time() - start_time < timeout:
+        try:
+            response = requests.get(f"{MOCK_API_URL}/test/results/{artifact_id}", timeout=10)
+            response.raise_for_status()
+            results = response.json()
+
+            # Check if processing is complete
+            # We consider it complete when artifact metadata has been updated
+            if results.get("artifact_metadata"):
+                print(f"✓ Processing completed for {artifact_id}")
+                return results
+
+            # Show progress
+            current_status = json.dumps(results, sort_keys=True)
+            if current_status != last_status:
+                print(f"  Waiting for processing... (results so far: {results})")
(results so far: {results})") + last_status = current_status + + except requests.exceptions.RequestException as e: + print(f" Error checking results: {e}") + + time.sleep(check_interval) + + raise TimeoutError(f"Artifact {artifact_id} was not processed within {timeout}s") + + +def get_size_analysis_raw(artifact_id: str) -> Dict[str, Any]: + """Get the raw size analysis JSON for an artifact.""" + response = requests.get(f"{MOCK_API_URL}/test/results/{artifact_id}/size-analysis-raw", timeout=10) + response.raise_for_status() + return response.json() + + +class TestE2EFlow: + """End-to-end tests for full Launchpad service flow.""" + + @classmethod + def setup_class(cls): + """Wait for all services to be ready before running tests.""" + print("\n=== Waiting for services to be ready ===") + wait_for_service(MOCK_API_URL, service_name="Mock Sentry API") + wait_for_service(LAUNCHPAD_URL, service_name="Launchpad") + print("=== All services ready ===\n") + + def test_ios_xcarchive_full_flow(self): + """Test full flow with iOS .xcarchive.zip file.""" + if not IOS_FIXTURE.exists(): + pytest.skip(f"iOS fixture not found: {IOS_FIXTURE}") + + artifact_id = "test-ios-001" + org = "test-org" + project = "test-ios-project" + + print("\n=== Testing iOS .xcarchive.zip E2E flow ===") + + # Step 1: Upload artifact to mock API + upload_artifact_to_mock_api(artifact_id, IOS_FIXTURE) + + # Step 2: Send Kafka message + send_kafka_message(artifact_id, org, project, ["size_analysis"]) + + # Step 3: Wait for processing + results = wait_for_processing(artifact_id, timeout=180) + + # Step 4: Verify results + print("\n=== Verifying results ===") + + # Check artifact metadata was updated + assert results["artifact_metadata"], "Artifact metadata should be updated" + metadata = results["artifact_metadata"] + + # Verify basic metadata + assert "app_name" in metadata or "appName" in metadata, "App name should be present" + assert "app_id" in metadata or "appId" in metadata, "App ID should be present" + assert "build_version" in metadata or "buildVersion" in metadata, "Build version should be present" + + # Check size analysis was uploaded + assert results["has_size_analysis_file"], "Size analysis file should be uploaded" + + # Verify size analysis contents + size_analysis = get_size_analysis_raw(artifact_id) + assert "total_size" in size_analysis, "Size analysis should contain total_size" + assert "insights" in size_analysis, "Size analysis should contain insights" + assert "treemap" in size_analysis, "Size analysis should contain treemap" + + # Verify insights were generated + insights = size_analysis["insights"] + assert len(insights) > 0, "Should generate at least one insight" + + print("✓ iOS E2E test passed!") + print(f" - Total size: {size_analysis.get('total_size', 'N/A')} bytes") + print(f" - Insights generated: {len(insights)}") + print(f" - App name: {metadata.get('app_name') or metadata.get('appName')}") + + def test_android_apk_full_flow(self): + """Test full flow with Android .apk file.""" + if not ANDROID_APK_FIXTURE.exists(): + pytest.skip(f"Android APK fixture not found: {ANDROID_APK_FIXTURE}") + + artifact_id = "test-android-apk-001" + org = "test-org" + project = "test-android-project" + + print("\n=== Testing Android .apk E2E flow ===") + + # Step 1: Upload artifact to mock API + upload_artifact_to_mock_api(artifact_id, ANDROID_APK_FIXTURE) + + # Step 2: Send Kafka message + send_kafka_message(artifact_id, org, project, ["size_analysis"]) + + # Step 3: Wait for processing + results = 
+
+        # Step 4: Verify results
+        print("\n=== Verifying results ===")
+
+        # Check artifact metadata was updated
+        assert results["artifact_metadata"], "Artifact metadata should be updated"
+        metadata = results["artifact_metadata"]
+
+        # Verify basic metadata
+        assert "app_name" in metadata or "appName" in metadata, "App name should be present"
+        assert "app_id" in metadata or "appId" in metadata, "App ID should be present"
+
+        # Check size analysis was uploaded
+        assert results["has_size_analysis_file"], "Size analysis file should be uploaded"
+
+        # Verify size analysis contents
+        size_analysis = get_size_analysis_raw(artifact_id)
+        assert "total_size" in size_analysis, "Size analysis should contain total_size"
+        assert "insights" in size_analysis, "Size analysis should contain insights"
+
+        print("✓ Android APK E2E test passed!")
+        print(f"  - Total size: {size_analysis.get('total_size', 'N/A')} bytes")
+        print(f"  - Insights generated: {len(size_analysis['insights'])}")
+        print(f"  - App name: {metadata.get('app_name') or metadata.get('appName')}")
+
+    def test_android_aab_full_flow(self):
+        """Test full flow with Android .aab file."""
+        if not ANDROID_AAB_FIXTURE.exists():
+            pytest.skip(f"Android AAB fixture not found: {ANDROID_AAB_FIXTURE}")
+
+        artifact_id = "test-android-aab-001"
+        org = "test-org"
+        project = "test-android-project"
+
+        print("\n=== Testing Android .aab E2E flow ===")
+
+        # Step 1: Upload artifact to mock API
+        upload_artifact_to_mock_api(artifact_id, ANDROID_AAB_FIXTURE)
+
+        # Step 2: Send Kafka message
+        send_kafka_message(artifact_id, org, project, ["size_analysis"])
+
+        # Step 3: Wait for processing
+        results = wait_for_processing(artifact_id, timeout=180)
+
+        # Step 4: Verify results
+        print("\n=== Verifying results ===")
+
+        # Check artifact metadata was updated
+        assert results["artifact_metadata"], "Artifact metadata should be updated"
+        metadata = results["artifact_metadata"]
+
+        # Verify basic metadata
+        assert "app_name" in metadata or "appName" in metadata, "App name should be present"
+        assert "app_id" in metadata or "appId" in metadata, "App ID should be present"
+
+        # Check size analysis was uploaded
+        assert results["has_size_analysis_file"], "Size analysis file should be uploaded"
+
+        # Verify size analysis contents
+        size_analysis = get_size_analysis_raw(artifact_id)
+        assert "total_size" in size_analysis, "Size analysis should contain total_size"
+        assert "insights" in size_analysis, "Size analysis should contain insights"
+
+        print("✓ Android AAB E2E test passed!")
+        print(f"  - Total size: {size_analysis.get('total_size', 'N/A')} bytes")
+        print(f"  - Insights generated: {len(size_analysis['insights'])}")
+        print(f"  - App name: {metadata.get('app_name') or metadata.get('appName')}")
+
+    def test_launchpad_health_check(self):
+        """Verify Launchpad service is healthy."""
+        response = requests.get(f"{LAUNCHPAD_URL}/health", timeout=10)
+        assert response.status_code == 200
+        data = response.json()
+        assert data["service"] == "launchpad"
+        assert data["status"] == "ok"
+        print("✓ Launchpad health check passed")