#!/usr/bin/env python3
"""Test the full pipeline with LLM integration."""
import os

from dotenv import load_dotenv

from agent.pipeline_engine import OvergrowthPipeline


def _preview(value, limit=500):
    """Return at most *limit* characters of *value*, stringifying non-str values.

    Pipeline stages may return a dict (or other object) instead of the
    expected string; slicing a dict raises TypeError, so coerce first.
    """
    text = value if isinstance(value, str) else str(value)
    return text[:limit]


def main():
    """Run the full Overgrowth pipeline on a sample brief and print previews."""
    load_dotenv()  # load API keys / config from .env before building the pipeline

    print("=" * 70)
    print("OVERGROWTH PIPELINE - FULL TEST")
    print("=" * 70)

    # Test input
    test_input = """
We're a coffee shop chain with 3 locations. We need WiFi for customers,
POS systems with payment processing, security cameras, and secure VPN
to HQ for centralized management. Each location has ~50 customers at
peak time.
"""

    pipeline = OvergrowthPipeline()

    print("\nšŸ¤ Stage 1: Consultation")
    print("-" * 70)
    results = pipeline.run_full_pipeline(test_input)

    print("\nšŸ“‹ Source of Truth Generated:")
    print("-" * 70)
    print(_preview(results.get('model')))

    print("\nšŸ›’ Bill of Materials:")
    print("-" * 70)
    # Same isinstance guard as the model preview: a non-str BOM must not crash the test.
    print(_preview(results.get('shopping_list', '')))

    print("\nšŸ“Š Diagrams:")
    print("-" * 70)
    diagrams = results.get('diagrams', {})
    if diagrams.get('summary'):
        print(_preview(diagrams['summary'], 300))

    print("\n" + "=" * 70)
    print("PIPELINE TEST COMPLETE")
    print("=" * 70)
    print("\nFiles created:")
    print(" - infra/network_model.yaml")
    print(" - infra/bill_of_materials.json")
    print(" - infra/setup_guide.md")


if __name__ == "__main__":
    main()