Spaces:
Running
Running
#!/usr/bin/env python3
"""
Quick test to verify LLM integration is working.

Runs four sequential smoke checks and prints a status line for each:
  1. presence of ANTHROPIC_API_KEY / OPENAI_API_KEY in the environment
  2. LLMClient construction (provider auto-selection)
  3. a minimal chat completion round-trip
  4. a NetworkConsultant consultation kickoff

Intended to be executed directly, not imported.
"""
import os
import traceback

from dotenv import load_dotenv

from agent.llm_client import LLMClient, LLMMessage
from agent.consultation import NetworkConsultant

# Load environment variables from a local .env file so the API keys below
# are visible via os.getenv().
load_dotenv()

print("=" * 60)
print("Testing LLM Integration")
print("=" * 60)

# Test 1: Check API keys
print("\n1. Checking API keys...")
anthropic_key = os.getenv("ANTHROPIC_API_KEY")
openai_key = os.getenv("OPENAI_API_KEY")

if anthropic_key:
    # Print only a prefix so the full secret never lands in logs.
    print(f"✅ Anthropic API key found: {anthropic_key[:20]}...")
else:
    print("❌ Anthropic API key not found")

if openai_key:
    print(f"✅ OpenAI API key found: {openai_key[:20]}...")
else:
    # OpenAI is a fallback provider, so a missing key is only a warning.
    print("⚠️ OpenAI API key not found (optional)")

# Test 2: Initialize LLM client
print("\n2. Initializing LLM client...")
llm = LLMClient()
print(f"✅ LLM client initialized with provider: {llm.provider}")

# Test 3: Simple chat test — low temperature for a deterministic-ish reply.
print("\n3. Testing basic chat completion...")
try:
    messages = [
        LLMMessage(role="user", content="Reply with just 'Hello from Overgrowth!' and nothing else.")
    ]
    response = llm.chat(messages, temperature=0.1)
    # Truncate in case the model rambles despite the instruction.
    print(f"✅ Response received: {response[:100]}")
except Exception as e:
    # This is a smoke script: report the failure and keep going so the
    # remaining checks still run.
    print(f"❌ Chat test failed: {e}")

# Test 4: Test consultation
print("\n4. Testing network consultation...")
try:
    consultant = NetworkConsultant()
    test_input = """
We're a coffee shop chain with 3 locations. We need WiFi for customers,
POS systems with payment processing, security cameras, and secure VPN to HQ
for centralized management. Each location has ~50 customers at peak time.
"""
    is_complete, output, intent = consultant.start_consultation(test_input)
    if is_complete:
        print("✅ Consultation completed immediately")
        print(f"\nIntent captured:\n{output}")
    else:
        # Needing follow-up questions is still a successful start.
        print("✅ Consultation started - follow-up questions:")
        print(f"\n{output}")
except Exception as e:
    print(f"❌ Consultation test failed: {e}")
    # Full traceback here — this path exercises the most project code, so
    # the error location matters more than in the chat test above.
    traceback.print_exc()

print("\n" + "=" * 60)
print("LLM Integration Test Complete!")
print("=" * 60)