Spaces:
Running
Running
File size: 2,196 Bytes
d7dc26f |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 |
#!/usr/bin/env python3
"""
Quick test to verify LLM integration is working.

Runs four smoke tests in sequence:
  1. API keys are present in the environment.
  2. The LLMClient initializes and selects a provider.
  3. A basic chat completion round-trips.
  4. A NetworkConsultant consultation starts (or completes) from sample input.

Intended to be run directly; it prints pass/fail markers rather than raising.
"""
import os

from dotenv import load_dotenv

from agent.llm_client import LLMClient, LLMMessage
from agent.consultation import NetworkConsultant

# Load environment variables from a local .env file so the API keys below
# can be supplied without exporting them in the shell.
load_dotenv()

print("=" * 60)
print("Testing LLM Integration")
print("=" * 60)

# Test 1: Check API keys
print("\n1. Checking API keys...")
anthropic_key = os.getenv("ANTHROPIC_API_KEY")
openai_key = os.getenv("OPENAI_API_KEY")

if anthropic_key:
    # Print only a prefix of the key so the full secret never lands in logs.
    print(f"✅ Anthropic API key found: {anthropic_key[:20]}...")
else:
    print("❌ Anthropic API key not found")

if openai_key:
    print(f"✅ OpenAI API key found: {openai_key[:20]}...")
else:
    # OpenAI is a fallback provider, so a missing key is a warning, not a failure.
    print("⚠️ OpenAI API key not found (optional)")

# Test 2: Initialize LLM client
print("\n2. Initializing LLM client...")
llm = LLMClient()
print(f"✅ LLM client initialized with provider: {llm.provider}")

# Test 3: Simple chat test — low temperature to keep the reply deterministic.
print("\n3. Testing basic chat completion...")
try:
    messages = [
        LLMMessage(role="user", content="Reply with just 'Hello from Overgrowth!' and nothing else.")
    ]
    response = llm.chat(messages, temperature=0.1)
    # Truncate to 100 chars in case the model returns something verbose.
    print(f"✅ Response received: {response[:100]}")
except Exception as e:
    print(f"❌ Chat test failed: {e}")

# Test 4: Test consultation with a realistic multi-requirement prompt.
print("\n4. Testing network consultation...")
try:
    consultant = NetworkConsultant()
    test_input = """
We're a coffee shop chain with 3 locations. We need WiFi for customers,
POS systems with payment processing, security cameras, and secure VPN to HQ
for centralized management. Each location has ~50 customers at peak time.
"""
    # start_consultation returns (is_complete, output, intent): either the
    # intent was captured in one shot, or follow-up questions are returned.
    is_complete, output, intent = consultant.start_consultation(test_input)
    if is_complete:
        print("✅ Consultation completed immediately")
        print(f"\nIntent captured:\n{output}")
    else:
        print("✅ Consultation started - follow-up questions:")
        print(f"\n{output}")
except Exception as e:
    print(f"❌ Consultation test failed: {e}")
    # Full traceback helps debug consultation failures; imported lazily since
    # it is only needed on this error path.
    import traceback
    traceback.print_exc()

print("\n" + "=" * 60)
print("LLM Integration Test Complete!")
print("=" * 60)
|