#!/usr/bin/env python3
"""
Test script to validate CHORUS integration with the ResetData API.
This exercises the reasoning endpoint to ensure ResetData is working correctly.
"""

import os
import sys
import time

import requests


def wait_for_chorus_health(base_url="http://localhost:8081", timeout=120):
    """Wait for CHORUS to report healthy."""
    print(f"šŸ„ Waiting for CHORUS health endpoint at {base_url}/health...")

    start_time = time.time()
    while time.time() - start_time < timeout:
        try:
            response = requests.get(f"{base_url}/health", timeout=5)
            if response.status_code == 200:
                print("āœ… CHORUS is healthy!")
                return True
        except requests.exceptions.RequestException:
            pass

        print("ā³ Waiting for CHORUS to start...")
        time.sleep(5)

    print("āŒ Timeout waiting for CHORUS health endpoint")
    return False


def test_reasoning_endpoint(base_url="http://localhost:8080"):
    """Test the reasoning endpoint to ensure the ResetData integration works."""
    # Test prompt for ResetData
    test_prompt = "What is the capital of Australia? Please provide a brief answer."

    print(f"🧠 Testing reasoning endpoint with prompt: '{test_prompt}'")

    try:
        # Make a request to the CHORUS reasoning endpoint.
        # Note: this endpoint may not exist yet; it is a placeholder for testing.
        response = requests.post(
            f"{base_url}/api/reasoning/generate",
            json={
                "prompt": test_prompt,
                "model": "meta/llama-3.1-8b-instruct"
            },
            timeout=30
        )

        if response.status_code == 200:
            result = response.json()
            print("āœ… ResetData integration test successful!")
            print(f"šŸ“ Response: {result.get('response', 'No response field')}")
            return True
        else:
            print(f"āŒ API returned status {response.status_code}: {response.text}")
            return False

    except requests.exceptions.RequestException as e:
        print(f"āŒ Request failed: {e}")
        return False


def test_direct_resetdata_api():
    """Test the ResetData API directly to ensure the API key works."""
    print("🌐 Testing ResetData API directly...")

    api_key = os.getenv("RESETDATA_API_KEY")
    if not api_key:
        print("āŒ RESETDATA_API_KEY not set")
        return False

    try:
        from openai import OpenAI

        client = OpenAI(
            base_url="https://models.au-syd.resetdata.ai/v1",
            api_key=api_key
        )

        response = client.chat.completions.create(
            model="meta/llama-3.1-8b-instruct:ptu-9f3627a0-4909-4561-8996-272774e91fc8",
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": "What is 2+2? Answer briefly."}
            ],
            temperature=0.2,
            top_p=0.7,
            max_tokens=50
        )

        if response.choices and response.choices[0].message.content:
            print("āœ… Direct ResetData API test successful!")
            print(f"šŸ“ Response: {response.choices[0].message.content}")
            return True
        else:
            print("āŒ No valid response from ResetData API")
            return False

    except Exception as e:
        print(f"āŒ Direct ResetData API test failed: {e}")
        return False


def main():
    print("šŸš€ CHORUS ResetData Integration Test")
    print("=" * 50)

    # First test the ResetData API directly
    if not test_direct_resetdata_api():
        print("āŒ Direct ResetData API test failed - check your API key")
        return False

    print("\n" + "=" * 50)

    # Wait for CHORUS to be healthy
    if not wait_for_chorus_health():
        print("āŒ CHORUS health check failed")
        return False

    print("\n" + "=" * 50)

    # Test the CHORUS reasoning endpoint (if it exists).
    # Note: it may not be implemented yet, so treat this step as optional.
    print("🧠 Testing CHORUS reasoning integration...")
    print("ā„¹ļø Note: Reasoning endpoint may not be implemented yet")

    success = test_reasoning_endpoint()

    print("\n" + "=" * 50)
    print("šŸŽÆ Integration Test Summary:")
    print("   Direct ResetData API: āœ…")
    print("   CHORUS Health Check: āœ…")
    print(f"   CHORUS Reasoning: {'āœ…' if success else 'ā“ (endpoint may not exist)'}")

    return True


if __name__ == "__main__":
    try:
        success = main()
        sys.exit(0 if success else 1)
    except KeyboardInterrupt:
        print("\nāš ļø Test interrupted by user")
        sys.exit(1)