Phase 2 build initial
667	hcfs-python/hcfs/sdk/async_client.py	Normal file
@@ -0,0 +1,667 @@
"""
HCFS Asynchronous Client

High-level asynchronous client for HCFS API operations with WebSocket support.
"""

import asyncio
import json
import time
from typing import List, Optional, Dict, Any, AsyncIterator, Callable
from datetime import datetime

import httpx
import websockets
from websockets.exceptions import ConnectionClosed, WebSocketException

from .models import (
    Context, SearchResult, ContextFilter, PaginationOptions,
    SearchOptions, ClientConfig, AnalyticsData, BatchResult, StreamEvent
)
from .exceptions import (
    HCFSError, HCFSConnectionError, HCFSAuthenticationError,
    HCFSNotFoundError, HCFSValidationError, HCFSStreamError, handle_api_error
)
from .utils import MemoryCache, validate_path, normalize_path
from .decorators import cached_context, retry_on_failure, rate_limited


class HCFSAsyncClient:
    """
    Asynchronous HCFS API client with WebSocket streaming capabilities.

    This client provides async/await support for all operations and includes
    real-time streaming capabilities through WebSocket connections.

    Example:
        >>> import asyncio
        >>> from hcfs.sdk import HCFSAsyncClient, Context
        >>>
        >>> async def main():
        ...     async with HCFSAsyncClient(
        ...         base_url="https://api.hcfs.example.com",
        ...         api_key="your-api-key"
        ...     ) as client:
        ...         # Create a context
        ...         context = Context(
        ...             path="/docs/async_readme",
        ...             content="Async README content",
        ...             summary="Async documentation"
        ...         )
        ...         created = await client.create_context(context)
        ...
        ...         # Search (returns a plain list, so iterate with a regular for loop)
        ...         results = await client.search_contexts("async README")
        ...         for result in results:
        ...             print(f"Found: {result.context.path}")
        >>>
        >>> asyncio.run(main())
    """

    def __init__(self, config: Optional[ClientConfig] = None, **kwargs):
        """
        Initialize async HCFS client.

        Args:
            config: Client configuration object
            **kwargs: Configuration overrides
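
        Example (a minimal sketch; ``base_url``, ``api_key``, and ``timeout``
        are fields this module itself reads from the config, other
        ``ClientConfig`` fields may differ):

            >>> client = HCFSAsyncClient(
            ...     base_url="https://api.hcfs.example.com",
            ...     api_key="your-api-key",
            ...     timeout=30.0
            ... )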
"""
|
||||
# Merge configuration
|
||||
if config:
|
||||
self.config = config
|
||||
else:
|
||||
self.config = ClientConfig(**kwargs)
|
||||
|
||||
# HTTP client will be initialized in __aenter__
|
||||
self.http_client: Optional[httpx.AsyncClient] = None
|
||||
self.websocket: Optional[websockets.WebSocketServerProtocol] = None
|
||||
self._websocket_listeners: List[Callable[[StreamEvent], None]] = []
|
||||
self._websocket_task: Optional[asyncio.Task] = None
|
||||
|
||||
# Initialize cache
|
||||
self._cache = MemoryCache(
|
||||
max_size=self.config.cache.max_size,
|
||||
strategy=self.config.cache.strategy,
|
||||
ttl_seconds=self.config.cache.ttl_seconds
|
||||
) if self.config.cache.enabled else None
|
||||
|
||||
# Analytics
|
||||
self.analytics = AnalyticsData()
|
||||
|
||||
async def __aenter__(self):
|
||||
"""Async context manager entry."""
|
||||
await self._initialize_http_client()
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||
"""Async context manager exit."""
|
||||
await self.close()
|
||||
|
||||
async def _initialize_http_client(self):
|
||||
"""Initialize the HTTP client with proper configuration."""
|
||||
headers = {
|
||||
"User-Agent": self.config.user_agent,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
|
||||
if self.config.api_key:
|
||||
headers["X-API-Key"] = self.config.api_key
|
||||
elif self.config.jwt_token:
|
||||
headers["Authorization"] = f"Bearer {self.config.jwt_token}"
|
||||
|
||||
# Configure timeouts
|
||||
timeout = httpx.Timeout(
|
||||
connect=self.config.timeout,
|
||||
read=self.config.timeout,
|
||||
write=self.config.timeout,
|
||||
pool=self.config.timeout * 2
|
||||
)
|
||||
|
||||
# Configure connection limits
|
||||
limits = httpx.Limits(
|
||||
max_connections=self.config.max_connections,
|
||||
max_keepalive_connections=self.config.max_keepalive_connections
|
||||
)
|
||||
|
||||
self.http_client = httpx.AsyncClient(
|
||||
base_url=self.config.base_url,
|
||||
headers=headers,
|
||||
timeout=timeout,
|
||||
limits=limits,
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
async def health_check(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Check API health status asynchronously.
|
||||
|
||||
Returns:
|
||||
Health status information
|
||||
|
||||
Raises:
|
||||
HCFSConnectionError: If health check fails
|
||||
"""
|
||||
try:
|
||||
response = await self.http_client.get("/health")
|
||||
|
||||
if response.status_code == 200:
|
||||
self._update_analytics("health_check", success=True)
|
||||
return response.json()
|
||||
else:
|
||||
self._update_analytics("health_check", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("health_check", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Health check failed: {str(e)}")
|
||||
|
||||
@cached_context()
|
||||
@retry_on_failure()
|
||||
async def create_context(self, context: Context) -> Context:
|
||||
"""
|
||||
Create a new context asynchronously.
|
||||
|
||||
Args:
|
||||
context: Context object to create
|
||||
|
||||
Returns:
|
||||
Created context with assigned ID
|
||||
|
||||
Raises:
|
||||
HCFSValidationError: If context data is invalid
|
||||
HCFSError: If creation fails
|
||||
"""
|
||||
if not validate_path(context.path):
|
||||
raise HCFSValidationError(f"Invalid context path: {context.path}")
|
||||
|
||||
context.path = normalize_path(context.path)
|
||||
|
||||
try:
|
||||
response = await self.http_client.post(
|
||||
"/api/v1/contexts",
|
||||
json=context.to_create_dict()
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()["data"]
|
||||
created_context = Context(**data)
|
||||
self._update_analytics("create_context", success=True)
|
||||
return created_context
|
||||
else:
|
||||
self._update_analytics("create_context", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("create_context", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Failed to create context: {str(e)}")
|
||||
|
||||
@cached_context()
|
||||
async def get_context(self, context_id: int) -> Context:
|
||||
"""
|
||||
Retrieve a context by ID asynchronously.
|
||||
|
||||
Args:
|
||||
context_id: Context identifier
|
||||
|
||||
Returns:
|
||||
Context object
|
||||
|
||||
Raises:
|
||||
HCFSNotFoundError: If context doesn't exist
|
||||
"""
|
||||
try:
|
||||
response = await self.http_client.get(f"/api/v1/contexts/{context_id}")
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()["data"]
|
||||
context = Context(**data)
|
||||
self._update_analytics("get_context", success=True)
|
||||
return context
|
||||
else:
|
||||
self._update_analytics("get_context", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("get_context", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Failed to get context: {str(e)}")
|
||||
|
||||
async def list_contexts(self,
|
||||
filter_opts: Optional[ContextFilter] = None,
|
||||
pagination: Optional[PaginationOptions] = None) -> List[Context]:
|
||||
"""
|
||||
List contexts with filtering and pagination asynchronously.
|
||||
|
||||
Args:
|
||||
filter_opts: Context filtering options
|
||||
pagination: Pagination configuration
|
||||
|
||||
Returns:
|
||||
List of contexts
|
||||
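
        Example (a minimal sketch; ``path_prefix`` is a hypothetical
        ``ContextFilter`` field, only ``PaginationOptions(page=...,
        page_size=...)`` is confirmed elsewhere in this module):

            >>> filters = ContextFilter(path_prefix="/docs")
            >>> page_opts = PaginationOptions(page=1, page_size=50)
            >>> contexts = await client.list_contexts(filters, page_opts)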
"""
|
||||
params = {}
|
||||
|
||||
if filter_opts:
|
||||
params.update(filter_opts.to_query_params())
|
||||
|
||||
if pagination:
|
||||
params.update(pagination.to_query_params())
|
||||
|
||||
try:
|
||||
response = await self.http_client.get("/api/v1/contexts", params=params)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()["data"]
|
||||
contexts = [Context(**ctx_data) for ctx_data in data]
|
||||
self._update_analytics("list_contexts", success=True)
|
||||
return contexts
|
||||
else:
|
||||
self._update_analytics("list_contexts", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("list_contexts", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Failed to list contexts: {str(e)}")
|
||||
|
||||
async def update_context(self, context_id: int, updates: Dict[str, Any]) -> Context:
|
||||
"""
|
||||
Update an existing context asynchronously.
|
||||
|
||||
Args:
|
||||
context_id: Context identifier
|
||||
updates: Fields to update
|
||||
|
||||
Returns:
|
||||
Updated context
|
||||
|
||||
Raises:
|
||||
HCFSNotFoundError: If context doesn't exist
|
||||
HCFSValidationError: If update data is invalid
|
||||
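
        Example (a minimal sketch; treating ``content`` and ``summary``
        as updatable fields is an assumption based on the ``Context``
        fields used elsewhere in this module):

            >>> updated = await client.update_context(
            ...     42, {"content": "Revised content", "summary": "Revised"}
            ... )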
"""
|
||||
try:
|
||||
response = await self.http_client.put(
|
||||
f"/api/v1/contexts/{context_id}",
|
||||
json=updates
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()["data"]
|
||||
updated_context = Context(**data)
|
||||
self._update_analytics("update_context", success=True)
|
||||
|
||||
# Invalidate cache
|
||||
if self._cache:
|
||||
cache_key = f"get_context:{context_id}"
|
||||
self._cache.remove(cache_key)
|
||||
|
||||
return updated_context
|
||||
else:
|
||||
self._update_analytics("update_context", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("update_context", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Failed to update context: {str(e)}")
|
||||
|
||||
async def delete_context(self, context_id: int) -> bool:
|
||||
"""
|
||||
Delete a context asynchronously.
|
||||
|
||||
Args:
|
||||
context_id: Context identifier
|
||||
|
||||
Returns:
|
||||
True if deletion was successful
|
||||
|
||||
Raises:
|
||||
HCFSNotFoundError: If context doesn't exist
|
||||
"""
|
||||
try:
|
||||
response = await self.http_client.delete(f"/api/v1/contexts/{context_id}")
|
||||
|
||||
if response.status_code == 200:
|
||||
self._update_analytics("delete_context", success=True)
|
||||
|
||||
# Invalidate cache
|
||||
if self._cache:
|
||||
cache_key = f"get_context:{context_id}"
|
||||
self._cache.remove(cache_key)
|
||||
|
||||
return True
|
||||
else:
|
||||
self._update_analytics("delete_context", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("delete_context", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Failed to delete context: {str(e)}")
|
||||
|
||||
@rate_limited(requests_per_second=10.0)
|
||||
async def search_contexts(self,
|
||||
query: str,
|
||||
options: Optional[SearchOptions] = None) -> List[SearchResult]:
|
||||
"""
|
||||
Search contexts asynchronously using various search methods.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
options: Search configuration options
|
||||
|
||||
Returns:
|
||||
List of search results ordered by relevance
|
||||
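
        Example (a minimal sketch using only attributes this method
        itself sets on each ``SearchResult``):

            >>> results = await client.search_contexts("async README")
            >>> for result in results:
            ...     print(result.score, result.context.path)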
"""
|
||||
search_opts = options or SearchOptions()
|
||||
|
||||
request_data = {
|
||||
"query": query,
|
||||
**search_opts.to_request_dict()
|
||||
}
|
||||
|
||||
try:
|
||||
response = await self.http_client.post(
|
||||
"/api/v1/search",
|
||||
json=request_data
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()["data"]
|
||||
results = []
|
||||
|
||||
for result_data in data:
|
||||
context = Context(**result_data["context"])
|
||||
search_result = SearchResult(
|
||||
context=context,
|
||||
score=result_data["score"],
|
||||
explanation=result_data.get("explanation"),
|
||||
highlights=result_data.get("highlights", [])
|
||||
)
|
||||
results.append(search_result)
|
||||
|
||||
self._update_analytics("search_contexts", success=True)
|
||||
return sorted(results, key=lambda x: x.score, reverse=True)
|
||||
else:
|
||||
self._update_analytics("search_contexts", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("search_contexts", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Search failed: {str(e)}")
|
||||
|
||||
async def batch_create_contexts(self, contexts: List[Context]) -> BatchResult:
|
||||
"""
|
||||
Create multiple contexts in a single batch operation asynchronously.
|
||||
|
||||
Args:
|
||||
contexts: List of contexts to create
|
||||
|
||||
Returns:
|
||||
Batch operation results
|
||||
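
        Example (a minimal sketch; the ``BatchResult`` fields shown are
        the ones this method populates below):

            >>> notes = [
            ...     Context(path=f"/docs/note_{i}", content=f"Note {i}", summary="demo")
            ...     for i in range(10)
            ... ]
            >>> result = await client.batch_create_contexts(notes)
            >>> print(f"{result.success_count} created, {result.error_count} failed")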
"""
|
||||
request_data = {
|
||||
"contexts": [ctx.to_create_dict() for ctx in contexts]
|
||||
}
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
response = await self.http_client.post(
|
||||
"/api/v1/contexts/batch",
|
||||
json=request_data,
|
||||
timeout=self.config.timeout * 3 # Extended timeout for batch ops
|
||||
)
|
||||
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()["data"]
|
||||
|
||||
result = BatchResult(
|
||||
success_count=data["success_count"],
|
||||
error_count=data["error_count"],
|
||||
total_items=data["total_items"],
|
||||
successful_items=data.get("created_ids", []),
|
||||
failed_items=data.get("errors", []),
|
||||
execution_time=execution_time
|
||||
)
|
||||
|
||||
self._update_analytics("batch_create", success=True)
|
||||
return result
|
||||
else:
|
||||
self._update_analytics("batch_create", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
execution_time = time.time() - start_time
|
||||
self._update_analytics("batch_create", success=False, error=str(e))
|
||||
|
||||
return BatchResult(
|
||||
success_count=0,
|
||||
error_count=len(contexts),
|
||||
total_items=len(contexts),
|
||||
successful_items=[],
|
||||
failed_items=[{"error": str(e)}],
|
||||
execution_time=execution_time
|
||||
)
|
||||
|
||||
async def get_statistics(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Get comprehensive system statistics asynchronously.
|
||||
|
||||
Returns:
|
||||
System statistics and metrics
|
||||
"""
|
||||
try:
|
||||
response = await self.http_client.get("/api/v1/stats")
|
||||
|
||||
if response.status_code == 200:
|
||||
self._update_analytics("get_statistics", success=True)
|
||||
return response.json()
|
||||
else:
|
||||
self._update_analytics("get_statistics", success=False)
|
||||
handle_api_error(response)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
self._update_analytics("get_statistics", success=False, error=str(e))
|
||||
raise HCFSConnectionError(f"Failed to get statistics: {str(e)}")
|
||||
|
||||
async def iterate_contexts(self,
|
||||
filter_opts: Optional[ContextFilter] = None,
|
||||
page_size: int = 100) -> AsyncIterator[Context]:
|
||||
"""
|
||||
Asynchronously iterate through all contexts with automatic pagination.
|
||||
|
||||
Args:
|
||||
filter_opts: Context filtering options
|
||||
page_size: Number of contexts per page
|
||||
|
||||
Yields:
|
||||
Context objects
|
||||
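
        Example (a minimal sketch):

            >>> async for context in client.iterate_contexts(page_size=200):
            ...     print(context.path)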
"""
|
||||
page = 1
|
||||
|
||||
while True:
|
||||
pagination = PaginationOptions(page=page, page_size=page_size)
|
||||
contexts = await self.list_contexts(filter_opts, pagination)
|
||||
|
||||
if not contexts:
|
||||
break
|
||||
|
||||
for context in contexts:
|
||||
yield context
|
||||
|
||||
# If we got fewer contexts than requested, we've reached the end
|
||||
if len(contexts) < page_size:
|
||||
break
|
||||
|
||||
page += 1
|
||||
|
||||
async def connect_websocket(self,
|
||||
path_prefix: Optional[str] = None,
|
||||
event_types: Optional[List[str]] = None) -> None:
|
||||
"""
|
||||
Connect to WebSocket for real-time updates.
|
||||
|
||||
Args:
|
||||
path_prefix: Filter events by path prefix
|
||||
event_types: List of event types to subscribe to
|
||||
|
||||
Raises:
|
||||
HCFSStreamError: If WebSocket connection fails
|
||||
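
        Example (a minimal sketch; the listener receives the ``StreamEvent``
        objects constructed in ``_websocket_listener`` below):

            >>> def on_event(event):
            ...     print(event.event_type, event.path)
            >>> client.add_event_listener(on_event)
            >>> await client.connect_websocket(
            ...     path_prefix="/docs",
            ...     event_types=["created", "updated"]
            ... )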
"""
|
||||
if self.websocket and not self.websocket.closed:
|
||||
return # Already connected
|
||||
|
||||
# Convert HTTP URL to WebSocket URL
|
||||
ws_url = self.config.base_url.replace("http://", "ws://").replace("https://", "wss://")
|
||||
ws_url += "/ws"
|
||||
|
||||
# Add authentication headers
|
||||
headers = {}
|
||||
if self.config.api_key:
|
||||
headers["X-API-Key"] = self.config.api_key
|
||||
elif self.config.jwt_token:
|
||||
headers["Authorization"] = f"Bearer {self.config.jwt_token}"
|
||||
|
||||
try:
|
||||
self.websocket = await websockets.connect(
|
||||
ws_url,
|
||||
extra_headers=headers,
|
||||
ping_interval=self.config.websocket.ping_interval,
|
||||
ping_timeout=self.config.websocket.ping_timeout
|
||||
)
|
||||
|
||||
# Send subscription request
|
||||
subscription = {
|
||||
"type": "subscribe",
|
||||
"data": {
|
||||
"path_prefix": path_prefix,
|
||||
"event_types": event_types or ["created", "updated", "deleted"],
|
||||
"filters": {}
|
||||
}
|
||||
}
|
||||
|
||||
await self.websocket.send(json.dumps(subscription))
|
||||
|
||||
# Start listening task
|
||||
self._websocket_task = asyncio.create_task(self._websocket_listener())
|
||||
|
||||
except (WebSocketException, ConnectionClosed) as e:
|
||||
raise HCFSStreamError(f"Failed to connect to WebSocket: {str(e)}")
|
||||
|
||||
async def disconnect_websocket(self) -> None:
|
||||
"""Disconnect from WebSocket."""
|
||||
if self._websocket_task:
|
||||
self._websocket_task.cancel()
|
||||
try:
|
||||
await self._websocket_task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
self._websocket_task = None
|
||||
|
||||
if self.websocket:
|
||||
await self.websocket.close()
|
||||
self.websocket = None
|
||||
|
||||
def add_event_listener(self, listener: Callable[[StreamEvent], None]) -> None:
|
||||
"""
|
||||
Add an event listener for WebSocket events.
|
||||
|
||||
Args:
|
||||
listener: Function to call when events are received
|
||||
"""
|
||||
self._websocket_listeners.append(listener)
|
||||
|
||||
def remove_event_listener(self, listener: Callable[[StreamEvent], None]) -> None:
|
||||
"""
|
||||
Remove an event listener.
|
||||
|
||||
Args:
|
||||
listener: Function to remove
|
||||
"""
|
||||
if listener in self._websocket_listeners:
|
||||
self._websocket_listeners.remove(listener)
|
||||
|
||||
async def _websocket_listener(self) -> None:
|
||||
"""Internal WebSocket message listener."""
|
||||
try:
|
||||
async for message in self.websocket:
|
||||
try:
|
||||
data = json.loads(message)
|
||||
event = StreamEvent(
|
||||
event_type=data.get("type", "unknown"),
|
||||
data=data.get("data", {}),
|
||||
timestamp=datetime.fromisoformat(data.get("timestamp", datetime.utcnow().isoformat())),
|
||||
context_id=data.get("context_id"),
|
||||
path=data.get("path")
|
||||
)
|
||||
|
||||
# Notify all listeners
|
||||
for listener in self._websocket_listeners:
|
||||
try:
|
||||
if asyncio.iscoroutinefunction(listener):
|
||||
await listener(event)
|
||||
else:
|
||||
listener(event)
|
||||
except Exception:
|
||||
pass # Don't let listener errors break the connection
|
||||
|
||||
except json.JSONDecodeError:
|
||||
pass # Ignore malformed messages
|
||||
|
||||
except (WebSocketException, ConnectionClosed):
|
||||
# Connection was closed, attempt reconnection if configured
|
||||
if self.config.websocket.auto_reconnect:
|
||||
await self._attempt_websocket_reconnection()
|
||||
|
||||
async def _attempt_websocket_reconnection(self) -> None:
|
||||
"""Attempt to reconnect WebSocket with backoff."""
|
||||
for attempt in range(self.config.websocket.max_reconnect_attempts):
|
||||
try:
|
||||
await asyncio.sleep(self.config.websocket.reconnect_interval)
|
||||
await self.connect_websocket()
|
||||
return # Successfully reconnected
|
||||
except Exception:
|
||||
continue # Try again
|
||||
|
||||
# All reconnection attempts failed
|
||||
raise HCFSStreamError("Failed to reconnect WebSocket after multiple attempts")
|
||||
|
||||
def clear_cache(self) -> None:
|
||||
"""Clear all cached data."""
|
||||
if self._cache:
|
||||
self._cache.clear()
|
||||
|
||||
def get_cache_stats(self) -> Dict[str, Any]:
|
||||
"""Get cache statistics."""
|
||||
if self._cache:
|
||||
stats = self._cache.stats()
|
||||
self.analytics.cache_stats = stats
|
||||
return stats
|
||||
return {}
|
||||
|
||||
def get_analytics(self) -> AnalyticsData:
|
||||
"""
|
||||
Get client analytics and usage statistics.
|
||||
|
||||
Returns:
|
||||
Analytics data including operation counts and performance metrics
|
||||
"""
|
||||
# Update cache stats
|
||||
if self._cache:
|
||||
self.analytics.cache_stats = self._cache.stats()
|
||||
|
||||
return self.analytics
|
||||
|
||||
def _update_analytics(self, operation: str, success: bool, error: Optional[str] = None):
|
||||
"""Update internal analytics tracking."""
|
||||
self.analytics.operation_count[operation] = self.analytics.operation_count.get(operation, 0) + 1
|
||||
|
||||
if not success:
|
||||
error_key = error or "unknown_error"
|
||||
self.analytics.error_stats[error_key] = self.analytics.error_stats.get(error_key, 0) + 1
|
||||
|
||||
async def close(self):
|
||||
"""Close the client and cleanup resources."""
|
||||
await self.disconnect_websocket()
|
||||
|
||||
if self.http_client:
|
||||
await self.http_client.aclose()
|
||||