Phase 2 build initial
This commit is contained in:
134
sdks/java/build.gradle.kts
Normal file
134
sdks/java/build.gradle.kts
Normal file
@@ -0,0 +1,134 @@
plugins {
    `java-library`
    `maven-publish`
    signing
    id("io.github.gradle-nexus.publish-plugin") version "1.3.0"
    id("org.jetbrains.dokka") version "1.9.10"
}

group = "dev.hcfs"
version = "2.0.0"
description = "Java SDK for the Context-Aware Hierarchical Context File System"

java {
    sourceCompatibility = JavaVersion.VERSION_11
    targetCompatibility = JavaVersion.VERSION_11
    withJavadocJar()
    withSourcesJar()
}

repositories {
    mavenCentral()
}

dependencies {
    // HTTP client
    api("com.squareup.okhttp3:okhttp:4.12.0")
    implementation("com.squareup.okhttp3:logging-interceptor:4.12.0")

    // JSON serialization
    api("com.fasterxml.jackson.core:jackson-databind:2.16.0")
    implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.16.0")
    implementation("com.fasterxml.jackson.module:jackson-module-parameter-names:2.16.0")

    // Reactive streams
    api("io.reactivex.rxjava3:rxjava:3.1.8")
    // Fix: Retrofit never published a 2.10.0 release (versions went 2.9.0 -> 2.11.0),
    // so 2.10.0 fails dependency resolution.
    implementation("com.squareup.retrofit2:adapter-rxjava3:2.11.0")

    // WebSocket support
    implementation("org.java-websocket:Java-WebSocket:1.5.4")

    // Validation
    // Fix: hibernate-validator 8.x implements Jakarta Bean Validation 3.0 and
    // compiles against the jakarta.validation API; the legacy
    // javax.validation:validation-api 2.x artifact is incompatible with it.
    implementation("jakarta.validation:jakarta.validation-api:3.0.2")
    implementation("org.hibernate.validator:hibernate-validator:8.0.1.Final")

    // Caching
    implementation("com.github.ben-manes.caffeine:caffeine:3.1.8")

    // Retry logic
    implementation("dev.failsafe:failsafe:3.3.2")

    // Logging
    implementation("org.slf4j:slf4j-api:2.0.9")

    // Metrics (optional at runtime; consumers provide their own micrometer)
    compileOnly("io.micrometer:micrometer-core:1.12.0")

    // Testing
    testImplementation("org.junit.jupiter:junit-jupiter:5.10.1")
    testImplementation("org.mockito:mockito-core:5.7.0")
    testImplementation("org.mockito:mockito-junit-jupiter:5.7.0")
    testImplementation("com.squareup.okhttp3:mockwebserver:4.12.0")
    testImplementation("org.assertj:assertj-core:3.24.2")
    testImplementation("ch.qos.logback:logback-classic:1.4.14")
}

tasks.test {
    useJUnitPlatform()
    testLogging {
        events("passed", "skipped", "failed")
    }
}

tasks.compileJava {
    // -parameters keeps constructor/method parameter names for Jackson's
    // parameter-names module; lint flags surface unchecked/deprecation warnings.
    options.compilerArgs.addAll(listOf("-parameters", "-Xlint:unchecked", "-Xlint:deprecation"))
}

tasks.javadoc {
    if (JavaVersion.current().isJava9Compatible) {
        (options as StandardJavadocDocletOptions).addBooleanOption("html5", true)
    }
    options.encoding = "UTF-8"
    (options as StandardJavadocDocletOptions).addStringOption("Xdoclint:none", "-quiet")
}

publishing {
    publications {
        create<MavenPublication>("maven") {
            from(components["java"])

            pom {
                name.set("HCFS Java SDK")
                description.set("Java SDK for the Context-Aware Hierarchical Context File System")
                url.set("https://github.com/hcfs/hcfs")

                licenses {
                    license {
                        name.set("MIT License")
                        url.set("https://opensource.org/licenses/MIT")
                    }
                }

                developers {
                    developer {
                        id.set("hcfs-team")
                        name.set("HCFS Development Team")
                        email.set("dev@hcfs.dev")
                    }
                }

                scm {
                    connection.set("scm:git:git://github.com/hcfs/hcfs.git")
                    developerConnection.set("scm:git:ssh://github.com/hcfs/hcfs.git")
                    url.set("https://github.com/hcfs/hcfs")
                }
            }
        }
    }
}

signing {
    // Keys are supplied via gradle properties (e.g. ORG_GRADLE_PROJECT_signingKey)
    // so CI never needs a keyring file.
    val signingKey: String? by project
    val signingPassword: String? by project
    useInMemoryPgpKeys(signingKey, signingPassword)
    sign(publishing.publications["maven"])
}

nexusPublishing {
    repositories {
        sonatype {
            nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
            snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
        }
    }
}
755
sdks/java/src/main/java/dev/hcfs/sdk/HCFSClient.java
Normal file
755
sdks/java/src/main/java/dev/hcfs/sdk/HCFSClient.java
Normal file
@@ -0,0 +1,755 @@
|
||||
package dev.hcfs.sdk;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import dev.failsafe.Failsafe;
import dev.failsafe.RetryPolicy;
import io.reactivex.rxjava3.core.Observable;
import io.reactivex.rxjava3.core.Single;
import io.reactivex.rxjava3.schedulers.Schedulers;
import okhttp3.*;
import okhttp3.logging.HttpLoggingInterceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URI;
import java.time.Duration;
import java.time.Instant;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
|
||||
/**
|
||||
* Main HCFS client for Java applications.
|
||||
*
|
||||
* <p>This client provides both synchronous and asynchronous (reactive) methods for interacting
|
||||
* with the HCFS API. It includes built-in caching, retry logic, rate limiting, and comprehensive
|
||||
* error handling.</p>
|
||||
*
|
||||
* <h3>Basic Usage</h3>
|
||||
* <pre>{@code
|
||||
* Config config = Config.builder()
|
||||
* .baseUrl("https://api.hcfs.dev/v1")
|
||||
* .apiKey("your-api-key")
|
||||
* .build();
|
||||
*
|
||||
* HCFSClient client = new HCFSClient(config);
|
||||
*
|
||||
* // Create a context
|
||||
* Context context = Context.builder()
|
||||
* .path("/docs/readme")
|
||||
* .content("Hello, HCFS!")
|
||||
* .summary("Getting started guide")
|
||||
* .build();
|
||||
*
|
||||
* Context created = client.createContext(context).blockingGet();
|
||||
* System.out.println("Created context: " + created.getId());
|
||||
*
|
||||
* // Search contexts
|
||||
* List<SearchResult> results = client.searchContexts("hello world")
|
||||
* .blockingGet();
|
||||
*
|
||||
* results.forEach(result ->
|
||||
* System.out.printf("Found: %s (score: %.3f)%n",
|
||||
* result.getContext().getPath(), result.getScore()));
|
||||
* }</pre>
|
||||
*
|
||||
* <h3>Reactive Usage</h3>
|
||||
* <pre>{@code
|
||||
* // Async operations with RxJava
|
||||
* client.createContext(context)
|
||||
* .subscribeOn(Schedulers.io())
|
||||
* .observeOn(Schedulers.computation())
|
||||
* .subscribe(
|
||||
* created -> System.out.println("Created: " + created.getId()),
|
||||
* error -> System.err.println("Error: " + error.getMessage())
|
||||
* );
|
||||
*
|
||||
* // Stream processing
|
||||
* client.searchContexts("query")
|
||||
* .flatMapObservable(Observable::fromIterable)
|
||||
* .filter(result -> result.getScore() > 0.8)
|
||||
* .map(result -> result.getContext())
|
||||
* .subscribe(context -> processContext(context));
|
||||
* }</pre>
|
||||
*
|
||||
* @author HCFS Development Team
|
||||
* @version 2.0.0
|
||||
* @since 1.0.0
|
||||
*/
|
||||
public class HCFSClient {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(HCFSClient.class);
|
||||
private static final String SDK_VERSION = "2.0.0";
|
||||
private static final String USER_AGENT = "hcfs-java/" + SDK_VERSION;
|
||||
|
||||
private final Config config;
|
||||
private final OkHttpClient httpClient;
|
||||
private final ObjectMapper objectMapper;
|
||||
private final Cache<String, Object> cache;
|
||||
private final RetryPolicy<Object> retryPolicy;
|
||||
private final Map<String, AtomicLong> analytics;
|
||||
private final Instant sessionStart;
|
||||
|
||||
/**
|
||||
* Creates a new HCFS client with the specified configuration.
|
||||
*
|
||||
* @param config the client configuration
|
||||
* @throws IllegalArgumentException if config is null
|
||||
*/
|
||||
public HCFSClient(Config config) {
|
||||
if (config == null) {
|
||||
throw new IllegalArgumentException("Config cannot be null");
|
||||
}
|
||||
|
||||
this.config = config;
|
||||
this.sessionStart = Instant.now();
|
||||
this.analytics = new ConcurrentHashMap<>();
|
||||
|
||||
// Initialize object mapper
|
||||
this.objectMapper = new ObjectMapper()
|
||||
.registerModule(new JavaTimeModule())
|
||||
.findAndRegisterModules();
|
||||
|
||||
// Initialize cache
|
||||
if (config.isCacheEnabled()) {
|
||||
this.cache = Caffeine.newBuilder()
|
||||
.maximumSize(config.getCacheSize())
|
||||
.expireAfterWrite(config.getCacheTtl())
|
||||
.recordStats()
|
||||
.build();
|
||||
} else {
|
||||
this.cache = null;
|
||||
}
|
||||
|
||||
// Initialize retry policy
|
||||
this.retryPolicy = RetryPolicy.builder()
|
||||
.handle(IOException.class, HCFSServerException.class, HCFSRateLimitException.class)
|
||||
.withDelay(config.getRetryBaseDelay())
|
||||
.withMaxRetries(config.getMaxRetries())
|
||||
.withJitter(Duration.ofMillis(100))
|
||||
.build();
|
||||
|
||||
// Initialize HTTP client
|
||||
this.httpClient = createHttpClient();
|
||||
|
||||
logger.info("HCFS client initialized with base URL: {}", config.getBaseUrl());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and configures the OkHttp client.
|
||||
*/
|
||||
private OkHttpClient createHttpClient() {
|
||||
OkHttpClient.Builder builder = new OkHttpClient.Builder()
|
||||
.connectTimeout(config.getTimeout())
|
||||
.readTimeout(config.getTimeout())
|
||||
.writeTimeout(config.getTimeout())
|
||||
.addInterceptor(new AuthenticationInterceptor())
|
||||
.addInterceptor(new AnalyticsInterceptor())
|
||||
.addInterceptor(new UserAgentInterceptor());
|
||||
|
||||
// Add logging interceptor for debugging
|
||||
if (logger.isDebugEnabled()) {
|
||||
HttpLoggingInterceptor loggingInterceptor = new HttpLoggingInterceptor(logger::debug);
|
||||
loggingInterceptor.setLevel(HttpLoggingInterceptor.Level.BASIC);
|
||||
builder.addInterceptor(loggingInterceptor);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the API health status.
|
||||
*
|
||||
* @return a Single emitting the health response
|
||||
*/
|
||||
public Single<HealthResponse> healthCheck() {
|
||||
return Single.fromCallable(() -> {
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + "/health")
|
||||
.get()
|
||||
.build();
|
||||
|
||||
return executeRequest(request, HealthResponse.class);
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new context.
|
||||
*
|
||||
* @param contextData the context data to create
|
||||
* @return a Single emitting the created context
|
||||
* @throws IllegalArgumentException if contextData is null or invalid
|
||||
*/
|
||||
public Single<Context> createContext(ContextCreate contextData) {
|
||||
if (contextData == null) {
|
||||
return Single.error(new IllegalArgumentException("Context data cannot be null"));
|
||||
}
|
||||
|
||||
if (!PathValidator.isValid(contextData.getPath())) {
|
||||
return Single.error(new HCFSValidationException("Invalid context path: " + contextData.getPath()));
|
||||
}
|
||||
|
||||
return Single.fromCallable(() -> {
|
||||
// Normalize path
|
||||
String normalizedPath = PathValidator.normalize(contextData.getPath());
|
||||
ContextCreate normalized = contextData.toBuilder()
|
||||
.path(normalizedPath)
|
||||
.build();
|
||||
|
||||
RequestBody body = RequestBody.create(
|
||||
objectMapper.writeValueAsString(normalized),
|
||||
MediaType.get("application/json")
|
||||
);
|
||||
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + "/api/v1/contexts")
|
||||
.post(body)
|
||||
.build();
|
||||
|
||||
APIResponse<Context> response = executeRequest(request,
|
||||
new TypeReference<APIResponse<Context>>() {});
|
||||
|
||||
// Invalidate relevant cache entries
|
||||
invalidateCache("/api/v1/contexts");
|
||||
|
||||
return response.getData();
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a context by ID.
|
||||
*
|
||||
* @param contextId the context ID
|
||||
* @return a Single emitting the context
|
||||
* @throws IllegalArgumentException if contextId is invalid
|
||||
*/
|
||||
public Single<Context> getContext(int contextId) {
|
||||
if (contextId <= 0) {
|
||||
return Single.error(new IllegalArgumentException("Context ID must be positive"));
|
||||
}
|
||||
|
||||
return Single.fromCallable(() -> {
|
||||
String path = "/api/v1/contexts/" + contextId;
|
||||
String cacheKey = "GET:" + path;
|
||||
|
||||
// Check cache first
|
||||
if (cache != null) {
|
||||
Context cached = (Context) cache.getIfPresent(cacheKey);
|
||||
if (cached != null) {
|
||||
recordAnalytics("cache_hit");
|
||||
return cached;
|
||||
}
|
||||
recordAnalytics("cache_miss");
|
||||
}
|
||||
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + path)
|
||||
.get()
|
||||
.build();
|
||||
|
||||
APIResponse<Context> response = executeRequest(request,
|
||||
new TypeReference<APIResponse<Context>>() {});
|
||||
|
||||
Context context = response.getData();
|
||||
|
||||
// Cache the result
|
||||
if (cache != null) {
|
||||
cache.put(cacheKey, context);
|
||||
}
|
||||
|
||||
return context;
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists contexts with optional filtering and pagination.
|
||||
*
|
||||
* @param filter the context filter (can be null)
|
||||
* @param pagination the pagination options (can be null)
|
||||
* @return a Single emitting the context list response
|
||||
*/
|
||||
public Single<ContextListResponse> listContexts(ContextFilter filter, PaginationOptions pagination) {
|
||||
return Single.fromCallable(() -> {
|
||||
HttpUrl.Builder urlBuilder = HttpUrl.parse(config.getBaseUrl() + "/api/v1/contexts").newBuilder();
|
||||
|
||||
// Add filter parameters
|
||||
if (filter != null) {
|
||||
addFilterParams(urlBuilder, filter);
|
||||
}
|
||||
|
||||
// Add pagination parameters
|
||||
if (pagination != null) {
|
||||
addPaginationParams(urlBuilder, pagination);
|
||||
}
|
||||
|
||||
Request request = new Request.Builder()
|
||||
.url(urlBuilder.build())
|
||||
.get()
|
||||
.build();
|
||||
|
||||
return executeRequest(request, ContextListResponse.class);
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates an existing context.
|
||||
*
|
||||
* @param contextId the context ID
|
||||
* @param updates the context updates
|
||||
* @return a Single emitting the updated context
|
||||
*/
|
||||
public Single<Context> updateContext(int contextId, ContextUpdate updates) {
|
||||
if (contextId <= 0) {
|
||||
return Single.error(new IllegalArgumentException("Context ID must be positive"));
|
||||
}
|
||||
if (updates == null) {
|
||||
return Single.error(new IllegalArgumentException("Updates cannot be null"));
|
||||
}
|
||||
|
||||
return Single.fromCallable(() -> {
|
||||
RequestBody body = RequestBody.create(
|
||||
objectMapper.writeValueAsString(updates),
|
||||
MediaType.get("application/json")
|
||||
);
|
||||
|
||||
String path = "/api/v1/contexts/" + contextId;
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + path)
|
||||
.put(body)
|
||||
.build();
|
||||
|
||||
APIResponse<Context> response = executeRequest(request,
|
||||
new TypeReference<APIResponse<Context>>() {});
|
||||
|
||||
// Invalidate cache
|
||||
invalidateCache("GET:" + path);
|
||||
invalidateCache("/api/v1/contexts");
|
||||
|
||||
return response.getData();
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a context.
|
||||
*
|
||||
* @param contextId the context ID
|
||||
* @return a Single emitting completion
|
||||
*/
|
||||
public Single<Void> deleteContext(int contextId) {
|
||||
if (contextId <= 0) {
|
||||
return Single.error(new IllegalArgumentException("Context ID must be positive"));
|
||||
}
|
||||
|
||||
return Single.fromCallable(() -> {
|
||||
String path = "/api/v1/contexts/" + contextId;
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + path)
|
||||
.delete()
|
||||
.build();
|
||||
|
||||
executeRequest(request, SuccessResponse.class);
|
||||
|
||||
// Invalidate cache
|
||||
invalidateCache("GET:" + path);
|
||||
invalidateCache("/api/v1/contexts");
|
||||
|
||||
return null;
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches contexts using various search methods.
|
||||
*
|
||||
* @param query the search query
|
||||
* @param options the search options (can be null)
|
||||
* @return a Single emitting the search results
|
||||
*/
|
||||
public Single<List<SearchResult>> searchContexts(String query, SearchOptions options) {
|
||||
if (query == null || query.trim().isEmpty()) {
|
||||
return Single.error(new IllegalArgumentException("Query cannot be null or empty"));
|
||||
}
|
||||
|
||||
return Single.fromCallable(() -> {
|
||||
Map<String, Object> searchData = new HashMap<>();
|
||||
searchData.put("query", query);
|
||||
|
||||
if (options != null) {
|
||||
addSearchOptions(searchData, options);
|
||||
}
|
||||
|
||||
RequestBody body = RequestBody.create(
|
||||
objectMapper.writeValueAsString(searchData),
|
||||
MediaType.get("application/json")
|
||||
);
|
||||
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + "/api/v1/search")
|
||||
.post(body)
|
||||
.build();
|
||||
|
||||
SearchResponse response = executeRequest(request, SearchResponse.class);
|
||||
return response.getData();
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches contexts with default options.
|
||||
*
|
||||
* @param query the search query
|
||||
* @return a Single emitting the search results
|
||||
*/
|
||||
public Single<List<SearchResult>> searchContexts(String query) {
|
||||
return searchContexts(query, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates multiple contexts in batch.
|
||||
*
|
||||
* @param contexts the list of contexts to create
|
||||
* @return a Single emitting the batch result
|
||||
*/
|
||||
public Single<BatchResult> batchCreateContexts(List<ContextCreate> contexts) {
|
||||
if (contexts == null || contexts.isEmpty()) {
|
||||
return Single.error(new IllegalArgumentException("Contexts cannot be null or empty"));
|
||||
}
|
||||
|
||||
return Single.fromCallable(() -> {
|
||||
Instant startTime = Instant.now();
|
||||
|
||||
// Validate and normalize all contexts
|
||||
List<ContextCreate> normalizedContexts = new ArrayList<>();
|
||||
for (ContextCreate context : contexts) {
|
||||
if (!PathValidator.isValid(context.getPath())) {
|
||||
throw new HCFSValidationException("Invalid context path: " + context.getPath());
|
||||
}
|
||||
|
||||
normalizedContexts.add(context.toBuilder()
|
||||
.path(PathValidator.normalize(context.getPath()))
|
||||
.build());
|
||||
}
|
||||
|
||||
Map<String, Object> batchData = new HashMap<>();
|
||||
batchData.put("contexts", normalizedContexts);
|
||||
|
||||
RequestBody body = RequestBody.create(
|
||||
objectMapper.writeValueAsString(batchData),
|
||||
MediaType.get("application/json")
|
||||
);
|
||||
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + "/api/v1/contexts/batch")
|
||||
.post(body)
|
||||
.build();
|
||||
|
||||
APIResponse<BatchResult> response = executeRequest(request,
|
||||
new TypeReference<APIResponse<BatchResult>>() {});
|
||||
|
||||
BatchResult result = response.getData();
|
||||
|
||||
// Calculate additional metrics
|
||||
Duration executionTime = Duration.between(startTime, Instant.now());
|
||||
double successRate = (double) result.getSuccessCount() / result.getTotalItems();
|
||||
|
||||
// Invalidate cache
|
||||
invalidateCache("/api/v1/contexts");
|
||||
|
||||
return result.toBuilder()
|
||||
.executionTime(executionTime)
|
||||
.successRate(successRate)
|
||||
.build();
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterates through all contexts with automatic pagination.
|
||||
*
|
||||
* @param filter the context filter (can be null)
|
||||
* @param pageSize the page size
|
||||
* @return an Observable emitting contexts
|
||||
*/
|
||||
public Observable<Context> iterateContexts(ContextFilter filter, int pageSize) {
|
||||
if (pageSize <= 0) {
|
||||
pageSize = 100;
|
||||
}
|
||||
|
||||
final int finalPageSize = pageSize;
|
||||
|
||||
return Observable.create(emitter -> {
|
||||
int page = 1;
|
||||
|
||||
while (!emitter.isDisposed()) {
|
||||
PaginationOptions pagination = PaginationOptions.builder()
|
||||
.page(page)
|
||||
.pageSize(finalPageSize)
|
||||
.build();
|
||||
|
||||
try {
|
||||
ContextListResponse response = listContexts(filter, pagination).blockingGet();
|
||||
List<Context> contexts = response.getData();
|
||||
|
||||
if (contexts.isEmpty()) {
|
||||
break;
|
||||
}
|
||||
|
||||
for (Context context : contexts) {
|
||||
if (emitter.isDisposed()) {
|
||||
return;
|
||||
}
|
||||
emitter.onNext(context);
|
||||
}
|
||||
|
||||
// Check if we've reached the end
|
||||
if (contexts.size() < finalPageSize || !response.getPagination().isHasNext()) {
|
||||
break;
|
||||
}
|
||||
|
||||
page++;
|
||||
} catch (Exception e) {
|
||||
emitter.onError(e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
emitter.onComplete();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets comprehensive system statistics.
|
||||
*
|
||||
* @return a Single emitting the statistics
|
||||
*/
|
||||
public Single<StatsResponse> getStatistics() {
|
||||
return Single.fromCallable(() -> {
|
||||
Request request = new Request.Builder()
|
||||
.url(config.getBaseUrl() + "/api/v1/stats")
|
||||
.get()
|
||||
.build();
|
||||
|
||||
return executeRequest(request, StatsResponse.class);
|
||||
}).subscribeOn(Schedulers.io());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets client analytics and usage statistics.
|
||||
*
|
||||
* @return the analytics data
|
||||
*/
|
||||
public Map<String, Object> getAnalytics() {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
result.put("session_start", sessionStart);
|
||||
result.put("operation_counts", new HashMap<>(analytics));
|
||||
|
||||
if (cache != null) {
|
||||
com.github.benmanes.caffeine.cache.stats.CacheStats stats = cache.stats();
|
||||
Map<String, Object> cacheStats = new HashMap<>();
|
||||
cacheStats.put("enabled", true);
|
||||
cacheStats.put("size", cache.estimatedSize());
|
||||
cacheStats.put("max_size", config.getCacheSize());
|
||||
cacheStats.put("hit_rate", stats.hitRate());
|
||||
cacheStats.put("miss_rate", stats.missRate());
|
||||
cacheStats.put("hit_count", stats.hitCount());
|
||||
cacheStats.put("miss_count", stats.missCount());
|
||||
result.put("cache_stats", cacheStats);
|
||||
} else {
|
||||
result.put("cache_stats", Map.of("enabled", false));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the client cache.
|
||||
*/
|
||||
public void clearCache() {
|
||||
if (cache != null) {
|
||||
cache.invalidateAll();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the client and releases resources.
|
||||
*/
|
||||
public void close() {
|
||||
if (cache != null) {
|
||||
cache.invalidateAll();
|
||||
}
|
||||
httpClient.dispatcher().executorService().shutdown();
|
||||
httpClient.connectionPool().evictAll();
|
||||
}
|
||||
|
||||
// Private helper methods
|
||||
|
||||
private <T> T executeRequest(Request request, Class<T> responseType) throws IOException {
|
||||
return executeRequest(request, TypeReference.constructType(responseType));
|
||||
}
|
||||
|
||||
private <T> T executeRequest(Request request, TypeReference<T> responseType) throws IOException {
|
||||
return Failsafe.with(retryPolicy).get(() -> {
|
||||
try (Response response = httpClient.newCall(request).execute()) {
|
||||
recordAnalytics("request");
|
||||
|
||||
if (!response.isSuccessful()) {
|
||||
recordAnalytics("error");
|
||||
handleErrorResponse(response);
|
||||
}
|
||||
|
||||
ResponseBody responseBody = response.body();
|
||||
if (responseBody == null) {
|
||||
throw new HCFSException("Empty response body");
|
||||
}
|
||||
|
||||
String json = responseBody.string();
|
||||
return objectMapper.readValue(json, responseType);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void handleErrorResponse(Response response) throws IOException {
|
||||
String body = response.body() != null ? response.body().string() : "";
|
||||
|
||||
try {
|
||||
APIErrorResponse errorResponse = objectMapper.readValue(body, APIErrorResponse.class);
|
||||
|
||||
switch (response.code()) {
|
||||
case 400:
|
||||
throw new HCFSValidationException(errorResponse.getError(), errorResponse.getErrorDetails());
|
||||
case 401:
|
||||
throw new HCFSAuthenticationException(errorResponse.getError());
|
||||
case 404:
|
||||
throw new HCFSNotFoundException(errorResponse.getError());
|
||||
case 429:
|
||||
String retryAfter = response.header("Retry-After");
|
||||
throw new HCFSRateLimitException(errorResponse.getError(), retryAfter);
|
||||
case 500:
|
||||
case 502:
|
||||
case 503:
|
||||
case 504:
|
||||
throw new HCFSServerException(errorResponse.getError(), response.code());
|
||||
default:
|
||||
throw new HCFSException(errorResponse.getError());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// If we can't parse the error response, throw a generic exception
|
||||
throw new HCFSException("HTTP " + response.code() + ": " + body);
|
||||
}
|
||||
}
|
||||
|
||||
private void addFilterParams(HttpUrl.Builder urlBuilder, ContextFilter filter) {
|
||||
if (filter.getPathPrefix() != null) {
|
||||
urlBuilder.addQueryParameter("path_prefix", filter.getPathPrefix());
|
||||
}
|
||||
if (filter.getAuthor() != null) {
|
||||
urlBuilder.addQueryParameter("author", filter.getAuthor());
|
||||
}
|
||||
if (filter.getStatus() != null) {
|
||||
urlBuilder.addQueryParameter("status", filter.getStatus().toString());
|
||||
}
|
||||
// Add other filter parameters as needed
|
||||
}
|
||||
|
||||
private void addPaginationParams(HttpUrl.Builder urlBuilder, PaginationOptions pagination) {
|
||||
if (pagination.getPage() != null) {
|
||||
urlBuilder.addQueryParameter("page", pagination.getPage().toString());
|
||||
}
|
||||
if (pagination.getPageSize() != null) {
|
||||
urlBuilder.addQueryParameter("page_size", pagination.getPageSize().toString());
|
||||
}
|
||||
if (pagination.getSortBy() != null) {
|
||||
urlBuilder.addQueryParameter("sort_by", pagination.getSortBy());
|
||||
}
|
||||
if (pagination.getSortOrder() != null) {
|
||||
urlBuilder.addQueryParameter("sort_order", pagination.getSortOrder().toString());
|
||||
}
|
||||
}
|
||||
|
||||
private void addSearchOptions(Map<String, Object> searchData, SearchOptions options) {
|
||||
if (options.getSearchType() != null) {
|
||||
searchData.put("search_type", options.getSearchType().toString());
|
||||
}
|
||||
if (options.getTopK() != null) {
|
||||
searchData.put("top_k", options.getTopK());
|
||||
}
|
||||
if (options.getSimilarityThreshold() != null) {
|
||||
searchData.put("similarity_threshold", options.getSimilarityThreshold());
|
||||
}
|
||||
if (options.getPathPrefix() != null) {
|
||||
searchData.put("path_prefix", options.getPathPrefix());
|
||||
}
|
||||
if (options.getSemanticWeight() != null) {
|
||||
searchData.put("semantic_weight", options.getSemanticWeight());
|
||||
}
|
||||
if (options.getIncludeContent() != null) {
|
||||
searchData.put("include_content", options.getIncludeContent());
|
||||
}
|
||||
if (options.getIncludeHighlights() != null) {
|
||||
searchData.put("include_highlights", options.getIncludeHighlights());
|
||||
}
|
||||
if (options.getMaxHighlights() != null) {
|
||||
searchData.put("max_highlights", options.getMaxHighlights());
|
||||
}
|
||||
}
|
||||
|
||||
private void invalidateCache(String pattern) {
|
||||
if (cache == null) return;
|
||||
|
||||
cache.asMap().keySet().removeIf(key -> key.contains(pattern));
|
||||
}
|
||||
|
||||
private void recordAnalytics(String operation) {
|
||||
analytics.computeIfAbsent(operation, k -> new AtomicLong(0)).incrementAndGet();
|
||||
}
|
||||
|
||||
// Inner classes for interceptors
|
||||
|
||||
private class AuthenticationInterceptor implements Interceptor {
|
||||
@Override
|
||||
public Response intercept(Chain chain) throws IOException {
|
||||
Request.Builder builder = chain.request().newBuilder();
|
||||
|
||||
if (config.getApiKey() != null) {
|
||||
builder.header("X-API-Key", config.getApiKey());
|
||||
}
|
||||
if (config.getJwtToken() != null) {
|
||||
builder.header("Authorization", "Bearer " + config.getJwtToken());
|
||||
}
|
||||
|
||||
return chain.proceed(builder.build());
|
||||
}
|
||||
}
|
||||
|
||||
private class UserAgentInterceptor implements Interceptor {
|
||||
@Override
|
||||
public Response intercept(Chain chain) throws IOException {
|
||||
Request request = chain.request().newBuilder()
|
||||
.header("User-Agent", USER_AGENT)
|
||||
.build();
|
||||
return chain.proceed(request);
|
||||
}
|
||||
}
|
||||
|
||||
private class AnalyticsInterceptor implements Interceptor {
|
||||
@Override
|
||||
public Response intercept(Chain chain) throws IOException {
|
||||
long startTime = System.currentTimeMillis();
|
||||
Response response = chain.proceed(chain.request());
|
||||
long duration = System.currentTimeMillis() - startTime;
|
||||
|
||||
recordAnalytics("total_requests");
|
||||
recordAnalytics("response_time_" + response.code());
|
||||
|
||||
if (!response.isSuccessful()) {
|
||||
recordAnalytics("failed_requests");
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user