package storage

import (
	"bytes"
	"context"
	"strings"
	"testing"
)

// TestLocalStorageCompression verifies that data stored with compression
// enabled round-trips intact and that GetCompressionStats reports at least
// one compressed entry with a non-zero ratio.
func TestLocalStorageCompression(t *testing.T) {
	// Create temporary directory for test
	tempDir := t.TempDir()

	// Create storage with compression enabled
	options := DefaultLocalStorageOptions()
	options.Compression = true

	storage, err := NewLocalStorage(tempDir, options)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer storage.Close()

	// Test data that should compress well
	largeData := strings.Repeat("This is a test string that should compress well! ", 100)

	// Store with compression enabled
	storeOptions := &StoreOptions{
		Compress: true,
	}

	ctx := context.Background()
	err = storage.Store(ctx, "test-compress", largeData, storeOptions)
	if err != nil {
		t.Fatalf("Failed to store compressed data: %v", err)
	}

	// Retrieve and verify
	retrieved, err := storage.Retrieve(ctx, "test-compress")
	if err != nil {
		t.Fatalf("Failed to retrieve compressed data: %v", err)
	}

	// Verify data integrity
	if retrievedStr, ok := retrieved.(string); ok {
		if retrievedStr != largeData {
			t.Error("Retrieved data doesn't match original")
		}
	} else {
		t.Error("Retrieved data is not a string")
	}

	// Check compression stats
	stats, err := storage.GetCompressionStats()
	if err != nil {
		t.Fatalf("Failed to get compression stats: %v", err)
	}

	if stats.CompressedEntries == 0 {
		t.Error("Expected at least one compressed entry")
	}
	if stats.CompressionRatio == 0 {
		t.Error("Expected non-zero compression ratio")
	}

	t.Logf("Compression stats: %d/%d entries compressed, ratio: %.2f",
		stats.CompressedEntries, stats.TotalEntries, stats.CompressionRatio)
}

// TestCompressionMethods exercises the internal compress/decompress helpers
// directly and checks that a round trip preserves the data byte-for-byte.
func TestCompressionMethods(t *testing.T) {
	// Create storage instance for testing compression methods
	tempDir := t.TempDir()
	storage, err := NewLocalStorage(tempDir, nil)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer storage.Close()

	// Test data
	// NOTE(review): this literal was split by a raw newline in the mangled
	// source; reconstructed without it — confirm against the original file.
	originalData := []byte(strings.Repeat("Hello, World! ", 1000))

	// Test compression
	compressed, err := storage.compress(originalData)
	if err != nil {
		t.Fatalf("Compression failed: %v", err)
	}

	t.Logf("Original size: %d bytes", len(originalData))
	t.Logf("Compressed size: %d bytes", len(compressed))

	// Compressed data should be smaller for repetitive data
	if len(compressed) >= len(originalData) {
		t.Log("Compression didn't reduce size (may be expected for small or non-repetitive data)")
	}

	// Test decompression
	decompressed, err := storage.decompress(compressed)
	if err != nil {
		t.Fatalf("Decompression failed: %v", err)
	}

	// Verify data integrity
	if !bytes.Equal(originalData, decompressed) {
		t.Error("Decompressed data doesn't match original")
	}
}

// TestStorageOptimization stores several uncompressed entries, runs
// OptimizeStorage with a size threshold, and verifies that every entry is
// still retrievable afterwards.
func TestStorageOptimization(t *testing.T) {
	// Create temporary directory for test
	tempDir := t.TempDir()
	storage, err := NewLocalStorage(tempDir, nil)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer storage.Close()

	ctx := context.Background()

	// Store multiple entries without compression
	testData := []struct {
		key  string
		data string
	}{
		{"small", "small data"},
		{"large1", strings.Repeat("Large repetitive data ", 100)},
		{"large2", strings.Repeat("Another large repetitive dataset ", 100)},
		{"medium", strings.Repeat("Medium data ", 50)},
	}

	for _, item := range testData {
		err = storage.Store(ctx, item.key, item.data, &StoreOptions{Compress: false})
		if err != nil {
			t.Fatalf("Failed to store %s: %v", item.key, err)
		}
	}

	// Check initial stats
	initialStats, err := storage.GetCompressionStats()
	if err != nil {
		t.Fatalf("Failed to get initial stats: %v", err)
	}

	t.Logf("Initial: %d entries, %d compressed",
		initialStats.TotalEntries, initialStats.CompressedEntries)

	// Optimize storage with threshold (only compress entries larger than 100 bytes)
	err = storage.OptimizeStorage(ctx, 100)
	if err != nil {
		t.Fatalf("Storage optimization failed: %v", err)
	}

	// Check final stats
	finalStats, err := storage.GetCompressionStats()
	if err != nil {
		t.Fatalf("Failed to get final stats: %v", err)
	}

	t.Logf("Final: %d entries, %d compressed",
		finalStats.TotalEntries, finalStats.CompressedEntries)

	// Should have more compressed entries after optimization
	if finalStats.CompressedEntries <= initialStats.CompressedEntries {
		t.Log("Note: Optimization didn't increase compressed entries (may be expected)")
	}

	// Verify all data is still retrievable
	for _, item := range testData {
		retrieved, err := storage.Retrieve(ctx, item.key)
		if err != nil {
			t.Fatalf("Failed to retrieve %s after optimization: %v", item.key, err)
		}
		if retrievedStr, ok := retrieved.(string); ok {
			if retrievedStr != item.data {
				t.Errorf("Data mismatch for %s after optimization", item.key)
			}
		}
	}
}

// TestCompressionFallback checks that compress/decompress degrade gracefully
// on incompressible input: decompressing data that was never compressed must
// return it unchanged.
func TestCompressionFallback(t *testing.T) {
	// Test that compression falls back gracefully for incompressible data
	tempDir := t.TempDir()
	storage, err := NewLocalStorage(tempDir, nil)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer storage.Close()

	// Random-like data that won't compress well
	randomData := []byte("a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6q7r8s9t0u1v2w3x4y5z6")

	// Test compression
	compressed, err := storage.compress(randomData)
	if err != nil {
		t.Fatalf("Compression failed: %v", err)
	}

	// Should return original data if compression doesn't help
	if len(compressed) >= len(randomData) {
		t.Log("Compression correctly returned original data for incompressible input")
	}

	// Test decompression of uncompressed data
	decompressed, err := storage.decompress(randomData)
	if err != nil {
		t.Fatalf("Decompression fallback failed: %v", err)
	}

	// Should return original data unchanged
	if !bytes.Equal(randomData, decompressed) {
		t.Error("Decompression fallback changed data")
	}
}