package com.streamconverter.examples;

import com.streamconverter.StreamConverter;
import com.streamconverter.command.IStreamCommand;
import com.streamconverter.command.impl.SampleStreamCommand;
import com.streamconverter.command.impl.csv.CsvNavigateCommand;
import com.streamconverter.command.impl.json.JsonNavigateCommand;
import com.streamconverter.command.rule.PassThroughRule;
import com.streamconverter.path.CSVPath;
import com.streamconverter.path.TreePath;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Performance optimization examples for StreamConverter.
 *
 * <p>This class demonstrates:
 *
 * <ul>
 *   <li>Memory-efficient processing techniques
 *   <li>Concurrent pipeline optimization
 *   <li>Large dataset handling
 *   <li>Performance monitoring
 * </ul>
 */
public class PerformanceOptimizationExamples {

  private static final Logger logger =
      LoggerFactory.getLogger(PerformanceOptimizationExamples.class);

  /**
   * Application entry point. Runs the performance optimization examples.
   *
   * @param args command-line arguments (not used)
   */
  public static void main(String[] args) {
    logger.info("⚡ StreamConverter - Performance Optimization Examples");
    logger.info("=====================================================\n");

    try {
      // Example 1: Large dataset processing
      largeDatasetProcessing();

      // Example 2: Concurrent pipeline optimization
      concurrentPipelineOptimization();

      // Example 3: Memory-efficient processing
      memoryEfficientProcessing();

      // Example 4: Performance monitoring
      performanceMonitoring();

    } catch (Exception e) {
      logger.error("Performance example failed: {}", e.getMessage(), e);
    }
  }

  /** Example 1: Processing large datasets efficiently. */
  private static void largeDatasetProcessing() throws IOException {
    logger.info("📊 Large Dataset Processing");
    logger.info("============================");

    // Generate a large CSV dataset
    StringBuilder largeDataset = new StringBuilder();
    largeDataset.append("id,name,value,timestamp\n");

    for (int i = 1; i <= 10000; i++) {
      largeDataset.append(
          String.format(
              "%d,Item%d,%.2f,2023-07-15T%02d:%02d:%02d\n",
              i, i, Math.random() * 1000, (i % 24), (i % 60), (i % 60)));
    }

    long startTime = System.currentTimeMillis();

    // Process the large dataset with memory-efficient navigation
    logger.info("🔄 Processing 10,000 records...");
    processDataWithTiming(
        largeDataset.toString(),
        CsvNavigateCommand.create(new CSVPath("name"), new PassThroughRule()));

    long endTime = System.currentTimeMillis();
    logger.info("⏱️ Processing completed in {} ms", endTime - startTime);

    // Show memory usage (totalMemory - freeMemory is approximate and GC-dependent)
    Runtime runtime = Runtime.getRuntime();
    long memoryUsed = runtime.totalMemory() - runtime.freeMemory();
    logger.info(String.format("💾 Memory used: %.2f MB", memoryUsed / (1024.0 * 1024.0)));

    logger.info("\n" + "=".repeat(60) + "\n");
  }

  /** Example 2: Concurrent pipeline optimization. */
  private static void concurrentPipelineOptimization() throws IOException {
    logger.info("🔗 Concurrent Pipeline Optimization");
    logger.info("====================================");

    String jsonData = generateLargeJsonData();

    // Single command processing
    long startTime = System.currentTimeMillis();
    logger.info("🔄 Single command processing...");
    processDataWithTiming(
        jsonData, JsonNavigateCommand.create(TreePath.fromJson("$"), new PassThroughRule()));
    long singleTime = System.currentTimeMillis() - startTime;

    // Pipeline processing (demonstrates concurrent execution)
    startTime = System.currentTimeMillis();
    logger.info("\n🔄 Pipeline processing (concurrent)...");
    IStreamCommand[] pipeline = {
      JsonNavigateCommand.create(TreePath.fromJson("$"), new PassThroughRule()),
      new SampleStreamCommand("stage1"),
      new SampleStreamCommand("stage2")
    };
    processDataWithTiming(jsonData, pipeline);
    long pipelineTime = System.currentTimeMillis() - startTime;

    logger.info("📈 Performance comparison:");
    logger.info("  Single command: {} ms", singleTime);
    logger.info("  Pipeline: {} ms", pipelineTime);
    logger.info("  CPU cores utilized: {}", Runtime.getRuntime().availableProcessors());

    logger.info("\n" + "=".repeat(60) + "\n");
  }

  /** Example 3: Memory-efficient processing demonstration. */
  private static void memoryEfficientProcessing() throws IOException {
    logger.info("💾 Memory-Efficient Processing");
    logger.info("===============================");

    // Generate data that would be problematic to hold in memory all at once
    StringBuilder hugeDataset = new StringBuilder();
    hugeDataset.append("id,data\n");

    for (int i = 1; i <= 50000; i++) {
      hugeDataset.append(String.format("%d,%s\n", i, "x".repeat(100))); // 100-char string per row
    }

    long beforeMemory = getUsedMemory();

    logger.info("🔄 Processing 50,000 records with 100-char data each...");
    processDataWithTiming(
        hugeDataset.toString(),
        CsvNavigateCommand.create(new CSVPath("id"), new PassThroughRule()));

    long afterMemory = getUsedMemory();
    long memoryIncrease = afterMemory - beforeMemory;

    logger.info(String.format("💾 Memory increase: %.2f MB", memoryIncrease / (1024.0 * 1024.0)));
    logger.info("✅ Demonstrates streaming processing with constant memory usage");

    logger.info("\n" + "=".repeat(60) + "\n");
  }

  /** Example 4: Performance monitoring and optimization. */
  private static void performanceMonitoring() throws IOException {
    logger.info("📊 Performance Monitoring");
    logger.info("==========================");

    String testData = generateTestData();

    // Test different processing strategies
    logger.info("🧪 Testing different processing strategies:\n");

    // Strategy 1: Direct processing
    long start = System.nanoTime();
    processDataWithTiming(
        testData, CsvNavigateCommand.create(new CSVPath("name"), new PassThroughRule()));
    long directTime = System.nanoTime() - start;

    // Strategy 2: Pipeline processing
    start = System.nanoTime();
    IStreamCommand[] pipeline = {
      CsvNavigateCommand.create(new CSVPath("name"), new PassThroughRule()),
      new SampleStreamCommand("processor")
    };
    processDataWithTiming(testData, pipeline);
    long pipelineTime = System.nanoTime() - start;

    // Strategy 3: Multi-stage processing
    start = System.nanoTime();
    IStreamCommand[] multiStage = {
      CsvNavigateCommand.create(new CSVPath("name"), new PassThroughRule()),
      new SampleStreamCommand("stage1"),
      new SampleStreamCommand("stage2"),
      new SampleStreamCommand("stage3")
    };
    processDataWithTiming(testData, multiStage);
    long multiStageTime = System.nanoTime() - start;

    // Performance analysis
    logger.info("\n📈 Performance Analysis:");
    logger.info(String.format("  Direct processing: %.2f ms", directTime / 1_000_000.0));
    logger.info(String.format("  Pipeline processing: %.2f ms", pipelineTime / 1_000_000.0));
    logger.info(String.format("  Multi-stage processing: %.2f ms", multiStageTime / 1_000_000.0));

    // Bytes per millisecond, scaled by 1000 to bytes per second, then reported as MB/s
    double throughput = (testData.length() * 1000.0) / (directTime / 1_000_000.0);
    logger.info(String.format("  Throughput: %.2f MB/s", throughput / (1024.0 * 1024.0)));

    logger.info("\n" + "=".repeat(60) + "\n");
  }

  /** Helper method to process data through a single command; timing is measured by the callers. */
  private static void processDataWithTiming(String inputData, IStreamCommand command)
      throws IOException {
    processDataWithTiming(inputData, new IStreamCommand[] {command});
  }

  /** Helper method to process data through a command pipeline; timing is measured by the callers. */
  private static void processDataWithTiming(String inputData, IStreamCommand[] commands)
      throws IOException {
    try (InputStream inputStream =
            new ByteArrayInputStream(inputData.getBytes(StandardCharsets.UTF_8));
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {

      StreamConverter converter = new StreamConverter(commands);
      converter.run(inputStream, outputStream);

      // Show only a sample of the output for large datasets
      String result = outputStream.toString(StandardCharsets.UTF_8);
      String[] lines = result.split("\n");

      if (lines.length > 10) {
        logger.info("Sample output (first 5 lines):");
        for (int i = 0; i < 5; i++) {
          logger.info(lines[i]);
        }
        logger.info("... ({} more lines)", lines.length - 5);
      } else {
        logger.info("Output:");
        logger.info(result.trim());
      }
    }
  }

  /** Generates large JSON data for testing. */
  private static String generateLargeJsonData() {
    StringBuilder json = new StringBuilder();
    json.append("{\"data\":[");

    for (int i = 1; i <= 1000; i++) {
      if (i > 1) {
        json.append(",");
      }
      json.append(
          String.format(
              "{\"id\":%d,\"name\":\"Item%d\",\"value\":%.2f,\"active\":%s}",
              i, i, Math.random() * 1000, (i % 2 == 0) ? "true" : "false"));
    }

    json.append("],\"total\":1000}");
    return json.toString();
  }

  /** Generates test data for performance monitoring. */
  private static String generateTestData() {
    StringBuilder data = new StringBuilder();
    data.append("id,name,category,price\n");

    for (int i = 1; i <= 5000; i++) {
      data.append(
          String.format("%d,Product%d,Category%d,%.2f\n", i, i, (i % 10) + 1, Math.random() * 100));
    }

    return data.toString();
  }

  /** Returns the current used heap memory (totalMemory - freeMemory), an approximation. */
  private static long getUsedMemory() {
    Runtime runtime = Runtime.getRuntime();
    return runtime.totalMemory() - runtime.freeMemory();
  }
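  /**
   * A minimal sketch of truly constant-memory streaming. The helpers above buffer the entire
   * input and output in byte arrays; reading from and writing to files avoids that. This method
   * is illustrative only and is not invoked by {@code main}: the file paths are hypothetical,
   * and it assumes only the {@code StreamConverter#run(InputStream, OutputStream)} call shape
   * already used in this class.
   */
  private static void streamFileToFile(java.nio.file.Path input, java.nio.file.Path output)
      throws IOException {
    // Same command shape as Example 1; any IStreamCommand pipeline could be swapped in.
    IStreamCommand[] commands = {
      CsvNavigateCommand.create(new CSVPath("name"), new PassThroughRule())
    };

    // Data flows from the input file, through the commands, to the output file
    // without ever materializing the full dataset as a String on the heap.
    try (InputStream in = java.nio.file.Files.newInputStream(input);
        java.io.OutputStream out = java.nio.file.Files.newOutputStream(output)) {
      new StreamConverter(commands).run(in, out);
    }
  }
}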