From 69448c8250f4b0bcfea9524cb14d2d47e7314edd Mon Sep 17 00:00:00 2001 From: Daniel Graf Date: Sat, 28 Feb 2026 06:30:51 +0100 Subject: [PATCH 1/2] refactor: streamline ImportService and migrate to ImportStatistics for progress tracking - Replaced inline progress tracking methods with centralized `ImportStatistics` class. - Removed redundant print methods and progress reporting logic. - Consolidated `ImportService` phases with cleaner method calls and improved encapsulation. - Migrated service classes into a dedicated `importer` package for improved organization. --- README.md | 2 +- .../paikka/PaikkaApplication.java | 3 +- .../paikka/service/PaikkaMetadata.java | 2 +- .../GeometrySimplificationService.java | 2 +- .../{ => importer}/HierarchyCache.java | 3 +- .../service/{ => importer}/ImportService.java | 333 +----------- .../service/importer/ImportStatistics.java | 494 ++++++++++++++++++ .../GeometrySimplificationServiceTest.java | 2 +- .../paikka/service/ImportServiceTest.java | 3 +- 9 files changed, 521 insertions(+), 323 deletions(-) rename src/main/java/com/dedicatedcode/paikka/service/{ => importer}/GeometrySimplificationService.java (98%) rename src/main/java/com/dedicatedcode/paikka/service/{ => importer}/HierarchyCache.java (98%) rename src/main/java/com/dedicatedcode/paikka/service/{ => importer}/ImportService.java (80%) create mode 100644 src/main/java/com/dedicatedcode/paikka/service/importer/ImportStatistics.java diff --git a/README.md b/README.md index 8f82988..333d069 100644 --- a/README.md +++ b/README.md @@ -114,7 +114,7 @@ The [OpenStreetMap website](https://www.openstreetmap.org/export/) allows you to | Dataset | original | filtered... 
| time taken | reduction | during import | imported | time taken | reduction | |------------|----------|-------------|------------|-----------|---------------|----------|------------|-----------| -| Planet | 86 GB | 33 GB | 40 min | ~60% | ~ 31 GB | 8.15 GB | ~ 16 h | ~90% | +| Planet | 86 GB | 34 GB | 40 min | ~60% | ~ 31 GB | 8.15 GB | ~ 16 h | ~90% | | Germany | 4.4 GB | 1.8 GB | 2 min | ~59% | ~ 14.4 GB | 3,81 GB | ~ 18 min | ~13% | | Netherland | 1.4 GB | 394 MB | 30 s | ~70% | ~ 2,69 GB | 705,7 MB | ~ 2 min | ~50% | diff --git a/src/main/java/com/dedicatedcode/paikka/PaikkaApplication.java b/src/main/java/com/dedicatedcode/paikka/PaikkaApplication.java index 86edebe..6cb1d13 100644 --- a/src/main/java/com/dedicatedcode/paikka/PaikkaApplication.java +++ b/src/main/java/com/dedicatedcode/paikka/PaikkaApplication.java @@ -16,11 +16,10 @@ package com.dedicatedcode.paikka; -import com.dedicatedcode.paikka.service.ImportService; +import com.dedicatedcode.paikka.service.importer.ImportService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; diff --git a/src/main/java/com/dedicatedcode/paikka/service/PaikkaMetadata.java b/src/main/java/com/dedicatedcode/paikka/service/PaikkaMetadata.java index f3fbf25..37cd74b 100644 --- a/src/main/java/com/dedicatedcode/paikka/service/PaikkaMetadata.java +++ b/src/main/java/com/dedicatedcode/paikka/service/PaikkaMetadata.java @@ -18,7 +18,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; -record PaikkaMetadata( +public record PaikkaMetadata( @JsonProperty("importTimestamp") String importTimestamp, @JsonProperty("dataVersion") String dataVersion, @JsonProperty("file") String file, diff --git 
a/src/main/java/com/dedicatedcode/paikka/service/GeometrySimplificationService.java b/src/main/java/com/dedicatedcode/paikka/service/importer/GeometrySimplificationService.java similarity index 98% rename from src/main/java/com/dedicatedcode/paikka/service/GeometrySimplificationService.java rename to src/main/java/com/dedicatedcode/paikka/service/importer/GeometrySimplificationService.java index 4681c28..04907db 100644 --- a/src/main/java/com/dedicatedcode/paikka/service/GeometrySimplificationService.java +++ b/src/main/java/com/dedicatedcode/paikka/service/importer/GeometrySimplificationService.java @@ -14,7 +14,7 @@ * along with Paikka. If not, see . */ -package com.dedicatedcode.paikka.service; +package com.dedicatedcode.paikka.service.importer; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.simplify.DouglasPeuckerSimplifier; diff --git a/src/main/java/com/dedicatedcode/paikka/service/HierarchyCache.java b/src/main/java/com/dedicatedcode/paikka/service/importer/HierarchyCache.java similarity index 98% rename from src/main/java/com/dedicatedcode/paikka/service/HierarchyCache.java rename to src/main/java/com/dedicatedcode/paikka/service/importer/HierarchyCache.java index 1d68ac1..cf6a33c 100644 --- a/src/main/java/com/dedicatedcode/paikka/service/HierarchyCache.java +++ b/src/main/java/com/dedicatedcode/paikka/service/importer/HierarchyCache.java @@ -14,9 +14,10 @@ * along with Paikka. If not, see . 
*/ -package com.dedicatedcode.paikka.service; +package com.dedicatedcode.paikka.service.importer; import com.dedicatedcode.paikka.flatbuffers.Boundary; +import com.dedicatedcode.paikka.service.S2Helper; import com.github.benmanes.caffeine.cache.Cache; import org.locationtech.jts.algorithm.locate.IndexedPointInAreaLocator; import org.locationtech.jts.geom.Coordinate; diff --git a/src/main/java/com/dedicatedcode/paikka/service/ImportService.java b/src/main/java/com/dedicatedcode/paikka/service/importer/ImportService.java similarity index 80% rename from src/main/java/com/dedicatedcode/paikka/service/ImportService.java rename to src/main/java/com/dedicatedcode/paikka/service/importer/ImportService.java index 8a93af8..f779a5d 100644 --- a/src/main/java/com/dedicatedcode/paikka/service/ImportService.java +++ b/src/main/java/com/dedicatedcode/paikka/service/importer/ImportService.java @@ -14,11 +14,13 @@ * along with Paikka. If not, see . */ -package com.dedicatedcode.paikka.service; +package com.dedicatedcode.paikka.service.importer; import com.dedicatedcode.paikka.config.PaikkaConfiguration; import com.dedicatedcode.paikka.flatbuffers.*; import com.dedicatedcode.paikka.flatbuffers.Geometry; +import com.dedicatedcode.paikka.service.PaikkaMetadata; +import com.dedicatedcode.paikka.service.S2Helper; import com.fasterxml.jackson.databind.ObjectMapper; import com.github.benmanes.caffeine.cache.Caffeine; import com.google.common.geometry.S2CellId; @@ -50,7 +52,6 @@ import java.time.format.DateTimeFormatter; import java.util.*; import java.util.concurrent.*; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Stream; @@ -66,9 +67,8 @@ public class ImportService { private final Map tagCache = new ConcurrentHashMap<>(1000); private final int fileReadWindowSize; - private int currentStep = 0; - private static final int TOTAL_STEPS = 5; private final AtomicLong 
sequence = new AtomicLong(0); + public ImportService(S2Helper s2Helper, GeometrySimplificationService geometrySimplificationService, PaikkaConfiguration config) { this.s2Helper = s2Helper; this.geometrySimplificationService = geometrySimplificationService; @@ -119,7 +119,7 @@ public void importData(String pbfFilePath, String dataDir) throws Exception { RocksDB.loadLibrary(); ImportStatistics stats = new ImportStatistics(); - startProgressReporter(stats); + stats.startProgressReporter(); try (Cache sharedCache = new LRUCache(2 * 1024 * 1024 * 1024L)) { BlockBasedTableConfig tableConfig = new BlockBasedTableConfig() @@ -189,32 +189,27 @@ public void importData(String pbfFilePath, String dataDir) throws Exception { RocksDB appendDb = RocksDB.open(appendOpts, appendDbPath.toString())) { // PASS 1: Discovery & Indexing - currentStep = 1; - printPhaseHeader("PASS 1: Discovery & Indexing"); + stats.printPhaseHeader("PASS 1: Discovery & Indexing"); long pass1Start = System.currentTimeMillis(); - stats.setCurrentPhase("1.1.1: Discovery & Indexing"); + stats.setCurrentPhase(1, "1.1.1: Discovery & Indexing"); pass1DiscoveryAndIndexing(pbfFile, wayIndexDb, neededNodesDb, relIndexDb, poiIndexDb, stats); - printPhaseSummary("PASS 1", pass1Start); + stats.printPhaseSummary("PASS 1", pass1Start); // PASS 2: Nodes Cache, Boundaries, POIs - currentStep = 2; - printPhaseHeader("PASS 2: Nodes Cache, Boundaries, POIs"); + stats.printPhaseHeader("PASS 2: Nodes Cache, Boundaries, POIs"); long pass2Start = System.currentTimeMillis(); - stats.setCurrentPhase("1.1.2: Caching node coordinates"); + stats.setCurrentPhase(2,"1.1.2: Caching node coordinates"); cacheNeededNodeCoordinates(pbfFile, neededNodesDb, nodeCache, stats); - currentStep = 3; - stats.setCurrentPhase("1.2: Processing administrative boundaries"); + stats.setCurrentPhase(3, "1.2: Processing administrative boundaries"); processAdministrativeBoundariesFromIndex(relIndexDb, nodeCache, wayIndexDb, gridIndexDb, boundariesDb, 
stats); - currentStep = 4; - stats.setCurrentPhase("2.1: Processing POIs & Sharding"); + stats.setCurrentPhase(4,"2.1: Processing POIs & Sharding"); pass2PoiShardingFromIndex(nodeCache, wayIndexDb, appendDb, boundariesDb, poiIndexDb, gridIndexDb, stats); - currentStep = 5; - stats.setCurrentPhase("2.2: Compacting POIs"); + stats.setCurrentPhase(5, "2.2: Compacting POIs"); compactShards(appendDb, shardsDb, stats); stats.stop(); - printPhaseSummary("PASS 2", pass2Start); + stats.printPhaseSummary("PASS 2", pass2Start); shardsDb.compactRange(); boundariesDb.compactRange(); @@ -236,13 +231,13 @@ public void importData(String pbfFilePath, String dataDir) throws Exception { shardsDb.flush(new FlushOptions().setWaitForFlush(true)); boundariesDb.flush(new FlushOptions().setWaitForFlush(true)); - printFinalStatistics(stats); - printSuccess(); + stats.printFinalStatistics(); + stats.printSuccess(); writeMetadataFile(pbfFile, dataDirectory); } catch (Exception e) { stats.stop(); - printError("IMPORT FAILED: " + e.getMessage()); + stats.printError("IMPORT FAILED: " + e.getMessage()); e.printStackTrace(); throw e; } @@ -273,89 +268,7 @@ private void writeMetadataFile(Path pbfFile, Path dataDirectory) throws IOExcept System.out.println("\n\033[1;32mMetadata file written to: " + metadataPath + "\033[0m"); } - private void startProgressReporter(ImportStatistics stats) { - boolean isTty = System.console() != null; - - Thread.ofPlatform().daemon().start(() -> { - while (stats.isRunning()) { - long elapsed = System.currentTimeMillis() - stats.getStartTime(); - long phaseElapsed = System.currentTimeMillis() - stats.getPhaseStartTime(); - double phaseSeconds = phaseElapsed / 1000.0; - - String phase = stats.getCurrentPhase(); - StringBuilder sb = new StringBuilder(); - - if (isTty) { - sb.append("\r\033[K"); - } - - // Add step indicator - sb.append(String.format("\033[1;90m[%d/%d]\033[0m ", currentStep, TOTAL_STEPS)); - - if (phase.contains("1.1.1")) { - long pbfPerSec = phaseSeconds 
> 0 ? (long)(stats.getEntitiesRead() / phaseSeconds) : 0; - sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mScanning PBF Structure\033[0m", formatTime(elapsed))); - sb.append(String.format(" │ \033[32mPBF Entities:\033[0m %s \033[33m(%s/s)\033[0m", - formatCompactNumber(stats.getEntitiesRead()), formatCompactRate(pbfPerSec))); - sb.append(String.format(" │ \033[37mNodes Found:\033[0m %s", formatCompactNumber(stats.getNodesFound()))); - sb.append(String.format(" │ \033[34mWays Found:\033[0m %s", formatCompactNumber(stats.getWaysProcessed()))); - sb.append(String.format(" │ \033[35mRelations:\033[0m %s", formatCompactNumber(stats.getRelationsFound()))); - - } else if (phase.contains("1.1.2")) { - long nodesPerSec = phaseSeconds > 0 ? (long)(stats.getNodesCached() / phaseSeconds) : 0; - sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mCaching Node Coordinates\033[0m", formatTime(elapsed))); - sb.append(String.format(" │ \033[32mNodes Cached:\033[0m %s \033[33m(%s/s)\033[0m", - formatCompactNumber(stats.getNodesCached()), formatCompactRate(nodesPerSec))); - sb.append(String.format(" │ \033[36mQueue:\033[0m %s", formatCompactNumber(stats.getQueueSize()))); - sb.append(String.format(" │ \033[37mThreads:\033[0m %d", stats.getActiveThreads())); - - } else if (phase.contains("1.2")) { - long boundsPerSec = phaseSeconds > 0 ? (long)(stats.getBoundariesProcessed() / phaseSeconds) : 0; - sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mProcessing Admin Boundaries\033[0m", formatTime(elapsed))); - sb.append(String.format(" │ \033[32mBoundaries:\033[0m %s \033[33m(%s/s)\033[0m", - formatCompactNumber(stats.getBoundariesProcessed()), formatCompactRate(boundsPerSec))); - sb.append(String.format(" │ \033[37mThreads:\033[0m %d", stats.getActiveThreads())); - - } else if (phase.contains("2.1")) { - long poisPerSec = phaseSeconds > 0 ? (long)(stats.getPoisProcessed() / phaseSeconds) : 0; - long poisReadSec = phaseSeconds > 0 ? 
(long)(stats.getPoiIndexRecRead() / phaseSeconds) : 0; - sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mProcessing POIs & Sharding\033[0m", formatTime(elapsed))); - sb.append(String.format(" │ \033[32mPOI Index Rec Read:\033[0m %s \033[33m%s\033[0m", formatCompactNumber(stats.getPoiIndexRecRead()), stats.isPoiIndexRecReadDone() ? "(done)" : String.format("(%s/s)",formatCompactRate(poisReadSec)))); - sb.append(String.format(" │ \033[32mPOIs Processed:\033[0m %s \033[33m(%s/s)\033[0m", formatCompactNumber(stats.getPoisProcessed()), formatCompactRate(poisPerSec))); - sb.append(String.format(" │ \033[36mQueue:\033[0m %s", formatCompactNumber(stats.getQueueSize()))); - sb.append(String.format(" │ \033[37mThreads:\033[0m %d", stats.getActiveThreads())); - - } else if (phase.contains("2.2")) { - long compactionElapsed = System.currentTimeMillis() - stats.getCompactionStartTime(); - double compactionPhaseSeconds = compactionElapsed / 1000.0; - long shardsCompacted = stats.getShardsCompacted(); - long shardsPerSec = compactionPhaseSeconds > 0 ? (long)(shardsCompacted / compactionPhaseSeconds) : 0; - sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mCompacting POIs\033[0m", formatTime(elapsed))); - sb.append(String.format(" │ \033[32mShards Compacted:\033[0m %s \033[33m(%s/s)\033[0m", - formatCompactNumber(shardsCompacted), formatCompactRate(shardsPerSec))); - - } else { - sb.append(String.format("\033[1;36m[%s]\033[0m %s", formatTime(elapsed), phase)); - } - sb.append(String.format(" │ \033[31mHeap:\033[0m %s", stats.getMemoryStats())); - - if (isTty) { - System.out.print(sb); - System.out.flush(); - } else { - System.out.println(sb); - } - - try { - Thread.sleep(isTty ? 
500 : 5000); - } catch (InterruptedException e) { - break; - } - } - if (isTty) System.out.println(); - }); - } private void updateGridIndexEntry(RocksDB gridIndexDb, long cellId, long osmId) throws Exception { byte[] key = s2Helper.longToByteArray(cellId); @@ -452,7 +365,6 @@ private void pass2PoiShardingFromIndex(RocksDB nodeCache, RocksDB poiIndexDb, RocksDB gridIndexDb, ImportStatistics stats) throws Exception { - stats.setCurrentPhase("2.1: Processing POIs & Sharding"); final Map> shardBuffer = new ConcurrentHashMap<>(); Runnable flushTask = () -> { @@ -1500,34 +1412,6 @@ public int read(byte[] b, int off, int len) throws IOException { } } - private String formatTime(long ms) { - long s = ms / 1000; - return String.format("%d:%02d:%02d", s / 3600, (s % 3600) / 60, s % 60); - } - - private String formatCompactNumber(long n) { - if (n < 1000) return String.valueOf(n); - if (n < 1_000_000) return String.format("%.2fk", n / 1000.0); - return String.format("%.3fM", n / 1_000_000.0); - } - private String formatCompactRate(long n) { - if (n < 1000) return String.valueOf(n); - if (n < 1_000_000) return String.format("%.1fk", n / 1000.0); - return String.format("%.1fM", n / 1_000_000.0); - } - - private String formatSize(long bytes) { - if (bytes < 1024) return bytes + " B"; - double kb = bytes / 1024.0; - if (kb < 1024) return String.format("%.1f KB", kb); - double mb = kb / 1024.0; - if (mb < 1024) return String.format("%.1f MB", mb); - double gb = mb / 1024.0; - if (gb < 1024) return String.format("%.2f GB", gb); - double tb = gb / 1024.0; - return String.format("%.2f TB", tb); - } - private String centerText(String text) { int pad = (80 - text.length()) / 2; return " ".repeat(Math.max(0, pad)) + text; @@ -1544,24 +1428,6 @@ private void printHeader(String pbfFilePath, String dataDir) { System.out.println("File window size: " + (this.fileReadWindowSize / (1024 * 1024)) + "MB"); System.out.println("Sharding Chunk Size: " + 
this.config.getImportConfiguration().getChunkSize()); } - - private void printPhaseHeader(String phase) { - System.out.println("\n\033[1;36m" + "─".repeat(80) + "\n" + phase + "\n" + "─".repeat(80) + "\033[0m"); - } - - private void printSuccess() { - System.out.println("\n\033[1;32m" + "=".repeat(80) + "\n" + centerText("IMPORT COMPLETED SUCCESSFULLY") + "\n" + "=".repeat(80) + "\033[0m"); - } - - private void printError(String message) { - System.out.println("\n\033[1;31m" + "=".repeat(80) + "\n" + centerText(message) + "\n" + "=".repeat(80) + "\033[0m"); - } - - private void printPhaseSummary(String phaseName, long phaseStartTime) { - long phaseTime = System.currentTimeMillis() - phaseStartTime; - System.out.println(String.format("\n\033[1;32m✓ %s COMPLETED\033[0m \033[2m(%s)\033[0m", phaseName, formatTime(phaseTime))); - } - private ExecutorService createExecutorService(int maxThreads) { if (maxThreads <= 0) { return Executors.newVirtualThreadPerTaskExecutor(); @@ -1648,168 +1514,7 @@ private void recordSizeMetrics(ImportStatistics stats, stats.setTmpTotalBytes(tmpTotal); } - private void printFinalStatistics(ImportStatistics stats) { - System.out.println("\n\033[1;36m" + "═".repeat(80) + "\n" + centerText("🎯 FINAL IMPORT STATISTICS") + "\n" + "═".repeat(80) + "\033[0m"); - - long totalTime = Math.max(1, stats.getTotalTime()); - double totalSeconds = totalTime / 1000.0; - - System.out.printf("\n\033[1;37m⏱️ Total Import Time:\033[0m \033[1;33m%s\033[0m%n%n", formatTime(stats.getTotalTime())); - - System.out.println("\033[1;37m📊 Processing Summary:\033[0m"); - System.out.println("┌─────────────────┬─────────────────┬─────────────────┐"); - System.out.println("│ \033[1mEntity Type\033[0m │ \033[1mTotal Count\033[0m │ \033[1mAvg Speed\033[0m │"); - System.out.println("├─────────────────┼─────────────────┼─────────────────┤"); - System.out.printf("│ \033[32mPBF Entities\033[0m │ %15s │ %13s/s │%n", - formatCompactNumber(stats.getEntitiesRead()), - 
formatCompactNumber((long)(stats.getEntitiesRead() / totalSeconds))); - System.out.printf("│ \033[37mNodes Found\033[0m │ %15s │ %13s/s │%n", - formatCompactNumber(stats.getNodesFound()), - formatCompactNumber((long)(stats.getNodesFound() / totalSeconds))); - System.out.printf("│ \033[34mNodes Cached\033[0m │ %15s │ %13s/s │%n", - formatCompactNumber(stats.getNodesCached()), - formatCompactNumber((long)(stats.getNodesCached() / totalSeconds))); - System.out.printf("│ \033[35mWays Processed\033[0m │ %15s │ %13s/s │%n", - formatCompactNumber(stats.getWaysProcessed()), - formatCompactNumber((long)(stats.getWaysProcessed() / totalSeconds))); - System.out.printf("│ \033[36mBoundaries\033[0m │ %15s │ %13s/s │%n", - formatCompactNumber(stats.getBoundariesProcessed()), - formatCompactNumber((long)(stats.getBoundariesProcessed() / totalSeconds))); - System.out.printf("│ \033[33mPOIs Created\033[0m │ %15s │ %13s/s │%n", - formatCompactNumber(stats.getPoisProcessed()), - formatCompactNumber((long)(stats.getPoisProcessed() / totalSeconds))); - System.out.println("└─────────────────┴─────────────────┴─────────────────┘"); - - long totalObjects = stats.getNodesFound() + stats.getNodesCached() + stats.getWaysProcessed() + stats.getBoundariesProcessed() + stats.getPoisProcessed(); - System.out.printf("%n\033[1;37m🚀 Overall Throughput:\033[0m \033[1;32m%s objects\033[0m processed at \033[1;33m%s objects/sec\033[0m%n", - formatCompactNumber(totalObjects), - formatCompactNumber((long)(totalObjects / totalSeconds))); - - System.out.printf("\033[1;37m💾 Database Operations:\033[0m \033[1;36m%s writes\033[0m%n", - formatCompactNumber(stats.getRocksDbWrites())); - - System.out.println("\n\033[1;37m📦 Dataset Size:\033[0m " + formatSize(stats.getDatasetBytes())); - System.out.println(" • poi_shards: " + formatSize(stats.getShardsBytes())); - System.out.println(" • boundaries: " + formatSize(stats.getBoundariesBytes())); - - System.out.println("\n\033[1;37m🧹 Temporary DBs:\033[0m " + 
formatSize(stats.getTmpTotalBytes())); - System.out.println(" • grid_index: " + formatSize(stats.getTmpGridBytes())); - System.out.println(" • node_cache: " + formatSize(stats.getTmpNodeBytes())); - System.out.println(" • way_index: " + formatSize(stats.getTmpWayBytes())); - System.out.println(" • needed_nodes:" + formatSize(stats.getTmpNeededBytes())); - System.out.println(" • rel_index: " + formatSize(stats.getTmpRelBytes())); - System.out.println(" • poi_index: " + formatSize(stats.getTmpPoiBytes())); - System.out.println(" • append_index: " + formatSize(stats.getTmpAppendBytes())); - System.out.println(); - } - private static class ImportStatistics { - private final AtomicLong entitiesRead = new AtomicLong(0); - private final AtomicLong nodesCached = new AtomicLong(0); - private final AtomicLong nodesFound = new AtomicLong(0); - private final AtomicLong waysProcessed = new AtomicLong(0); - private final AtomicLong relationsFound = new AtomicLong(0); - private final AtomicLong boundariesProcessed = new AtomicLong(0); - private final AtomicLong poisProcessed = new AtomicLong(0); - private final AtomicLong poiIndexRecRead = new AtomicLong(0); - private final AtomicBoolean poiIndexRecReadDone = new AtomicBoolean(false); - private final AtomicLong rocksDbWrites = new AtomicLong(0); - private final AtomicLong queueSize = new AtomicLong(0); - private final AtomicLong activeThreads = new AtomicLong(0); - private volatile String currentPhase = "Initializing"; - private volatile boolean running = true; - private final long startTime = System.currentTimeMillis(); - private volatile long phaseStartTime = System.currentTimeMillis(); - private long totalTime; - private final AtomicLong shardsCompacted = new AtomicLong(0); - private volatile long compactionStartTime = 0; - - private volatile long datasetBytes; - private volatile long shardsBytes; - private volatile long boundariesBytes; - private volatile long tmpGridBytes; - private volatile long tmpNodeBytes; - private 
volatile long tmpWayBytes; - private volatile long tmpNeededBytes; - private volatile long tmpRelBytes; - private volatile long tmpPoiBytes; - private volatile long tmpTotalBytes; - private volatile long tmpAppendBytes; - - public long getEntitiesRead() { return entitiesRead.get(); } - public void incrementEntitiesRead() { entitiesRead.incrementAndGet(); } - public long getNodesCached() { return nodesCached.get(); } - public void incrementNodesCached() { nodesCached.incrementAndGet(); } - public long getNodesFound() { return nodesFound.get(); } - public void incrementNodesFound() { nodesFound.incrementAndGet(); } - public long getWaysProcessed() { return waysProcessed.get(); } - public void incrementWaysProcessed() { waysProcessed.incrementAndGet(); } - public long getRelationsFound() { return relationsFound.get(); } - public void incrementRelationsFound() { relationsFound.incrementAndGet(); } - public long getBoundariesProcessed() { return boundariesProcessed.get(); } - public void incrementBoundariesProcessed() { boundariesProcessed.incrementAndGet(); } - public long getPoisProcessed() { return poisProcessed.get(); } - - public void incrementPoisProcessed(int count) { - poisProcessed.addAndGet(count); - } - public long getPoiIndexRecRead() { return poiIndexRecRead.get(); } - public void incrementPoiIndexRecRead() { poiIndexRecRead.incrementAndGet(); } - public boolean isPoiIndexRecReadDone() { return poiIndexRecReadDone.get(); } - public void setPoiIndexRecReadDone() { poiIndexRecReadDone.set(true); } - public long getRocksDbWrites() { return rocksDbWrites.get(); } - public void incrementRocksDbWrites() { rocksDbWrites.incrementAndGet(); } - public int getQueueSize() { return (int) queueSize.get(); } - public void setQueueSize(int size) { queueSize.set(size); } - public int getActiveThreads() { return (int) activeThreads.get(); } - public void incrementActiveThreads() { activeThreads.incrementAndGet(); } - public void decrementActiveThreads() { 
activeThreads.decrementAndGet(); } - public String getCurrentPhase() { return currentPhase; } - public void setCurrentPhase(String phase) { - this.currentPhase = phase; - this.phaseStartTime = System.currentTimeMillis(); - } - public long getPhaseStartTime() { return phaseStartTime; } - public boolean isRunning() { return running; } - public void stop() { this.running = false; } - public long getStartTime() { return startTime; } - public long getTotalTime() { return totalTime; } - public void setTotalTime(long t) { this.totalTime = t; } - public long getShardsCompacted() { return shardsCompacted.get(); } - public void incrementShardsCompacted() { shardsCompacted.incrementAndGet(); } - public void setCompactionStartTime(long t) { this.compactionStartTime = t; } - public long getCompactionStartTime() { return compactionStartTime; } - - public long getDatasetBytes() { return datasetBytes; } - public void setDatasetBytes(long v) { this.datasetBytes = v; } - public long getShardsBytes() { return shardsBytes; } - public void setShardsBytes(long v) { this.shardsBytes = v; } - public long getBoundariesBytes() { return boundariesBytes; } - public void setBoundariesBytes(long v) { this.boundariesBytes = v; } - public long getTmpGridBytes() { return tmpGridBytes; } - public void setTmpGridBytes(long v) { this.tmpGridBytes = v; } - public long getTmpNodeBytes() { return tmpNodeBytes; } - public void setTmpNodeBytes(long v) { this.tmpNodeBytes = v; } - public long getTmpWayBytes() { return tmpWayBytes; } - public void setTmpWayBytes(long v) { this.tmpWayBytes = v; } - public long getTmpNeededBytes() { return tmpNeededBytes; } - public void setTmpNeededBytes(long v) { this.tmpNeededBytes = v; } - public long getTmpRelBytes() { return tmpRelBytes; } - public void setTmpRelBytes(long v) { this.tmpRelBytes = v; } - public long getTmpPoiBytes() { return tmpPoiBytes; } - public void setTmpPoiBytes(long v) { this.tmpPoiBytes = v; } - public long getTmpAppendBytes() { return 
tmpAppendBytes; } - public void setTmpAppendBytes(long v) { this.tmpAppendBytes = v; } - public long getTmpTotalBytes() { return tmpTotalBytes; } - public void setTmpTotalBytes(long v) { this.tmpTotalBytes = v; } - - public String getMemoryStats() { - Runtime r = Runtime.getRuntime(); - long used = (r.totalMemory() - r.freeMemory()) / 1024 / 1024 / 1024; - long max = r.maxMemory() / 1024 / 1024 / 1024; - return String.format("%dGB/%dGB", used, max); - } - } private record PoiData(long id, double lat, double lon, String type, String subtype, List names, AddressData address, List hierarchy, @@ -1879,9 +1584,7 @@ public void close() throws Exception { try { flush(); } finally { - if (batch != null) { - batch.close(); - } + batch.close(); if (writeOptions != null) { writeOptions.close(); } diff --git a/src/main/java/com/dedicatedcode/paikka/service/importer/ImportStatistics.java b/src/main/java/com/dedicatedcode/paikka/service/importer/ImportStatistics.java new file mode 100644 index 0000000..c3b08ba --- /dev/null +++ b/src/main/java/com/dedicatedcode/paikka/service/importer/ImportStatistics.java @@ -0,0 +1,494 @@ +/* + * This file is part of paikka. + * + * Paikka is free software: you can redistribute it and/or + * modify it under the terms of the GNU Affero General Public License + * as published by the Free Software Foundation, either version 3 or + * any later version. + * + * Paikka is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied + * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + * See the GNU Affero General Public License for more details. + * You should have received a copy of the GNU Affero General Public License + * along with Paikka. If not, see . 
+ */ + +package com.dedicatedcode.paikka.service.importer; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; + +class ImportStatistics { + private final AtomicLong entitiesRead = new AtomicLong(0); + private final AtomicLong nodesCached = new AtomicLong(0); + private final AtomicLong nodesFound = new AtomicLong(0); + private final AtomicLong waysProcessed = new AtomicLong(0); + private final AtomicLong relationsFound = new AtomicLong(0); + private final AtomicLong boundariesProcessed = new AtomicLong(0); + private final AtomicLong poisProcessed = new AtomicLong(0); + private final AtomicLong poiIndexRecRead = new AtomicLong(0); + private final AtomicBoolean poiIndexRecReadDone = new AtomicBoolean(false); + private final AtomicLong rocksDbWrites = new AtomicLong(0); + private final AtomicLong queueSize = new AtomicLong(0); + private final AtomicLong activeThreads = new AtomicLong(0); + private volatile String currentPhase = "Initializing"; + private volatile boolean running = true; + private final long startTime = System.currentTimeMillis(); + private volatile long phaseStartTime = System.currentTimeMillis(); + private long totalTime; + private final AtomicLong shardsCompacted = new AtomicLong(0); + private volatile long compactionStartTime = 0; + + private volatile long datasetBytes; + private volatile long shardsBytes; + private volatile long boundariesBytes; + private volatile long tmpGridBytes; + private volatile long tmpNodeBytes; + private volatile long tmpWayBytes; + private volatile long tmpNeededBytes; + private volatile long tmpRelBytes; + private volatile long tmpPoiBytes; + private volatile long tmpTotalBytes; + private volatile long tmpAppendBytes; + + private final int TOTAL_STEPS = 5; + private int currentStep = 0; + + public long getEntitiesRead() { + return entitiesRead.get(); + } + + public void incrementEntitiesRead() { + entitiesRead.incrementAndGet(); + } + + public long getNodesCached() { + 
return nodesCached.get(); + } + + public void incrementNodesCached() { + nodesCached.incrementAndGet(); + } + + public long getNodesFound() { + return nodesFound.get(); + } + + public void incrementNodesFound() { + nodesFound.incrementAndGet(); + } + + public long getWaysProcessed() { + return waysProcessed.get(); + } + + public void incrementWaysProcessed() { + waysProcessed.incrementAndGet(); + } + + public long getRelationsFound() { + return relationsFound.get(); + } + + public void incrementRelationsFound() { + relationsFound.incrementAndGet(); + } + + public long getBoundariesProcessed() { + return boundariesProcessed.get(); + } + + public void incrementBoundariesProcessed() { + boundariesProcessed.incrementAndGet(); + } + + public long getPoisProcessed() { + return poisProcessed.get(); + } + + public void incrementPoisProcessed(int count) { + poisProcessed.addAndGet(count); + } + + public long getPoiIndexRecRead() { + return poiIndexRecRead.get(); + } + + public void incrementPoiIndexRecRead() { + poiIndexRecRead.incrementAndGet(); + } + + public boolean isPoiIndexRecReadDone() { + return poiIndexRecReadDone.get(); + } + + public void setPoiIndexRecReadDone() { + poiIndexRecReadDone.set(true); + } + + public long getRocksDbWrites() { + return rocksDbWrites.get(); + } + + public void incrementRocksDbWrites() { + rocksDbWrites.incrementAndGet(); + } + + public int getQueueSize() { + return (int) queueSize.get(); + } + + public void setQueueSize(int size) { + queueSize.set(size); + } + + public int getActiveThreads() { + return (int) activeThreads.get(); + } + + public void incrementActiveThreads() { + activeThreads.incrementAndGet(); + } + + public void decrementActiveThreads() { + activeThreads.decrementAndGet(); + } + + public String getCurrentPhase() { + return currentPhase; + } + + public void setCurrentPhase(int step, String phase) { + this.currentPhase = phase; + this.phaseStartTime = System.currentTimeMillis(); + this.currentStep = step; + } + + public 
long getPhaseStartTime() { + return phaseStartTime; + } + + public boolean isRunning() { + return running; + } + + public void stop() { + this.running = false; + } + + public long getStartTime() { + return startTime; + } + + public long getTotalTime() { + return totalTime; + } + + public void setTotalTime(long t) { + this.totalTime = t; + } + + public long getShardsCompacted() { + return shardsCompacted.get(); + } + + public void incrementShardsCompacted() { + shardsCompacted.incrementAndGet(); + } + + public void setCompactionStartTime(long t) { + this.compactionStartTime = t; + } + + public long getCompactionStartTime() { + return compactionStartTime; + } + + public long getDatasetBytes() { + return datasetBytes; + } + + public void setDatasetBytes(long v) { + this.datasetBytes = v; + } + + public long getShardsBytes() { + return shardsBytes; + } + + public void setShardsBytes(long v) { + this.shardsBytes = v; + } + + public long getBoundariesBytes() { + return boundariesBytes; + } + + public void setBoundariesBytes(long v) { + this.boundariesBytes = v; + } + + public long getTmpGridBytes() { + return tmpGridBytes; + } + + public void setTmpGridBytes(long v) { + this.tmpGridBytes = v; + } + + public long getTmpNodeBytes() { + return tmpNodeBytes; + } + + public void setTmpNodeBytes(long v) { + this.tmpNodeBytes = v; + } + + public long getTmpWayBytes() { + return tmpWayBytes; + } + + public void setTmpWayBytes(long v) { + this.tmpWayBytes = v; + } + + public long getTmpNeededBytes() { + return tmpNeededBytes; + } + + public void setTmpNeededBytes(long v) { + this.tmpNeededBytes = v; + } + + public long getTmpRelBytes() { + return tmpRelBytes; + } + + public void setTmpRelBytes(long v) { + this.tmpRelBytes = v; + } + + public long getTmpPoiBytes() { + return tmpPoiBytes; + } + + public void setTmpPoiBytes(long v) { + this.tmpPoiBytes = v; + } + + public long getTmpAppendBytes() { + return tmpAppendBytes; + } + + public void setTmpAppendBytes(long v) { + 
this.tmpAppendBytes = v; + } + + public long getTmpTotalBytes() { + return tmpTotalBytes; + } + + public void setTmpTotalBytes(long v) { + this.tmpTotalBytes = v; + } + + public String getMemoryStats() { + Runtime r = Runtime.getRuntime(); + long used = (r.totalMemory() - r.freeMemory()) / 1024 / 1024 / 1024; + long max = r.maxMemory() / 1024 / 1024 / 1024; + return String.format("%dGB/%dGB", used, max); + } + + public void startProgressReporter() { + boolean isTty = System.console() != null; + + Thread.ofPlatform().daemon().start(() -> { + while (isRunning()) { + long elapsed = System.currentTimeMillis() - getStartTime(); + long phaseElapsed = System.currentTimeMillis() - getPhaseStartTime(); + double phaseSeconds = phaseElapsed / 1000.0; + + String phase = getCurrentPhase(); + StringBuilder sb = new StringBuilder(); + + if (isTty) { + sb.append("\r\033[K"); + } + + // Add step indicator + sb.append(String.format("\033[1;90m[%d/%d]\033[0m ", currentStep, TOTAL_STEPS)); + + if (phase.contains("1.1.1")) { + long pbfPerSec = phaseSeconds > 0 ? (long)(getEntitiesRead() / phaseSeconds) : 0; + sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mScanning PBF Structure\033[0m", formatTime(elapsed))); + sb.append(String.format(" │ \033[32mPBF Entities:\033[0m %s \033[33m(%s/s)\033[0m", + formatCompactNumber(getEntitiesRead()), formatCompactRate(pbfPerSec))); + sb.append(String.format(" │ \033[37mNodes Found:\033[0m %s", formatCompactNumber(getNodesFound()))); + sb.append(String.format(" │ \033[34mWays Found:\033[0m %s", formatCompactNumber(getWaysProcessed()))); + sb.append(String.format(" │ \033[35mRelations:\033[0m %s", formatCompactNumber(getRelationsFound()))); + + } else if (phase.contains("1.1.2")) { + long nodesPerSec = phaseSeconds > 0 ? 
(long)(getNodesCached() / phaseSeconds) : 0; + sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mCaching Node Coordinates\033[0m", formatTime(elapsed))); + sb.append(String.format(" │ \033[32mNodes Cached:\033[0m %s \033[33m(%s/s)\033[0m", + formatCompactNumber(getNodesCached()), formatCompactRate(nodesPerSec))); + sb.append(String.format(" │ \033[36mQueue:\033[0m %s", formatCompactNumber(getQueueSize()))); + sb.append(String.format(" │ \033[37mThreads:\033[0m %d", getActiveThreads())); + + } else if (phase.contains("1.2")) { + long boundsPerSec = phaseSeconds > 0 ? (long)(getBoundariesProcessed() / phaseSeconds) : 0; + sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mProcessing Admin Boundaries\033[0m", formatTime(elapsed))); + sb.append(String.format(" │ \033[32mBoundaries:\033[0m %s \033[33m(%s/s)\033[0m", + formatCompactNumber(getBoundariesProcessed()), formatCompactRate(boundsPerSec))); + sb.append(String.format(" │ \033[37mThreads:\033[0m %d", getActiveThreads())); + + } else if (phase.contains("2.1")) { + long poisPerSec = phaseSeconds > 0 ? (long)(getPoisProcessed() / phaseSeconds) : 0; + long poisReadSec = phaseSeconds > 0 ? (long)(getPoiIndexRecRead() / phaseSeconds) : 0; + sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mProcessing POIs & Sharding\033[0m", formatTime(elapsed))); + sb.append(String.format(" │ \033[32mPOI Index Rec Read:\033[0m %s \033[33m%s\033[0m", formatCompactNumber(getPoiIndexRecRead()), isPoiIndexRecReadDone() ? 
"(done)" : String.format("(%s/s)",formatCompactRate(poisReadSec)))); + sb.append(String.format(" │ \033[32mPOIs Processed:\033[0m %s \033[33m(%s/s)\033[0m", formatCompactNumber(getPoisProcessed()), formatCompactRate(poisPerSec))); + sb.append(String.format(" │ \033[36mQueue:\033[0m %s", formatCompactNumber(getQueueSize()))); + sb.append(String.format(" │ \033[37mThreads:\033[0m %d", getActiveThreads())); + + } else if (phase.contains("2.2")) { + long compactionElapsed = System.currentTimeMillis() - getCompactionStartTime(); + double compactionPhaseSeconds = compactionElapsed / 1000.0; + long shardsCompacted = getShardsCompacted(); + long shardsPerSec = compactionPhaseSeconds > 0 ? (long)(shardsCompacted / compactionPhaseSeconds) : 0; + sb.append(String.format("\033[1;36m[%s]\033[0m \033[1mCompacting POIs\033[0m", formatTime(elapsed))); + sb.append(String.format(" │ \033[32mShards Compacted:\033[0m %s \033[33m(%s/s)\033[0m", + formatCompactNumber(shardsCompacted), formatCompactRate(shardsPerSec))); + + } else { + sb.append(String.format("\033[1;36m[%s]\033[0m %s", formatTime(elapsed), phase)); + } + + sb.append(String.format(" │ \033[31mHeap:\033[0m %s", getMemoryStats())); + + if (isTty) { + System.out.print(sb); + System.out.flush(); + } else { + System.out.println(sb); + } + + try { + Thread.sleep(isTty ? 
500 : 5000); + } catch (InterruptedException e) { + break; + } + } + if (isTty) System.out.println(); + }); + } + + public void printFinalStatistics() { + System.out.println("\n\033[1;36m" + "═".repeat(80) + "\n" + centerText("🎯 FINAL IMPORT STATISTICS") + "\n" + "═".repeat(80) + "\033[0m"); + + long totalTime = Math.max(1, getTotalTime()); + double totalSeconds = totalTime / 1000.0; + + System.out.printf("\n\033[1;37m⏱️ Total Import Time:\033[0m \033[1;33m%s\033[0m%n%n", formatTime(getTotalTime())); + + System.out.println("\033[1;37m📊 Processing Summary:\033[0m"); + System.out.println("┌─────────────────┬─────────────────┬─────────────────┐"); + System.out.println("│ \033[1mEntity Type\033[0m │ \033[1mTotal Count\033[0m │ \033[1mAvg Speed\033[0m │"); + System.out.println("├─────────────────┼─────────────────┼─────────────────┤"); + System.out.printf("│ \033[32mPBF Entities\033[0m │ %15s │ %13s/s │%n", + formatCompactNumber(getEntitiesRead()), + formatCompactNumber((long)(getEntitiesRead() / totalSeconds))); + System.out.printf("│ \033[37mNodes Found\033[0m │ %15s │ %13s/s │%n", + formatCompactNumber(getNodesFound()), + formatCompactNumber((long)(getNodesFound() / totalSeconds))); + System.out.printf("│ \033[34mNodes Cached\033[0m │ %15s │ %13s/s │%n", + formatCompactNumber(getNodesCached()), + formatCompactNumber((long)(getNodesCached() / totalSeconds))); + System.out.printf("│ \033[35mWays Processed\033[0m │ %15s │ %13s/s │%n", + formatCompactNumber(getWaysProcessed()), + formatCompactNumber((long)(getWaysProcessed() / totalSeconds))); + System.out.printf("│ \033[36mBoundaries\033[0m │ %15s │ %13s/s │%n", + formatCompactNumber(getBoundariesProcessed()), + formatCompactNumber((long)(getBoundariesProcessed() / totalSeconds))); + System.out.printf("│ \033[33mPOIs Created\033[0m │ %15s │ %13s/s │%n", + formatCompactNumber(getPoisProcessed()), + formatCompactNumber((long)(getPoisProcessed() / totalSeconds))); + 
System.out.println("└─────────────────┴─────────────────┴─────────────────┘"); + + long totalObjects = getNodesFound() + getNodesCached() + getWaysProcessed() + getBoundariesProcessed() + getPoisProcessed(); + System.out.printf("%n\033[1;37m🚀 Overall Throughput:\033[0m \033[1;32m%s objects\033[0m processed at \033[1;33m%s objects/sec\033[0m%n", + formatCompactNumber(totalObjects), + formatCompactNumber((long)(totalObjects / totalSeconds))); + + System.out.printf("\033[1;37m💾 Database Operations:\033[0m \033[1;36m%s writes\033[0m%n", + formatCompactNumber(getRocksDbWrites())); + + System.out.println("\n\033[1;37m📦 Dataset Size:\033[0m " + formatSize(getDatasetBytes())); + System.out.println(" • poi_shards: " + formatSize(getShardsBytes())); + System.out.println(" • boundaries: " + formatSize(getBoundariesBytes())); + + System.out.println("\n\033[1;37m🧹 Temporary DBs:\033[0m " + formatSize(getTmpTotalBytes())); + System.out.println(" • grid_index: " + formatSize(getTmpGridBytes())); + System.out.println(" • node_cache: " + formatSize(getTmpNodeBytes())); + System.out.println(" • way_index: " + formatSize(getTmpWayBytes())); + System.out.println(" • needed_nodes:" + formatSize(getTmpNeededBytes())); + System.out.println(" • rel_index: " + formatSize(getTmpRelBytes())); + System.out.println(" • poi_index: " + formatSize(getTmpPoiBytes())); + System.out.println(" • append_index: " + formatSize(getTmpAppendBytes())); + System.out.println(); + } + + + private String formatTime(long ms) { + long s = ms / 1000; + return String.format("%d:%02d:%02d", s / 3600, (s % 3600) / 60, s % 60); + } + + private String formatCompactNumber(long n) { + if (n < 1000) return String.valueOf(n); + if (n < 1_000_000) return String.format("%.2fk", n / 1000.0); + return String.format("%.3fM", n / 1_000_000.0); + } + + private String formatCompactRate(long n) { + if (n < 1000) return String.valueOf(n); + if (n < 1_000_000) return String.format("%.1fk", n / 1000.0); + return 
String.format("%.1fM", n / 1_000_000.0); + } + + private String formatSize(long bytes) { + if (bytes < 1024) return bytes + " B"; + double kb = bytes / 1024.0; + if (kb < 1024) return String.format("%.1f KB", kb); + double mb = kb / 1024.0; + if (mb < 1024) return String.format("%.1f MB", mb); + double gb = mb / 1024.0; + if (gb < 1024) return String.format("%.2f GB", gb); + double tb = gb / 1024.0; + return String.format("%.2f TB", tb); + } + + private String centerText(String text) { + int pad = (80 - text.length()) / 2; + return " ".repeat(Math.max(0, pad)) + text; + } + + + public void printPhaseHeader(String phase) { + System.out.println("\n\033[1;36m" + "─".repeat(80) + "\n" + phase + "\n" + "─".repeat(80) + "\033[0m"); + } + + public void printSuccess() { + System.out.println("\n\033[1;32m" + "=".repeat(80) + "\n" + centerText("IMPORT COMPLETED SUCCESSFULLY") + "\n" + "=".repeat(80) + "\033[0m"); + } + + public void printError(String message) { + System.out.println("\n\033[1;31m" + "=".repeat(80) + "\n" + centerText(message) + "\n" + "=".repeat(80) + "\033[0m"); + } + + public void printPhaseSummary(String phaseName, long phaseStartTime) { + long phaseTime = System.currentTimeMillis() - phaseStartTime; + System.out.printf("\n\u001B[1;32m✓ %s COMPLETED\u001B[0m \u001B[2m(%s)\u001B[0m%n", phaseName, formatTime(phaseTime)); + } +} diff --git a/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java b/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java index 6764e2e..63cb9b5 100644 --- a/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java +++ b/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java @@ -16,12 +16,12 @@ package com.dedicatedcode.paikka.service; +import com.dedicatedcode.paikka.service.importer.GeometrySimplificationService; import org.junit.jupiter.api.Test; import org.locationtech.jts.geom.Coordinate; import 
org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LinearRing; -import org.locationtech.jts.geom.Polygon; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; diff --git a/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java b/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java index a5a34b6..570e407 100644 --- a/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java +++ b/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java @@ -21,6 +21,8 @@ import com.dedicatedcode.paikka.flatbuffers.Name; import com.dedicatedcode.paikka.flatbuffers.POI; import com.dedicatedcode.paikka.flatbuffers.POIList; +import com.dedicatedcode.paikka.service.importer.GeometrySimplificationService; +import com.dedicatedcode.paikka.service.importer.ImportService; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -28,7 +30,6 @@ import org.rocksdb.RocksDB; import org.rocksdb.RocksDBException; -import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.file.Files; From 42749bae6ce528aaf3a10b087dd15d712aa29ef5 Mon Sep 17 00:00:00 2001 From: Daniel Graf Date: Sat, 28 Feb 2026 20:21:54 +0100 Subject: [PATCH 2/2] refactor: simplify StatsService and update configuration properties - Added in-memory stats DB option and cron-based flush scheduling. - Removed unused `clientIp` field and related logic. - Replaced custom `@SpringBootTest` annotations with `@IntegrationTest` for improved test consistency. - Consolidated stats recording and database initialization logic. - Updated test cases and introduced `StatsServiceIntegrationTest` for better coverage. 
--- scripts/import.sh | 3 +- .../paikka/config/SecurityConfig.java | 3 + .../paikka/config/StatsInterceptor.java | 13 +- .../paikka/controller/AdminController.java | 5 +- .../paikka/service/StatsService.java | 159 ++++++--------- .../resources/application-docker.properties | 4 +- src/main/resources/application.properties | 1 + .../dedicatedcode/paikka/IntegrationTest.java | 34 ++++ .../GeocodingControllerIntegrationTest.java | 12 +- .../GeometrySimplificationServiceTest.java | 4 +- .../paikka/service/ImportServiceTest.java | 1 + .../paikka/service/S2HelperTest.java | 5 +- .../service/StatsServiceIntegrationTest.java | 190 ++++++++++++++++++ .../resources/application-test.properties | 5 + src/test/resources/application.properties | 25 --- 15 files changed, 317 insertions(+), 147 deletions(-) create mode 100644 src/test/java/com/dedicatedcode/paikka/IntegrationTest.java create mode 100644 src/test/java/com/dedicatedcode/paikka/service/StatsServiceIntegrationTest.java create mode 100644 src/test/resources/application-test.properties delete mode 100644 src/test/resources/application.properties diff --git a/scripts/import.sh b/scripts/import.sh index a57489c..609b0db 100755 --- a/scripts/import.sh +++ b/scripts/import.sh @@ -174,8 +174,7 @@ java $JVM_ARGS \ -jar "$JAR_FILE" \ --import \ --pbf-file "$PBF_FILE" \ - --data-dir "$DATA_DIR" \ - --paikka.admin.password test + --data-dir "$DATA_DIR" EXIT_CODE=$? 
diff --git a/src/main/java/com/dedicatedcode/paikka/config/SecurityConfig.java b/src/main/java/com/dedicatedcode/paikka/config/SecurityConfig.java index 1f8e9eb..09f98f8 100644 --- a/src/main/java/com/dedicatedcode/paikka/config/SecurityConfig.java +++ b/src/main/java/com/dedicatedcode/paikka/config/SecurityConfig.java @@ -17,6 +17,7 @@ package com.dedicatedcode.paikka.config; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.web.builders.HttpSecurity; @@ -31,11 +32,13 @@ @Configuration @EnableWebSecurity +@ConditionalOnProperty(name = "paikka.import-mode", havingValue = "false", matchIfMissing = true) public class SecurityConfig { @Bean public SecurityFilterChain securityFilterChain(HttpSecurity http, AdminTokenFilter adminTokenFilter) throws Exception { http .authorizeHttpRequests(authorize -> authorize + .requestMatchers("/admin/**").hasRole("ADMIN") .requestMatchers("/", "/login", "/error", "/logout").permitAll() .requestMatchers("/api/**", "/css/**", "/js/**", "/images/**", "/img/**", "/fonts/**").permitAll() .anyRequest().authenticated() diff --git a/src/main/java/com/dedicatedcode/paikka/config/StatsInterceptor.java b/src/main/java/com/dedicatedcode/paikka/config/StatsInterceptor.java index 60c3248..6161155 100644 --- a/src/main/java/com/dedicatedcode/paikka/config/StatsInterceptor.java +++ b/src/main/java/com/dedicatedcode/paikka/config/StatsInterceptor.java @@ -43,7 +43,6 @@ public StatsInterceptor(StatsService statsService) { @Override public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) { - // Only track API endpoints if (request.getRequestURI().startsWith("/api/v1/")) { request.setAttribute("startTime", System.currentTimeMillis()); } @@ -53,8 +52,7 @@ 
public boolean preHandle(HttpServletRequest request, HttpServletResponse respons @Override public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) { - - // Only track API endpoints and successful requests + if (!request.getRequestURI().startsWith("/api/v1/") || request.getAttribute("startTime") == null || response.getStatus() >= 400) { @@ -63,17 +61,15 @@ public void afterCompletion(HttpServletRequest request, HttpServletResponse resp try { long responseTime = System.currentTimeMillis() - (Long) request.getAttribute("startTime"); - - // Extract and sort parameters + Map sortedParams = request.getParameterMap().entrySet().stream() .collect(Collectors.toMap( - entry -> entry.getKey(), + Map.Entry::getKey, entry -> String.join(",", entry.getValue()), (e1, e2) -> e1, TreeMap::new )); - - // Extract result count from response header (set by controllers) + int resultCount = 0; String resultCountHeader = response.getHeader("X-Result-Count"); if (resultCountHeader != null) { @@ -91,7 +87,6 @@ public void afterCompletion(HttpServletRequest request, HttpServletResponse resp sortedParams, responseTime, resultCount, - clientIp, response.getStatus() ); diff --git a/src/main/java/com/dedicatedcode/paikka/controller/AdminController.java b/src/main/java/com/dedicatedcode/paikka/controller/AdminController.java index ab7ff60..5d8bf47 100644 --- a/src/main/java/com/dedicatedcode/paikka/controller/AdminController.java +++ b/src/main/java/com/dedicatedcode/paikka/controller/AdminController.java @@ -29,11 +29,12 @@ import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.RestController; import java.util.HashMap; import java.util.Map; -@Controller +@RestController @RequestMapping("/admin") @ConditionalOnProperty(name = "paikka.import-mode", havingValue = 
"false", matchIfMissing = true) public class AdminController { @@ -51,8 +52,6 @@ public AdminController(ReverseGeocodingService reverseGeocodingService, Boundary } @PostMapping(value = "/refresh-db", produces = "application/json") - @PreAuthorize("hasRole('ADMIN')") - @ResponseBody public ResponseEntity refreshDatabase() { logger.info("Database refresh requested"); diff --git a/src/main/java/com/dedicatedcode/paikka/service/StatsService.java b/src/main/java/com/dedicatedcode/paikka/service/StatsService.java index 8a09c76..1d390c4 100644 --- a/src/main/java/com/dedicatedcode/paikka/service/StatsService.java +++ b/src/main/java/com/dedicatedcode/paikka/service/StatsService.java @@ -79,14 +79,14 @@ public void cleanup() { } private void initializeDatabase() throws SQLException { - // Ensure directory exists - File dbFile = new File(config.getStatsDbPath()); - dbFile.getParentFile().mkdirs(); - - // Connect to SQLite database - String url = "jdbc:sqlite:" + config.getStatsDbPath(); - connection = DriverManager.getConnection(url); - + if (config.getStatsDbPath().equals("memory")) { + connection = DriverManager.getConnection("jdbc:sqlite::memory:"); + } else { + File dbFile = new File(config.getStatsDbPath()); + dbFile.getParentFile().mkdirs(); + String url = "jdbc:sqlite:" + config.getStatsDbPath(); + connection = DriverManager.getConnection(url); + } // Create table if not exists String createTableSQL = """ CREATE TABLE IF NOT EXISTS query_stats ( @@ -96,7 +96,6 @@ endpoint VARCHAR(100), parameters TEXT, response_time_ms INTEGER, result_count INTEGER, - client_ip VARCHAR(45), date_only DATE, hour_bucket INTEGER, status_code INTEGER @@ -139,8 +138,8 @@ CREATE TABLE IF NOT EXISTS location_stats ( } @Async - public void recordQuery(String endpoint, Map sortedParams, - long responseTimeMs, int resultCount, String clientIp, int statusCode) { + public void recordQuery(String endpoint, Map sortedParams, + long responseTimeMs, int resultCount, int statusCode) { try { String 
parametersJson = objectMapper.writeValueAsString(sortedParams); LocalDateTime now = LocalDateTime.now(); @@ -150,7 +149,6 @@ public void recordQuery(String endpoint, Map sortedParams, parametersJson, responseTimeMs, resultCount, - clientIp, now.toLocalDate(), now.getHour(), statusCode @@ -158,7 +156,6 @@ public void recordQuery(String endpoint, Map sortedParams, pendingStats.offer(record); - // Record location if this is a reverse geocoding query if ("/api/v1/reverse".equals(endpoint) && sortedParams.containsKey("lat") && sortedParams.containsKey("lon")) { try { double lat = Double.parseDouble(sortedParams.get("lat")); @@ -198,48 +195,50 @@ ON CONFLICT(rounded_lat, rounded_lon) logger.error("Failed to record location stats", e); } } - - @Scheduled(fixedDelay = 10000) // Every 10 seconds + + @Scheduled(cron = "${paikka.stats-db.flush}") // configurable via paikka.stats-db.flush (default: every 10 seconds) public void flushPendingStats() { if (pendingStats.isEmpty() || connection == null) { return; } - - List batch = new ArrayList<>(); - StatsRecord record; - while ((record = pendingStats.poll()) != null && batch.size() < 1000) { - batch.add(record); - } - - if (batch.isEmpty()) { - return; - } - - String insertSQL = """ - INSERT INTO query_stats (endpoint, parameters, response_time_ms, result_count, - client_ip, date_only, hour_bucket, status_code) - VALUES (?, ?, ?, ?, ?, ?, ?, ?) 
- """; - - try (PreparedStatement pstmt = connection.prepareStatement(insertSQL)) { - for (StatsRecord statsRecord : batch) { - pstmt.setString(1, statsRecord.endpoint); - pstmt.setString(2, statsRecord.parametersJson); - pstmt.setLong(3, statsRecord.responseTimeMs); - pstmt.setInt(4, statsRecord.resultCount); - pstmt.setString(5, statsRecord.clientIp); - pstmt.setString(6, statsRecord.dateOnly.toString()); - pstmt.setInt(7, statsRecord.hourBucket); - pstmt.setInt(8, statsRecord.statusCode); - pstmt.addBatch(); + synchronized (pendingStats) { + + List batch = new ArrayList<>(); + StatsRecord record; + while ((record = pendingStats.poll()) != null && batch.size() < 1000) { + batch.add(record); + } + + if (batch.isEmpty()) { + return; + } + + String insertSQL = """ + INSERT INTO query_stats (endpoint, parameters, response_time_ms, result_count, + date_only, hour_bucket, status_code) + VALUES (?, ?, ?, ?, ?, ?, ?) + """; + + try (PreparedStatement pstmt = connection.prepareStatement(insertSQL)) { + for (StatsRecord statsRecord : batch) { + pstmt.setString(1, statsRecord.endpoint); + pstmt.setString(2, statsRecord.parametersJson); + pstmt.setLong(3, statsRecord.responseTimeMs); + pstmt.setInt(4, statsRecord.resultCount); + pstmt.setString(5, statsRecord.dateOnly.toString()); + pstmt.setInt(6, statsRecord.hourBucket); + pstmt.setInt(7, statsRecord.statusCode); + pstmt.addBatch(); + } + pstmt.executeBatch(); + logger.debug("Flushed {} stats records to database", batch.size()); + } catch (SQLException e) { + logger.error("Failed to flush stats to database", e); + // Re-add failed records to queue + batch.forEach(pendingStats::offer); } - pstmt.executeBatch(); - logger.debug("Flushed {} stats records to database", batch.size()); - } catch (SQLException e) { - logger.error("Failed to flush stats to database", e); - // Re-add failed records to queue - batch.forEach(pendingStats::offer); } + } public List getDailyStats(LocalDate startDate, LocalDate endDate, String endpoint) 
{ @@ -355,11 +354,9 @@ public List getAvailableEndpoints() { public List getLocationStats() { String sql = """ - SELECT rounded_lat, rounded_lon, query_count, last_queried + SELECT rounded_lat, rounded_lon, query_count, last_queried FROM location_stats - WHERE query_count >= 5 - ORDER BY query_count DESC - LIMIT 1000 + WHERE query_count >= 5 ORDER BY query_count DESC """; List results = new ArrayList<>(); @@ -408,46 +405,24 @@ public void cleanupOldStats() { logger.error("Failed to cleanup old location stats", e); } } - - public static class LocationStatsResponse { - private final double lat; - private final double lon; - private final int queryCount; - private final String lastQueried; - - public LocationStatsResponse(double lat, double lon, int queryCount, String lastQueried) { - this.lat = lat; - this.lon = lon; - this.queryCount = queryCount; - this.lastQueried = lastQueried; + + void clearDatabase() { + try (PreparedStatement pstmt = connection.prepareStatement("DELETE FROM location_stats")) { + pstmt.executeUpdate(); + } catch (SQLException e) { + logger.error("Failed to cleanup old location stats", e); } - - public double getLat() { return lat; } - public double getLon() { return lon; } - public int getQueryCount() { return queryCount; } - public String getLastQueried() { return lastQueried; } - } - - private static class StatsRecord { - final String endpoint; - final String parametersJson; - final long responseTimeMs; - final int resultCount; - final String clientIp; - final LocalDate dateOnly; - final int hourBucket; - final int statusCode; - - StatsRecord(String endpoint, String parametersJson, long responseTimeMs, - int resultCount, String clientIp, LocalDate dateOnly, int hourBucket, int statusCode) { - this.endpoint = endpoint; - this.parametersJson = parametersJson; - this.responseTimeMs = responseTimeMs; - this.resultCount = resultCount; - this.clientIp = clientIp; - this.dateOnly = dateOnly; - this.hourBucket = hourBucket; - this.statusCode = 
statusCode; + try (PreparedStatement pstmt = connection.prepareStatement("DELETE FROM query_stats")) { + pstmt.executeUpdate(); + } catch (SQLException e) { + logger.error("Failed to clear query stats", e); } } + + public record LocationStatsResponse(double lat, double lon, int queryCount, String lastQueried) { + } + + private record StatsRecord(String endpoint, String parametersJson, long responseTimeMs, int resultCount, + LocalDate dateOnly, int hourBucket, int statusCode) { + } } diff --git a/src/main/resources/application-docker.properties b/src/main/resources/application-docker.properties index 144ff1e..44dfbd0 100644 --- a/src/main/resources/application-docker.properties +++ b/src/main/resources/application-docker.properties @@ -13,12 +13,12 @@ spring.web.resources.chain.strategy.content.enabled=true spring.web.resources.chain.strategy.content.paths=/css/**,/js/**,/img/**,/fonts/** # Paikka configuration - all configurable via environment variables -paikka.data-dir=${DATA_DIR:./data} +paikka.data-dir=${DATA_DIR:/data} paikka.import-configuration.threads=${MAX_IMPORT_THREADS:10} paikka.query.max-results=${MAX_RESULTS:500} paikka.query.default-results=${DEFAULT_RESULTS:10} paikka.query.base-url=${BASE_URL:http://localhost:8080} -paikka.stats-db-path=${STATS_DB_PATH:./data/stats.db} +paikka.stats-db-path=${STATS_DB_PATH:/data/stats.db} paikka.admin.password=${ADMIN_PASSWORD:} # Logging configuration diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index c5339ae..a8f3a67 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -21,6 +21,7 @@ paikka.query.default-results=10 paikka.query.base-url=http://localhost:8080 paikka.stats-db-path=./data/stats.db +paikka.stats-db.flush=0/10 * * * * * logging.level.com.dedicatedcode.paikka=INFO logging.level.root=WARN diff --git a/src/test/java/com/dedicatedcode/paikka/IntegrationTest.java 
b/src/test/java/com/dedicatedcode/paikka/IntegrationTest.java new file mode 100644 index 0000000..9380f5c --- /dev/null +++ b/src/test/java/com/dedicatedcode/paikka/IntegrationTest.java @@ -0,0 +1,34 @@ +/* + * This file is part of paikka. + * + * Paikka is free software: you can redistribute it and/or + * modify it under the terms of the GNU Affero General Public License + * as published by the Free Software Foundation, either version 3 or + * any later version. + * + * Paikka is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied + * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + * See the GNU Affero General Public License for more details. + * You should have received a copy of the GNU Affero General Public License + * along with Paikka. If not, see . + */ + +package com.dedicatedcode.paikka; + + +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@Retention(RetentionPolicy.RUNTIME) +@SpringBootTest +@ActiveProfiles("test") +@AutoConfigureMockMvc +@SpringJUnitConfig +public @interface IntegrationTest { +} diff --git a/src/test/java/com/dedicatedcode/paikka/controller/GeocodingControllerIntegrationTest.java b/src/test/java/com/dedicatedcode/paikka/controller/GeocodingControllerIntegrationTest.java index 724fc6a..70916fa 100644 --- a/src/test/java/com/dedicatedcode/paikka/controller/GeocodingControllerIntegrationTest.java +++ b/src/test/java/com/dedicatedcode/paikka/controller/GeocodingControllerIntegrationTest.java @@ -16,6 +16,7 @@ package com.dedicatedcode.paikka.controller; +import com.dedicatedcode.paikka.IntegrationTest; import org.junit.jupiter.api.BeforeEach; import 
org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -42,8 +43,7 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; -@SpringBootTest -@AutoConfigureMockMvc +@IntegrationTest class GeocodingControllerIntegrationTest { @Autowired @@ -77,8 +77,7 @@ void setupDataAndRefresh() throws Exception { System.out.println("Extracted data-monaco.zip to: " + dataDirectory.toAbsolutePath()); // Perform admin refresh using MockMvc - mockMvc.perform(post("/admin/refresh-db") - .with(user("admin")) + mockMvc.perform(post("/admin/refresh-db").header("X-Admin-Token", "test") .contentType(MediaType.APPLICATION_JSON)) .andExpect(status().isOk()) .andExpect(jsonPath("$.success").value(true)) @@ -109,11 +108,6 @@ private static void extractZip(Path zipFilePath, Path destinationDir) throws IOE } } - @Test - void contextLoads() { - assertThat(mockMvc).isNotNull(); - } - @Test void testHealthEndpoint() throws Exception { mockMvc.perform(get("/api/v1/health")) diff --git a/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java b/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java index 63cb9b5..89edffb 100644 --- a/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java +++ b/src/test/java/com/dedicatedcode/paikka/service/GeometrySimplificationServiceTest.java @@ -16,6 +16,7 @@ package com.dedicatedcode.paikka.service; +import com.dedicatedcode.paikka.IntegrationTest; import com.dedicatedcode.paikka.service.importer.GeometrySimplificationService; import org.junit.jupiter.api.Test; import org.locationtech.jts.geom.Coordinate; @@ -27,8 +28,7 @@ import static org.junit.jupiter.api.Assertions.*; -@SpringBootTest -@SpringJUnitConfig +@IntegrationTest class GeometrySimplificationServiceTest { private final GeometrySimplificationService service = 
new GeometrySimplificationService(); diff --git a/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java b/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java index 570e407..3d701af 100644 --- a/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java +++ b/src/test/java/com/dedicatedcode/paikka/service/ImportServiceTest.java @@ -16,6 +16,7 @@ package com.dedicatedcode.paikka.service; +import com.dedicatedcode.paikka.IntegrationTest; import com.dedicatedcode.paikka.config.PaikkaConfiguration; import com.dedicatedcode.paikka.flatbuffers.Address; import com.dedicatedcode.paikka.flatbuffers.Name; diff --git a/src/test/java/com/dedicatedcode/paikka/service/S2HelperTest.java b/src/test/java/com/dedicatedcode/paikka/service/S2HelperTest.java index e7ec74e..11a2d94 100644 --- a/src/test/java/com/dedicatedcode/paikka/service/S2HelperTest.java +++ b/src/test/java/com/dedicatedcode/paikka/service/S2HelperTest.java @@ -16,14 +16,14 @@ package com.dedicatedcode.paikka.service; +import com.dedicatedcode.paikka.IntegrationTest; import org.junit.jupiter.api.Test; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import static org.junit.jupiter.api.Assertions.*; -@SpringBootTest -@SpringJUnitConfig +@IntegrationTest class S2HelperTest { private final S2Helper s2Helper = new S2Helper(); @@ -39,7 +39,6 @@ void testGetShardId() { // Should return a valid S2 cell ID assertNotEquals(0, shardId); - // Same coordinates should return same shard ID long shardId2 = s2Helper.getShardId(lat, lon); assertEquals(shardId, shardId2); } diff --git a/src/test/java/com/dedicatedcode/paikka/service/StatsServiceIntegrationTest.java b/src/test/java/com/dedicatedcode/paikka/service/StatsServiceIntegrationTest.java new file mode 100644 index 0000000..a4d4458 --- /dev/null +++ b/src/test/java/com/dedicatedcode/paikka/service/StatsServiceIntegrationTest.java @@ -0,0 +1,190 @@ 
+/* + * This file is part of paikka. + * + * Paikka is free software: you can redistribute it and/or + * modify it under the terms of the GNU Affero General Public License + * as published by the Free Software Foundation, either version 3 or + * any later version. + * + * Paikka is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied + * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + * See the GNU Affero General Public License for more details. + * You should have received a copy of the GNU Affero General Public License + * along with Paikka. If not, see <https://www.gnu.org/licenses/>. + */ + +package com.dedicatedcode.paikka.service; + +import com.dedicatedcode.paikka.IntegrationTest; +import com.dedicatedcode.paikka.dto.StatsAggregationResponse; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.time.LocalDate; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; + +@IntegrationTest +class StatsServiceIntegrationTest { + + @Autowired + private StatsService statsService; + + @BeforeEach + void setUp() { + statsService.clearDatabase(); + } + @Test + void testRecordQueryAndRetrieveDailyStats() { + // Record some queries + statsService.recordQuery("/api/v1/search", Map.of("q", "Berlin"), 150L, 10, 200); + statsService.recordQuery("/api/v1/search", Map.of("q", "Munich"), 200L, 5, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "52.5", "lon", "13.4"), 100L, 3, 200); + + // Wait for async processing and flush + await().atMost(15, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .until(() -> { + statsService.flushPendingStats(); + List<StatsAggregationResponse> stats = statsService.getDailyStats( + LocalDate.now().minusDays(1), LocalDate.now(), null); + return
stats.stream().mapToLong(StatsAggregationResponse::getQueryCount).sum() >= 3; + }); + + // Verify daily stats + List<StatsAggregationResponse> dailyStats = statsService.getDailyStats( + LocalDate.now().minusDays(1), LocalDate.now(), null); + + assertThat(dailyStats).isNotEmpty(); + StatsAggregationResponse todayStats = dailyStats.stream() + .filter(s -> s.getDate().equals(LocalDate.now())) + .findFirst() + .orElse(null); + assertThat(todayStats).isNotNull(); + assertThat(todayStats.getQueryCount()).isEqualTo(3); + assertThat(todayStats.getAvgResponseTime()).isBetween(100.0, 200.0); + } + + @Test + void testRecordQueryWithErrorStatus() { + statsService.recordQuery("/api/v1/search", Map.of("q", "test"), 50L, 0, 404); + + await().atMost(15, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .until(() -> { + statsService.flushPendingStats(); + List<StatsAggregationResponse> stats = statsService.getDailyStats( + LocalDate.now().minusDays(1), LocalDate.now(), null); + return stats.stream().anyMatch(s -> s.getErrorCount() > 0); + }); + + List<StatsAggregationResponse> dailyStats = statsService.getDailyStats( + LocalDate.now().minusDays(1), LocalDate.now(), null); + + assertThat(dailyStats).isNotEmpty(); + assertThat(dailyStats.get(0).getErrorCount()).isGreaterThan(0); + } + + @Test + void testGetHourlyStats() { + statsService.recordQuery("/api/v1/search", Map.of("q", "test"), 100L, 5, 200); + + await().atMost(15, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .until(() -> { + statsService.flushPendingStats(); + List<StatsAggregationResponse> stats = statsService.getHourlyStats( + LocalDate.now().minusDays(1), LocalDate.now(), null); + return !stats.isEmpty(); + }); + + List<StatsAggregationResponse> hourlyStats = statsService.getHourlyStats( + LocalDate.now().minusDays(1), LocalDate.now(), null); + + assertThat(hourlyStats).isNotEmpty(); + assertThat(hourlyStats.get(0).getHour()).isBetween(0, 23); + } + + @Test + void testGetAvailableEndpoints() { + statsService.recordQuery("/api/v1/search", Map.of("q", "test"), 100L, 5, 200); + statsService.recordQuery("/api/v1/reverse",
Map.of("lat", "0", "lon", "0"), 50L, 2, 200); + + await().atMost(15, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .until(() -> { + statsService.flushPendingStats(); + return !statsService.getAvailableEndpoints().isEmpty(); + }); + + List<String> endpoints = statsService.getAvailableEndpoints(); + + assertThat(endpoints).contains("/api/v1/search", "/api/v1/reverse"); + } + + @Test + void testLocationStatsRecording() { + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "52.5", "lon", "13.4"), 100L, 3, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "52.5", "lon", "13.4"), 150L, 5, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "52.5", "lon", "13.4"), 150L, 5, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "52.5", "lon", "13.4"), 150L, 5, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "52.5", "lon", "13.4"), 150L, 5, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "52.5", "lon", "13.4"), 150L, 5, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "48.5", "lon", "9.5"), 80L, 2, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "48.5", "lon", "9.5"), 80L, 2, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "48.5", "lon", "9.5"), 80L, 2, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "48.5", "lon", "9.5"), 80L, 2, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "48.5", "lon", "9.5"), 80L, 2, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "48.5", "lon", "9.5"), 80L, 2, 200); + + await().atMost(15, TimeUnit.SECONDS).until(() -> { + statsService.flushPendingStats(); + return !statsService.getLocationStats().isEmpty(); + }); + List<StatsService.LocationStatsResponse> locationStats = statsService.getLocationStats(); + + assertThat(locationStats).isNotEmpty(); + // The coordinates are rounded to 0.5 degrees + StatsService.LocationStatsResponse location52 = locationStats.stream() + .filter(l ->
Math.abs(l.lat() - 52.5) < 0.25) + .findFirst() + .orElse(null); + assertThat(location52).isNotNull(); + + assertThat(location52.queryCount()).isEqualTo(6); + } + + @Test + void testStatsAggregationWithEndpointFilter() { + statsService.recordQuery("/api/v1/search", Map.of("q", "test1"), 100L, 5, 200); + statsService.recordQuery("/api/v1/search", Map.of("q", "test2"), 150L, 10, 200); + statsService.recordQuery("/api/v1/reverse", Map.of("lat", "0", "lon", "0"), 50L, 2, 200); + + await().atMost(15, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .until(() -> { + statsService.flushPendingStats(); + List<StatsAggregationResponse> stats = statsService.getDailyStats( + LocalDate.now().minusDays(1), LocalDate.now(), "/api/v1/search"); + return stats.stream().mapToLong(StatsAggregationResponse::getQueryCount).sum() >= 2; + }); + + List<StatsAggregationResponse> searchStats = statsService.getDailyStats( + LocalDate.now().minusDays(1), LocalDate.now(), "/api/v1/search"); + + assertThat(searchStats).isNotEmpty(); + assertThat(searchStats.get(0).getQueryCount()).isEqualTo(2); + + List<StatsAggregationResponse> reverseStats = statsService.getDailyStats( + LocalDate.now().minusDays(1), LocalDate.now(), "/api/v1/reverse"); + + assertThat(reverseStats).isNotEmpty(); + assertThat(reverseStats.get(0).getQueryCount()).isEqualTo(1); + } +} \ No newline at end of file diff --git a/src/test/resources/application-test.properties b/src/test/resources/application-test.properties new file mode 100644 index 0000000..7f048ee --- /dev/null +++ b/src/test/resources/application-test.properties @@ -0,0 +1,5 @@ +paikka.data-dir=${java.io.tmpdir}/paikka-test-data +paikka.stats-db-path=memory +paikka.query.base-url=http://localhost:8080 +paikka.admin.password=test +paikka.stats-db.flush=- diff --git a/src/test/resources/application.properties b/src/test/resources/application.properties deleted file mode 100644 index fdf178c..0000000 --- a/src/test/resources/application.properties +++ /dev/null @@ -1,25 +0,0 @@ -server.port=8080 - -server.compression.enabled=true -#
Minimum response size to trigger compression -server.compression.min-response-size=1024 -# Content types to compress -server.compression.mime-types=text/plain,application/json - -# Static resource caching - only for actual static resources, not HTML templates -spring.web.resources.cache.cachecontrol.max-age=31536000 -spring.web.resources.cache.cachecontrol.cache-public=true -spring.web.resources.chain.strategy.content.enabled=true -spring.web.resources.chain.strategy.content.paths=/css/**,/js/**,/img/**,/fonts/** - -paikka.data-dir=${java.io.tmpdir}/paikka-test-data -paikka.import-configuration.threads=10 -paikka.import-configuration.s2-level=14 -paikka.import-configuration.chunk-size=500000 -paikka.query.max-results=500 -paikka.query.default-results=10 -paikka.query.base-url=http://localhost:8080 -paikka.admin.password=test - -logging.level.com.dedicatedcode.paikka=INFO -logging.level.root=WARN