Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ The [OpenStreetMap website](https://www.openstreetmap.org/export/) allows you to

| Dataset | original | filtered... | time taken | reduction | during import | imported | time taken | reduction |
|------------|----------|-------------|------------|-----------|---------------|----------|------------|-----------|
| Planet | 86 GB | 33 GB | 40 min | ~60% | ~ 31 GB | 8.15 GB | ~ 16 h | ~90% |
| Planet | 86 GB | 34 GB | 40 min | ~60% | ~ 31 GB | 8.15 GB | ~ 16 h | ~90% |
| Germany | 4.4 GB | 1.8 GB | 2 min | ~59% | ~ 14.4 GB | 3.81 GB | ~ 18 min | ~13% |
| Netherlands | 1.4 GB | 394 MB | 30 s | ~70% | ~ 2.69 GB | 705.7 MB | ~ 2 min | ~50% |

Expand Down
3 changes: 1 addition & 2 deletions scripts/import.sh
Original file line number Diff line number Diff line change
Expand Up @@ -174,8 +174,7 @@ java $JVM_ARGS \
-jar "$JAR_FILE" \
--import \
--pbf-file "$PBF_FILE" \
--data-dir "$DATA_DIR" \
--paikka.admin.password test
--data-dir "$DATA_DIR"

EXIT_CODE=$?

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,10 @@

package com.dedicatedcode.paikka;

import com.dedicatedcode.paikka.service.ImportService;
import com.dedicatedcode.paikka.service.importer.ImportService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package com.dedicatedcode.paikka.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
Expand All @@ -31,11 +32,13 @@

@Configuration
@EnableWebSecurity
@ConditionalOnProperty(name = "paikka.import-mode", havingValue = "false", matchIfMissing = true)
public class SecurityConfig {
@Bean
public SecurityFilterChain securityFilterChain(HttpSecurity http, AdminTokenFilter adminTokenFilter) throws Exception {
http
.authorizeHttpRequests(authorize -> authorize
.requestMatchers("/admin/**").hasRole("ADMIN")
.requestMatchers("/", "/login", "/error", "/logout").permitAll()
.requestMatchers("/api/**", "/css/**", "/js/**", "/images/**", "/img/**", "/fonts/**").permitAll()
.anyRequest().authenticated()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,6 @@ public StatsInterceptor(StatsService statsService) {

@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
// Only track API endpoints
if (request.getRequestURI().startsWith("/api/v1/")) {
request.setAttribute("startTime", System.currentTimeMillis());
}
Expand All @@ -53,8 +52,7 @@ public boolean preHandle(HttpServletRequest request, HttpServletResponse respons
@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response,
Object handler, Exception ex) {

// Only track API endpoints and successful requests

if (!request.getRequestURI().startsWith("/api/v1/") ||
request.getAttribute("startTime") == null ||
response.getStatus() >= 400) {
Expand All @@ -63,17 +61,15 @@ public void afterCompletion(HttpServletRequest request, HttpServletResponse resp

try {
long responseTime = System.currentTimeMillis() - (Long) request.getAttribute("startTime");

// Extract and sort parameters

Map<String, String> sortedParams = request.getParameterMap().entrySet().stream()
.collect(Collectors.toMap(
entry -> entry.getKey(),
Map.Entry::getKey,
entry -> String.join(",", entry.getValue()),
(e1, e2) -> e1,
TreeMap::new
));

// Extract result count from response header (set by controllers)

int resultCount = 0;
String resultCountHeader = response.getHeader("X-Result-Count");
if (resultCountHeader != null) {
Expand All @@ -91,7 +87,6 @@ public void afterCompletion(HttpServletRequest request, HttpServletResponse resp
sortedParams,
responseTime,
resultCount,
clientIp,
response.getStatus()
);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,12 @@
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;

import java.util.HashMap;
import java.util.Map;

@Controller
@RestController
@RequestMapping("/admin")
@ConditionalOnProperty(name = "paikka.import-mode", havingValue = "false", matchIfMissing = true)
public class AdminController {
Expand All @@ -51,8 +52,6 @@ public AdminController(ReverseGeocodingService reverseGeocodingService, Boundary
}

@PostMapping(value = "/refresh-db", produces = "application/json")
@PreAuthorize("hasRole('ADMIN')")
@ResponseBody
public ResponseEntity<?> refreshDatabase() {
logger.info("Database refresh requested");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

import com.fasterxml.jackson.annotation.JsonProperty;

record PaikkaMetadata(
public record PaikkaMetadata(
@JsonProperty("importTimestamp") String importTimestamp,
@JsonProperty("dataVersion") String dataVersion,
@JsonProperty("file") String file,
Expand Down
159 changes: 67 additions & 92 deletions src/main/java/com/dedicatedcode/paikka/service/StatsService.java
Original file line number Diff line number Diff line change
Expand Up @@ -79,14 +79,14 @@ public void cleanup() {
}

private void initializeDatabase() throws SQLException {
// Ensure directory exists
File dbFile = new File(config.getStatsDbPath());
dbFile.getParentFile().mkdirs();

// Connect to SQLite database
String url = "jdbc:sqlite:" + config.getStatsDbPath();
connection = DriverManager.getConnection(url);

if (config.getStatsDbPath().equals("memory")) {
connection = DriverManager.getConnection("jdbc:sqlite::memory:");
} else {
File dbFile = new File(config.getStatsDbPath());
dbFile.getParentFile().mkdirs();
String url = "jdbc:sqlite:" + config.getStatsDbPath();
connection = DriverManager.getConnection(url);
}
// Create table if not exists
String createTableSQL = """
CREATE TABLE IF NOT EXISTS query_stats (
Expand All @@ -96,7 +96,6 @@ endpoint VARCHAR(100),
parameters TEXT,
response_time_ms INTEGER,
result_count INTEGER,
client_ip VARCHAR(45),
date_only DATE,
hour_bucket INTEGER,
status_code INTEGER
Expand Down Expand Up @@ -139,8 +138,8 @@ CREATE TABLE IF NOT EXISTS location_stats (
}

@Async
public void recordQuery(String endpoint, Map<String, String> sortedParams,
long responseTimeMs, int resultCount, String clientIp, int statusCode) {
public void recordQuery(String endpoint, Map<String, String> sortedParams,
long responseTimeMs, int resultCount, int statusCode) {
try {
String parametersJson = objectMapper.writeValueAsString(sortedParams);
LocalDateTime now = LocalDateTime.now();
Expand All @@ -150,15 +149,13 @@ public void recordQuery(String endpoint, Map<String, String> sortedParams,
parametersJson,
responseTimeMs,
resultCount,
clientIp,
now.toLocalDate(),
now.getHour(),
statusCode
);

pendingStats.offer(record);

// Record location if this is a reverse geocoding query
if ("/api/v1/reverse".equals(endpoint) && sortedParams.containsKey("lat") && sortedParams.containsKey("lon")) {
try {
double lat = Double.parseDouble(sortedParams.get("lat"));
Expand Down Expand Up @@ -198,48 +195,50 @@ ON CONFLICT(rounded_lat, rounded_lon)
logger.error("Failed to record location stats", e);
}
}
@Scheduled(fixedDelay = 10000) // Every 10 seconds

@Scheduled(cron = "${paikka.stats-db.flush}") // flush interval is configurable via the paikka.stats-db.flush property (cron expression)
public void flushPendingStats() {
if (pendingStats.isEmpty() || connection == null) {
return;
}

List<StatsRecord> batch = new ArrayList<>();
StatsRecord record;
while ((record = pendingStats.poll()) != null && batch.size() < 1000) {
batch.add(record);
}

if (batch.isEmpty()) {
return;
}

String insertSQL = """
INSERT INTO query_stats (endpoint, parameters, response_time_ms, result_count,
client_ip, date_only, hour_bucket, status_code)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
""";

try (PreparedStatement pstmt = connection.prepareStatement(insertSQL)) {
for (StatsRecord statsRecord : batch) {
pstmt.setString(1, statsRecord.endpoint);
pstmt.setString(2, statsRecord.parametersJson);
pstmt.setLong(3, statsRecord.responseTimeMs);
pstmt.setInt(4, statsRecord.resultCount);
pstmt.setString(5, statsRecord.clientIp);
pstmt.setString(6, statsRecord.dateOnly.toString());
pstmt.setInt(7, statsRecord.hourBucket);
pstmt.setInt(8, statsRecord.statusCode);
pstmt.addBatch();
synchronized (pendingStats) {

List<StatsRecord> batch = new ArrayList<>();
StatsRecord record;
while ((record = pendingStats.poll()) != null && batch.size() < 1000) {
batch.add(record);
}

if (batch.isEmpty()) {
return;
}

String insertSQL = """
INSERT INTO query_stats (endpoint, parameters, response_time_ms, result_count,
date_only, hour_bucket, status_code)
VALUES (?, ?, ?, ?, ?, ?, ?)
""";

try (PreparedStatement pstmt = connection.prepareStatement(insertSQL)) {
for (StatsRecord statsRecord : batch) {
pstmt.setString(1, statsRecord.endpoint);
pstmt.setString(2, statsRecord.parametersJson);
pstmt.setLong(3, statsRecord.responseTimeMs);
pstmt.setInt(4, statsRecord.resultCount);
pstmt.setString(5, statsRecord.dateOnly.toString());
pstmt.setInt(6, statsRecord.hourBucket);
pstmt.setInt(7, statsRecord.statusCode);
pstmt.addBatch();
}
pstmt.executeBatch();
logger.debug("Flushed {} stats records to database", batch.size());
} catch (SQLException e) {
logger.error("Failed to flush stats to database", e);
// Re-add failed records to queue
batch.forEach(pendingStats::offer);
}
pstmt.executeBatch();
logger.debug("Flushed {} stats records to database", batch.size());
} catch (SQLException e) {
logger.error("Failed to flush stats to database", e);
// Re-add failed records to queue
batch.forEach(pendingStats::offer);
}

}

public List<StatsAggregationResponse> getDailyStats(LocalDate startDate, LocalDate endDate, String endpoint) {
Expand Down Expand Up @@ -355,11 +354,9 @@ public List<String> getAvailableEndpoints() {

public List<LocationStatsResponse> getLocationStats() {
String sql = """
SELECT rounded_lat, rounded_lon, query_count, last_queried
SELECT rounded_lat, rounded_lon, query_count, last_queried
FROM location_stats
WHERE query_count >= 5
ORDER BY query_count DESC
LIMIT 1000
WHERE query_count >= 5 ORDER BY query_count DESC
""";

List<LocationStatsResponse> results = new ArrayList<>();
Expand Down Expand Up @@ -408,46 +405,24 @@ public void cleanupOldStats() {
logger.error("Failed to cleanup old location stats", e);
}
}

public static class LocationStatsResponse {
private final double lat;
private final double lon;
private final int queryCount;
private final String lastQueried;

public LocationStatsResponse(double lat, double lon, int queryCount, String lastQueried) {
this.lat = lat;
this.lon = lon;
this.queryCount = queryCount;
this.lastQueried = lastQueried;

void clearDatabase() {
try (PreparedStatement pstmt = connection.prepareStatement("DELETE FROM location_stats")) {
pstmt.executeUpdate();
} catch (SQLException e) {
logger.error("Failed to cleanup old location stats", e);
}

public double getLat() { return lat; }
public double getLon() { return lon; }
public int getQueryCount() { return queryCount; }
public String getLastQueried() { return lastQueried; }
}

private static class StatsRecord {
final String endpoint;
final String parametersJson;
final long responseTimeMs;
final int resultCount;
final String clientIp;
final LocalDate dateOnly;
final int hourBucket;
final int statusCode;

StatsRecord(String endpoint, String parametersJson, long responseTimeMs,
int resultCount, String clientIp, LocalDate dateOnly, int hourBucket, int statusCode) {
this.endpoint = endpoint;
this.parametersJson = parametersJson;
this.responseTimeMs = responseTimeMs;
this.resultCount = resultCount;
this.clientIp = clientIp;
this.dateOnly = dateOnly;
this.hourBucket = hourBucket;
this.statusCode = statusCode;
try (PreparedStatement pstmt = connection.prepareStatement("DELETE FROM query_stats")) {
pstmt.executeUpdate();
} catch (SQLException e) {
logger.error("Failed to cleanup old location stats", e);
}
}

public record LocationStatsResponse(double lat, double lon, int queryCount, String lastQueried) {
}

private record StatsRecord(String endpoint, String parametersJson, long responseTimeMs, int resultCount,
LocalDate dateOnly, int hourBucket, int statusCode) {
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
* along with Paikka. If not, see <https://www.gnu.org/licenses/>.
*/

package com.dedicatedcode.paikka.service;
package com.dedicatedcode.paikka.service.importer;

import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.simplify.DouglasPeuckerSimplifier;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,10 @@
* along with Paikka. If not, see <https://www.gnu.org/licenses/>.
*/

package com.dedicatedcode.paikka.service;
package com.dedicatedcode.paikka.service.importer;

import com.dedicatedcode.paikka.flatbuffers.Boundary;
import com.dedicatedcode.paikka.service.S2Helper;
import com.github.benmanes.caffeine.cache.Cache;
import org.locationtech.jts.algorithm.locate.IndexedPointInAreaLocator;
import org.locationtech.jts.geom.Coordinate;
Expand Down
Loading
Loading