#
# A fatal error has been detected by the Java Runtime Environment:
#
# SIGSEGV (0xb) at pc=0x000000013dec82fc, pid=49078, tid=35331
#
# JRE version: OpenJDK Runtime Environment Corretto-21.0.4.7.1 (21.0.4+7) (build 21.0.4+7-LTS)
# Java VM: OpenJDK 64-Bit Server VM Corretto-21.0.4.7.1 (21.0.4+7-LTS, mixed mode, sharing, tiered, compressed oops, compressed class ptrs, g1 gc, bsd-aarch64)
# Problematic frame:
# C [libduckdb_java8757326263049628978.so+0xdc82fc] duckdb::StandardBufferManager::Pin(duckdb::shared_ptr<duckdb::BlockHandle, true>&)+0x4c
#
package org.bjenuhb.learn.postgresduckdb;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import lombok.SneakyThrows;
import org.junit.jupiter.api.Test;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Random;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
public class ConcurrencyTestV2 {
    AtomicLong writeCount = new AtomicLong(0);
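    // Repro: set up the shard databases, then run concurrent single-row updates
    // from a pool of writer threads while the main thread sleeps.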
    @Test
    @SneakyThrows
    void concurrentReadWrite() {
        HikariDataSource dataSource = createDataSource();
        int numShards = 3;
        int numRows = 10000000;
        int numThreads = 10;
        setupShards(dataSource, numShards, numRows);
        concurrentWrite(dataSource, numShards, numThreads, numRows);
        monitor();
        TimeUnit.SECONDS.sleep(1000);
    }
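    // Prints the cumulative write count once per second so progress is visible while the writers run.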
    void monitor() {
        new Thread(() -> {
            while (true) {
                try {
                    Thread.sleep(1000);
                    System.out.println("Write count: " + writeCount.get());
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }
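    // Each writer thread loops forever: borrow a pooled connection, switch to a random shard with USE,
    // update one row by id, and commit.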
    void concurrentWrite(HikariDataSource dataSource,
                         int numShards,
                         int numThreads,
                         int numRows) {
        AtomicInteger atomicInteger = new AtomicInteger(0);
        var executorService = Executors.newFixedThreadPool(numThreads);
        Random random = new Random();
        for (int i = 0; i < numThreads; i++) {
            executorService.submit(() -> {
                while (true) {
                    try (Connection connection = dataSource.getConnection()) {
                        int shardId = random.nextInt(numShards);
                        executeQuery(connection, "use shard" + shardId);
                        int rowId = getNext(atomicInteger, numRows);
                        executeQuery(connection, "update test set amount = amount + 1 where id = " + rowId);
                        connection.commit();
                        writeCount.incrementAndGet();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            });
        }
    }
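    // Hands out row ids in round-robin fashion, wrapping back to the start once max is reached.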
    int getNext(AtomicInteger integer, int max) {
        if (integer.get() >= max) {
            synchronized (integer) {
                if (integer.get() >= max) {
                    integer.set(0);
                }
                return integer.incrementAndGet();
            }
        }
        return integer.incrementAndGet();
    }
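    // HikariCP pool over a single DuckDB file; auto-commit is disabled so each update runs in an explicit transaction.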
    HikariDataSource createDataSource() {
        HikariConfig hikariConfig = new HikariConfig();
        hikariConfig.setJdbcUrl("jdbc:duckdb:test.db");
        hikariConfig.setMaximumPoolSize(100);
        hikariConfig.setAutoCommit(false);
        return new HikariDataSource(hikariConfig);
    }
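    // Attaches one DuckDB database file per shard and fills its test table with numRows rows.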
    @SneakyThrows
    void setupShards(HikariDataSource dataSource,
                     int numShards,
                     int numRows) {
        Connection connection = dataSource.getConnection();
        for (int i = 0; i < numShards; i++) {
            executeQuery(connection, "attach database 'shard" + i + ".db' as shard" + i);
            executeQuery(connection, "use shard" + i);
            executeQuery(connection, "create or replace table test (id bigint primary key, amount int, description varchar)");
            executeQuery(connection, "insert into test SELECT range id, cast(random() * 100000 as bigint) as amount, repeat('x', 10) as description FROM range(" + numRows + ");");
            connection.commit();
        }
        connection.close();
    }
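    // Prepares and executes a single statement on the given connection.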
    @SneakyThrows
    void executeQuery(Connection connection, String query) {
        try (PreparedStatement statement = connection.prepareStatement(query)) {
            statement.execute();
        }
    }
}
What happens?
The JVM crashes with a SIGSEGV during concurrent writes; the fatal error log above points at duckdb::StandardBufferManager::Pin in the native DuckDB library.
To Reproduce
Run the JUnit test above (ConcurrencyTestV2.concurrentReadWrite): it attaches three shard databases, populates them, and then issues concurrent single-row updates from ten writer threads until the crash occurs.
OS:
OSX
DuckDB Version:
1.2.1
DuckDB Client:
Java
Hardware:
Mac M1
Full Name:
Basanth Jenu H B
Affiliation:
Intuit
What is the latest build you tested with? If possible, we recommend testing with the latest nightly build.
I have tested with a stable release
Did you include all relevant data sets for reproducing the issue?
Yes
Did you include all code required to reproduce the issue?
Did you include all relevant configuration (e.g., CPU architecture, Python version, Linux distribution) to reproduce the issue?