4 Commits

SHA1 Message Date
eaa413e200 Objects: interfacify MaybeTombstone Data 2025-04-19 17:25:06 +02:00
f3e4d99fcb Objects: seal JDataVersionedWrapper 2025-04-19 12:07:36 +02:00
1c71b26ed8 Objects: 1 less field in JDataVersionedWrapperLazy 2025-04-19 12:06:33 +02:00
e6f95ef028 Remove supportlib
Nice idea, but the RAM usage explosion seems to cancel out the benefits.
2025-04-19 11:32:35 +02:00
56 changed files with 88 additions and 933 deletions

View File

@@ -89,102 +89,6 @@ jobs:
name: Webui
path: webui/dist
build-native-libs:
strategy:
matrix:
include:
- os: ubuntu-latest
cross: "linux/amd64"
- os: ubuntu-latest
cross: "linux/arm64"
- os: macos-latest
runs-on: ${{ matrix.os }}
env:
DO_LOCAL_BUILD: ${{ matrix.os == 'macos-latest' }}
DOCKER_PLATFORM: ${{ matrix.cross || 'NATIVE' }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set SANITIZED_DOCKER_PLATFORM
run: echo "SANITIZED_DOCKER_PLATFORM=$(echo $DOCKER_PLATFORM | tr / _ )" >> $GITHUB_ENV
- name: Set DOCKER_BUILDER_IMAGE
run: echo "DOCKER_BUILDER_IMAGE=dhfs_lib_builder-${{matrix.os}}-$SANITIZED_DOCKER_PLATFORM" >> $GITHUB_ENV
- name: Build config
run: |
echo DO_LOCAL_BUILD: $DO_LOCAL_BUILD
echo DOCKER_PLATFORM: $DOCKER_PLATFORM
echo SANITIZED_DOCKER_PLATFORM: $SANITIZED_DOCKER_PLATFORM
echo DOCKER_BUILDER_IMAGE: $DOCKER_BUILDER_IMAGE
- name: Set up JDK 21
if: ${{ env.DO_LOCAL_BUILD == 'TRUE' }}
uses: actions/setup-java@v4
with:
java-version: "21"
distribution: "zulu"
cache: maven
- name: Set up Docker Buildx
if: ${{ env.DO_LOCAL_BUILD != 'TRUE' }}
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
if: ${{ env.DO_LOCAL_BUILD != 'TRUE' }}
uses: docker/setup-qemu-action@v3
- name: Build Docker builder image
if: ${{ env.DO_LOCAL_BUILD != 'TRUE' }}
uses: docker/build-push-action@v5
with:
context: ./libdhfs_support/builder
file: ./libdhfs_support/builder/Dockerfile
push: false
platforms: ${{ env.DOCKER_PLATFORM }}
tags: ${{ env.DOCKER_BUILDER_IMAGE }}
cache-from: type=gha,scope=build-${{ env.DOCKER_BUILDER_IMAGE }}
cache-to: type=gha,mode=max,scope=build-${{ env.DOCKER_BUILDER_IMAGE }}
load: true
- name: Build the library
run: |
CMAKE_ARGS="-DCMAKE_BUILD_TYPE=Release" libdhfs_support/builder/cross-build.sh both build "$(pwd)/result"
- name: Upload build
uses: actions/upload-artifact@v4
with:
name: NativeLib-${{ matrix.os }}-${{ env.SANITIZED_DOCKER_PLATFORM }}
path: result
merge-native-libs:
runs-on: ubuntu-latest
needs: [build-native-libs]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: downloaded-libs
- name: Merge all
run: rsync -av downloaded-libs/NativeLib*/* result/
- name: Check that libs exists
run: |
test -f "result/Linux-x86_64/libdhfs_support.so" || exit 1
- name: Upload
uses: actions/upload-artifact@v4
with:
name: NativeLibs
path: result
publish-docker:
runs-on: ubuntu-latest
permissions:
@@ -194,7 +98,7 @@ jobs:
# with sigstore/fulcio when running outside of PRs.
id-token: write
needs: [build-webui, merge-native-libs, build-dhfs]
needs: [build-webui, build-dhfs]
steps:
- name: Checkout repository
@@ -212,12 +116,6 @@ jobs:
name: Webui
path: webui-dist-downloaded
- name: Download native libs
uses: actions/download-artifact@v4
with:
name: NativeLibs
path: dhfs-native-downloaded
- name: Show all the files
run: find .
@@ -293,7 +191,7 @@ jobs:
# with sigstore/fulcio when running outside of PRs.
id-token: write
needs: [build-webui, merge-native-libs, build-dhfs]
needs: [build-webui, build-dhfs]
steps:
- name: Checkout repository
@@ -309,11 +207,6 @@ jobs:
name: Webui
path: webui-dist-downloaded
- uses: actions/download-artifact@v4
with:
name: NativeLibs
path: dhfs-native-downloaded
- name: Show all the files
run: find .
@@ -326,9 +219,6 @@ jobs:
- name: Copy Webui
run: cp -r ./webui-dist-downloaded "run-wrapper-out/dhfs/app/Webui"
- name: Copy Webui
run: cp -r ./dhfs-native-downloaded "run-wrapper-out/dhfs/app/NativeLibs"
- name: Copy run wrapper
run: cp -r ./run-wrapper/* "run-wrapper-out/dhfs/app/"

View File

@@ -9,8 +9,6 @@ COPY ./dhfs-package-downloaded/*.jar .
COPY ./dhfs-package-downloaded/app .
COPY ./dhfs-package-downloaded/quarkus .
WORKDIR /usr/src/app/native-libs
COPY ./dhfs-native-downloaded/. .
WORKDIR /usr/src/app/webui
COPY ./webui-dist-downloaded/. .

View File

@@ -2,7 +2,7 @@
<configuration default="false" name="Main 2" type="QsApplicationConfigurationType" factoryName="QuarkusApplication">
<option name="MAIN_CLASS_NAME" value="com.usatiuk.dhfs.app.Main" />
<module name="dhfs-app" />
<option name="VM_PARAMETERS" value="-XX:+UnlockDiagnosticVMOptions -XX:+DebugNonSafepoints --add-exports java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-exports java.base/jdk.internal.access=ALL-UNNAMED -ea -Dcom.usatiuk.dhfs.supportlib.native-path=$ProjectFileDir$/target/classes/native -Xmx2G -Ddhfs.webui.root=$ProjectFileDir$/../webui/dist -Ddhfs.fuse.root=${HOME}/dhfs_test/2/fuse -Ddhfs.objects.persistence.files.root=${HOME}/dhfs_test/2/data -Ddhfs.objects.persistence.stuff.root=${HOME}/dhfs_test/2/data/stuff -Ddhfs.objects.peerdiscovery.broadcast=false -Dquarkus.http.port=9020 -Dquarkus.http.ssl-port=9021 -Ddhfs.peerdiscovery.preset-uuid=22000000-0000-0000-0000-000000000000 -Ddhfs.peerdiscovery.static-peers=11000000-0000-0000-0000-000000000000:127.0.0.1:9010:9011" />
<option name="VM_PARAMETERS" value="-XX:+UnlockDiagnosticVMOptions -XX:+DebugNonSafepoints --add-exports java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-exports java.base/jdk.internal.access=ALL-UNNAMED -ea -Xmx2G -Ddhfs.webui.root=$ProjectFileDir$/../webui/dist -Ddhfs.fuse.root=${HOME}/dhfs_test/2/fuse -Ddhfs.objects.persistence.files.root=${HOME}/dhfs_test/2/data -Ddhfs.objects.persistence.stuff.root=${HOME}/dhfs_test/2/data/stuff -Ddhfs.objects.peerdiscovery.broadcast=false -Dquarkus.http.port=9020 -Dquarkus.http.ssl-port=9021 -Ddhfs.peerdiscovery.preset-uuid=22000000-0000-0000-0000-000000000000 -Ddhfs.peerdiscovery.static-peers=11000000-0000-0000-0000-000000000000:127.0.0.1:8080:9011" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.usatiuk.dhfs.*" />

View File

@@ -2,7 +2,7 @@
<configuration default="false" name="Main" type="QsApplicationConfigurationType" factoryName="QuarkusApplication" nameIsGenerated="true">
<option name="MAIN_CLASS_NAME" value="com.usatiuk.dhfs.app.Main" />
<module name="dhfs-app" />
<option name="VM_PARAMETERS" value="-XX:+UnlockDiagnosticVMOptions -XX:+DebugNonSafepoints --add-exports java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-exports java.base/jdk.internal.access=ALL-UNNAMED -ea -Dcom.usatiuk.dhfs.supportlib.native-path=$ProjectFileDir$/target/classes/native -Xmx2G -Ddhfs.webui.root=$ProjectFileDir$/../webui/dist -Ddhfs.fuse.root=${HOME}/dhfs_test/1/fuse -Ddhfs.objects.persistence.files.root=${HOME}/dhfs_test/1/data -Ddhfs.objects.persistence.stuff.root=${HOME}/dhfs_test/1/data/stuff -Ddhfs.objects.peerdiscovery.broadcast=true -Dquarkus.http.port=8080 -Dquarkus.http.ssl-port=9011 -Ddhfs.peerdiscovery.preset-uuid=11000000-0000-0000-0000-000000000000 -Ddhfs.peerdiscovery.static-peers=22000000-0000-0000-0000-000000000000:127.0.0.1:9020:9021 -Dquarkus.http.host=0.0.0.0" />
<option name="VM_PARAMETERS" value="-XX:+UnlockDiagnosticVMOptions --enable-preview -XX:+UseParallelGC -XX:+DebugNonSafepoints --add-exports java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-exports java.base/jdk.internal.access=ALL-UNNAMED -ea -Xmx2G -Ddhfs.webui.root=$ProjectFileDir$/../webui/dist -Ddhfs.fuse.root=${HOME}/dhfs_test/1/fuse -Ddhfs.objects.persistence.files.root=${HOME}/dhfs_test/1/data -Ddhfs.objects.persistence.stuff.root=${HOME}/dhfs_test/1/data/stuff -Ddhfs.objects.peerdiscovery.broadcast=true -Dquarkus.http.port=8080 -Dquarkus.http.ssl-port=9011 -Ddhfs.peerdiscovery.preset-uuid=11000000-0000-0000-0000-000000000000 -Ddhfs.peerdiscovery.static-peers=22000000-0000-0000-0000-000000000000:127.0.0.1:9020:9021 -Dquarkus.http.host=0.0.0.0" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.usatiuk.dhfs.*" />

View File

@@ -127,11 +127,6 @@
<artifactId>kleppmanntree</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>supportlib</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>objects</artifactId>

View File

@@ -79,7 +79,6 @@ public class DhfsImage implements Future<String> {
"-Ddhfs.objects.sync.timeout=10",
"-Ddhfs.objects.sync.ping.timeout=5",
"-Ddhfs.objects.reconnect_interval=1s",
"-Dcom.usatiuk.dhfs.supportlib.native-path=/libs",
"-Dquarkus.log.category.\"com.usatiuk\".level=TRACE",
"-Dquarkus.log.category.\"com.usatiuk.dhfs\".level=TRACE",
"-Ddhfs.objects.periodic-push-op-interval=5s",

View File

@@ -5,7 +5,6 @@ dhfs.objects.ref_verification=true
dhfs.objects.deletion.delay=0
quarkus.log.category."com.usatiuk.dhfs".level=TRACE
quarkus.log.category."com.usatiuk.dhfs".min-level=TRACE
quarkus.class-loading.parent-first-artifacts=com.usatiuk.dhfs:supportlib
quarkus.http.test-port=0
quarkus.http.test-ssl-port=0
dhfs.local-discovery=false

View File

@@ -127,11 +127,6 @@
<artifactId>kleppmanntree</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>supportlib</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>objects</artifactId>

View File

@@ -5,7 +5,6 @@ dhfs.objects.ref_verification=true
dhfs.objects.deletion.delay=0
quarkus.log.category."com.usatiuk.dhfs".level=TRACE
quarkus.log.category."com.usatiuk.dhfs".min-level=TRACE
quarkus.class-loading.parent-first-artifacts=com.usatiuk.dhfs:supportlib
quarkus.http.test-port=0
quarkus.http.test-ssl-port=0
dhfs.local-discovery=false

View File

@@ -127,11 +127,6 @@
<artifactId>kleppmanntree</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>supportlib</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>objects</artifactId>

View File

@@ -5,7 +5,6 @@ import com.sun.security.auth.module.UnixSystem;
import com.usatiuk.dhfs.files.service.DhfsFileService;
import com.usatiuk.dhfs.files.service.DirectoryNotEmptyException;
import com.usatiuk.dhfs.files.service.GetattrRes;
import com.usatiuk.dhfs.supportlib.UninitializedByteBuffer;
import com.usatiuk.kleppmanntree.AlreadyExistsException;
import com.usatiuk.objects.JObjectKey;
import io.grpc.Status;
@@ -28,6 +27,7 @@ import ru.serce.jnrfuse.struct.FuseFileInfo;
import ru.serce.jnrfuse.struct.Statvfs;
import ru.serce.jnrfuse.struct.Timespec;
import java.nio.ByteBuffer;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Optional;
@@ -237,7 +237,7 @@ public class DhfsFuse extends FuseStubFS {
if (offset < 0) return -ErrorCodes.EINVAL();
try {
var fileKey = getFromHandle(fi.fh.get());
var buffer = UninitializedByteBuffer.allocateUninitialized((int) size);
var buffer = ByteBuffer.allocateDirect((int) size);
if (buffer.isDirect()) {
jnrPtrByteOutputAccessors.getUnsafe().copyMemory(
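
Note on the hunk above: with supportlib removed, the FUSE read path now allocates a plain NIO direct buffer. A minimal sketch of the swap, assuming nothing else in read() changes:

// Before: var buffer = UninitializedByteBuffer.allocateUninitialized((int) size);
// After: a plain direct buffer, always direct and zero-initialized by the JVM,
// so the isDirect() check that follows is now always true.
var buffer = ByteBuffer.allocateDirect((int) size);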

View File

@@ -5,7 +5,6 @@ dhfs.objects.ref_verification=true
dhfs.objects.deletion.delay=0
quarkus.log.category."com.usatiuk.dhfs".level=TRACE
quarkus.log.category."com.usatiuk.dhfs".min-level=TRACE
quarkus.class-loading.parent-first-artifacts=com.usatiuk.dhfs:supportlib
quarkus.http.test-port=0
quarkus.http.test-ssl-port=0
dhfs.local-discovery=false

View File

@@ -59,11 +59,6 @@
<artifactId>utils</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>supportlib</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-junit5-mockito</artifactId>

View File

@@ -1,6 +1,13 @@
package com.usatiuk.objects;
public interface JDataVersionedWrapper {
import com.usatiuk.objects.iterators.Data;
public sealed interface JDataVersionedWrapper extends Data<JDataVersionedWrapper> permits JDataVersionedWrapperLazy, JDataVersionedWrapperImpl {
@Override
default JDataVersionedWrapper value() {
return this;
}
JData data();
long version();
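
Sealing the wrapper and making it extend Data<JDataVersionedWrapper> means the wrapper acts as its own "present" value in tombstone-aware iterators, so no extra DataWrapper allocation is needed per entry. A sketch of the interface as it reads after this commit (estimateSize() is assumed from its use in CachingObjectPersistentStore further down; the hunk above elides it):

package com.usatiuk.objects;

import com.usatiuk.objects.iterators.Data;

public sealed interface JDataVersionedWrapper extends Data<JDataVersionedWrapper>
        permits JDataVersionedWrapperLazy, JDataVersionedWrapperImpl {
    // The wrapper is its own Data value, so iterators can return it directly.
    @Override
    default JDataVersionedWrapper value() {
        return this;
    }

    JData data();

    long version();

    // Assumed from its use in CachingObjectPersistentStore; not shown in the hunk above.
    int estimateSize();
}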

View File

@@ -2,12 +2,11 @@ package com.usatiuk.objects;
import java.util.function.Supplier;
public class JDataVersionedWrapperLazy implements JDataVersionedWrapper {
public final class JDataVersionedWrapperLazy implements JDataVersionedWrapper {
private JData _data;
private final long _version;
private final int _estimatedSize;
private Supplier<JData> _producer;
private Runnable _cacheCallback;
private JData _data;
public JDataVersionedWrapperLazy(long version, int estimatedSize, Supplier<JData> producer) {
_version = version;
@@ -19,8 +18,12 @@ public class JDataVersionedWrapperLazy implements JDataVersionedWrapper {
if (_data != null) {
throw new IllegalStateException("Cache callback can be set only before data is loaded");
}
_cacheCallback = cacheCallback;
var oldProducer = _producer;
_producer = () -> {
var ret = oldProducer.get();
cacheCallback.run();
return ret;
};
}
public JData data() {
@@ -32,10 +35,6 @@ public class JDataVersionedWrapperLazy implements JDataVersionedWrapper {
return _data;
_data = _producer.get();
if (_cacheCallback != null) {
_cacheCallback.run();
_cacheCallback = null;
}
_producer = null;
return _data;
}
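
The dropped field is _cacheCallback: instead of storing the callback separately and firing it in data(), setCacheCallback() now wraps the current producer so the callback runs right after the first load. The resulting setter, assembled from the hunk above:

public void setCacheCallback(Runnable cacheCallback) {
    if (_data != null) {
        throw new IllegalStateException("Cache callback can be set only before data is loaded");
    }
    // Compose the callback into the producer instead of keeping a separate field.
    var oldProducer = _producer;
    _producer = () -> {
        var ret = oldProducer.get();
        cacheCallback.run();
        return ret;
    };
}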

View File

@@ -1,7 +1,5 @@
package com.usatiuk.objects;
import com.usatiuk.dhfs.supportlib.UninitializedByteBuffer;
import java.io.Serial;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
@@ -48,7 +46,7 @@ public final class JObjectKeyImpl implements JObjectKey {
synchronized (this) {
if (_bb != null) return _bb;
var bytes = value.getBytes(StandardCharsets.ISO_8859_1);
var directBb = UninitializedByteBuffer.allocateUninitialized(bytes.length);
var directBb = ByteBuffer.allocateDirect(bytes.length);
directBb.put(bytes);
directBb.flip();
_bb = directBb;

View File

@@ -2,9 +2,6 @@ package com.usatiuk.objects.iterators;
import java.util.Optional;
public record Data<V>(V value) implements MaybeTombstone<V> {
@Override
public Optional<V> opt() {
return Optional.of(value);
}
public interface Data<V> extends MaybeTombstone<V> {
V value();
}

View File

@@ -0,0 +1,6 @@
package com.usatiuk.objects.iterators;
import java.util.Optional;
public record DataWrapper<V>(V value) implements Data<V> {
}

View File

@@ -3,5 +3,4 @@ package com.usatiuk.objects.iterators;
import java.util.Optional;
public interface MaybeTombstone<T> {
Optional<T> opt();
}

View File

@@ -9,22 +9,22 @@ public class NavigableMapKvIterator<K extends Comparable<K>, V> extends Reversib
private Iterator<Map.Entry<K, V>> _iterator;
private Map.Entry<K, V> _next;
public NavigableMapKvIterator(NavigableMap<K, V> map, IteratorStart start, K key) {
_map = map;
public NavigableMapKvIterator(NavigableMap<K, ? extends V> map, IteratorStart start, K key) {
_map = (NavigableMap<K, V>) map;
SortedMap<K, V> _view;
_goingForward = true;
switch (start) {
case GE -> _view = map.tailMap(key, true);
case GT -> _view = map.tailMap(key, false);
case GE -> _view = _map.tailMap(key, true);
case GT -> _view = _map.tailMap(key, false);
case LE -> {
var floorKey = map.floorKey(key);
var floorKey = _map.floorKey(key);
if (floorKey == null) _view = _map;
else _view = map.tailMap(floorKey, true);
else _view = _map.tailMap(floorKey, true);
}
case LT -> {
var lowerKey = map.lowerKey(key);
if (lowerKey == null) _view = _map;
else _view = map.tailMap(lowerKey, true);
else _view = _map.tailMap(lowerKey, true);
}
default -> throw new IllegalArgumentException("Unknown start type");
}
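
The constructor now accepts NavigableMap<K, ? extends V>, and all view lookups go through the captured _map field. A small runnable illustration of the four start modes and the NavigableMap views they correspond to (IteratorStart names taken from the hunk above; the map contents are made up for the example):

import java.util.NavigableMap;
import java.util.TreeMap;

public class StartModeDemo {
    public static void main(String[] args) {
        NavigableMap<Integer, String> map = new TreeMap<>();
        map.put(1, "a");
        map.put(3, "b");
        map.put(5, "c");

        System.out.println(map.tailMap(3, true));                // GE 3 -> {3=b, 5=c}
        System.out.println(map.tailMap(3, false));               // GT 3 -> {5=c}
        System.out.println(map.tailMap(map.floorKey(4), true));  // LE 4 -> {3=b, 5=c}
        System.out.println(map.tailMap(map.lowerKey(3), true));  // LT 3 -> {1=a, 3=b, 5=c}
    }
}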

View File

@@ -2,9 +2,5 @@ package com.usatiuk.objects.iterators;
import java.util.Optional;
public record Tombstone<V>() implements MaybeTombstone<V> {
@Override
public Optional<V> opt() {
return Optional.empty();
}
public interface Tombstone<V> extends MaybeTombstone<V> {
}

View File

@@ -0,0 +1,4 @@
package com.usatiuk.objects.iterators;
public record TombstoneImpl<V>() implements Tombstone<V> {
}
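
Taken together, the iterator hunks above replace the old Data and Tombstone records with a small interface hierarchy, with DataWrapper and TombstoneImpl as the default record implementations. A sketch of the resulting types (collapsed into one listing for readability; each lives in its own file under com.usatiuk.objects.iterators):

public interface MaybeTombstone<T> {
}

public interface Data<V> extends MaybeTombstone<V> {
    V value();
}

public interface Tombstone<V> extends MaybeTombstone<V> {
}

public record DataWrapper<V>(V value) implements Data<V> {
}

public record TombstoneImpl<V>() implements Tombstone<V> {
}

This lets classes such as JDataVersionedWrapper, the cache entries, and the pending-write records implement Data or Tombstone directly instead of being boxed into wrapper records on every iterator step.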

View File

@@ -16,7 +16,7 @@ public class TombstoneMergingKvIterator<K extends Comparable<K>, V> implements C
startType, startKey,
pair -> {
Log.tracev("{0} - Processing pair {1}", _name, pair);
if (pair instanceof Tombstone) {
if (pair instanceof Tombstone<V>) {
return null;
}
return ((Data<V>) pair).value();

View File

@@ -34,10 +34,9 @@ public class CachingObjectPersistentStore {
long version,
int sizeLimit) {
public Cache withPut(JObjectKey key, Optional<JDataVersionedWrapper> obj) {
int objSize = obj.map(JDataVersionedWrapper::estimateSize).orElse(16);
var entry = obj.<CacheEntry>map(o -> new CacheEntryPresent(o, o.estimateSize())).orElse(new CacheEntryMiss());
int newSize = size() + objSize;
var entry = new CacheEntry(obj.<MaybeTombstone<JDataVersionedWrapper>>map(Data::new).orElse(new Tombstone<>()), objSize);
int newSize = size() + entry.size();
var old = map.get(key);
if (old != null)
@@ -188,15 +187,8 @@ public class CachingObjectPersistentStore {
@Override
public CloseableKvIterator<JObjectKey, JDataVersionedWrapper> getIterator(IteratorStart start, JObjectKey key) {
return new TombstoneMergingKvIterator<>("cache", start, key,
(mS, mK)
-> new MappingKvIterator<>(
new NavigableMapKvIterator<>(_curCache.map(), mS, mK),
e -> {
// Log.tracev("Taken from cache: {0}", e);
return e.object();
}
),
(mS, mK) -> new MappingKvIterator<>(new CachingKvIterator(_backing.getIterator(start, key)), Data::new));
(mS, mK) -> new NavigableMapKvIterator<JObjectKey, MaybeTombstone<JDataVersionedWrapper>>(_curCache.map(), mS, mK),
(mS, mK) -> new CachingKvIterator(_backing.getIterator(start, key)));
}
@Nonnull
@@ -204,12 +196,12 @@ public class CachingObjectPersistentStore {
public Optional<JDataVersionedWrapper> readObject(JObjectKey name) {
var cached = _curCache.map().get(name);
if (cached != null) {
return switch (cached.object()) {
case Data<JDataVersionedWrapper> data -> Optional.of(data.value());
case Tombstone<JDataVersionedWrapper> tombstone -> {
return switch (cached) {
case CacheEntryPresent data -> Optional.of(data.value());
case CacheEntryMiss tombstone -> {
yield Optional.empty();
}
default -> throw new IllegalStateException("Unexpected value: " + cached.object());
default -> throw new IllegalStateException("Unexpected value: " + cached);
};
}
var read = _backing.readObject(name);
@@ -228,7 +220,7 @@ public class CachingObjectPersistentStore {
_backing.close();
}
private class CachingKvIterator implements CloseableKvIterator<JObjectKey, JDataVersionedWrapper> {
private class CachingKvIterator implements CloseableKvIterator<JObjectKey, MaybeTombstone<JDataVersionedWrapper>> {
private final CloseableKvIterator<JObjectKey, JDataVersionedWrapper> _delegate;
private CachingKvIterator(CloseableKvIterator<JObjectKey, JDataVersionedWrapper> delegate) {
@@ -261,10 +253,10 @@ public class CachingObjectPersistentStore {
}
@Override
public Pair<JObjectKey, JDataVersionedWrapper> prev() {
public Pair<JObjectKey, MaybeTombstone<JDataVersionedWrapper>> prev() {
var prev = _delegate.prev();
maybeCache(prev.getKey(), Optional.of(prev.getValue()));
return prev;
return (Pair<JObjectKey, MaybeTombstone<JDataVersionedWrapper>>) (Pair<JObjectKey, ?>) prev;
}
@Override
@@ -278,10 +270,10 @@ public class CachingObjectPersistentStore {
}
@Override
public Pair<JObjectKey, JDataVersionedWrapper> next() {
public Pair<JObjectKey, MaybeTombstone<JDataVersionedWrapper>> next() {
var next = _delegate.next();
maybeCache(next.getKey(), Optional.of(next.getValue()));
return next;
return (Pair<JObjectKey, MaybeTombstone<JDataVersionedWrapper>>) (Pair<JObjectKey, ?>) next;
}
}
};
@@ -294,6 +286,18 @@ public class CachingObjectPersistentStore {
}
}
private record CacheEntry(MaybeTombstone<JDataVersionedWrapper> object, int size) {
private interface CacheEntry extends MaybeTombstone<JDataVersionedWrapper> {
int size();
}
private record CacheEntryPresent(JDataVersionedWrapper value,
int size) implements CacheEntry, Data<JDataVersionedWrapper> {
}
private record CacheEntryMiss() implements CacheEntry, Tombstone<JDataVersionedWrapper> {
@Override
public int size() {
return 64;
}
}
}
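
The cache entry itself is now the MaybeTombstone: a present entry implements Data and carries the wrapper plus its measured size, while a negative entry implements Tombstone with a fixed size estimate. That is why getIterator() can hand the cache map straight to TombstoneMergingKvIterator without the former MappingKvIterator step. The hierarchy, as it appears in the hunk above:

private interface CacheEntry extends MaybeTombstone<JDataVersionedWrapper> {
    int size();
}

private record CacheEntryPresent(JDataVersionedWrapper value,
                                 int size) implements CacheEntry, Data<JDataVersionedWrapper> {
}

private record CacheEntryMiss() implements CacheEntry, Tombstone<JDataVersionedWrapper> {
    @Override
    public int size() {
        return 64; // fixed estimate for a cached miss, as in the diff
    }
}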

View File

@@ -1,6 +1,9 @@
package com.usatiuk.objects.stores;
import com.usatiuk.objects.JDataVersionedWrapper;
import com.usatiuk.objects.JObjectKey;
import com.usatiuk.objects.iterators.Tombstone;
public record PendingDelete(JObjectKey key, long bundleId) implements PendingWriteEntry {
public record PendingDelete(JObjectKey key,
long bundleId) implements PendingWriteEntry, Tombstone<JDataVersionedWrapper> {
}

View File

@@ -1,6 +1,7 @@
package com.usatiuk.objects.stores;
import com.usatiuk.objects.JDataVersionedWrapper;
import com.usatiuk.objects.iterators.Data;
public record PendingWrite(JDataVersionedWrapper data, long bundleId) implements PendingWriteEntry {
public record PendingWrite(JDataVersionedWrapper value, long bundleId) implements PendingWriteEntry, Data<JDataVersionedWrapper> {
}

View File

@@ -1,5 +1,8 @@
package com.usatiuk.objects.stores;
public interface PendingWriteEntry {
import com.usatiuk.objects.JDataVersionedWrapper;
import com.usatiuk.objects.iterators.MaybeTombstone;
public interface PendingWriteEntry extends MaybeTombstone<JDataVersionedWrapper> {
long bundleId();
}
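
The same pattern is applied to the writeback queue: pending entries double as Data/Tombstone values, so the _pendingWrites map can feed TombstoneMergingKvIterator directly (see the WritebackObjectPersistentStore hunk below). Assembled from the three hunks above:

public interface PendingWriteEntry extends MaybeTombstone<JDataVersionedWrapper> {
    long bundleId();
}

// Record component renamed data -> value so it satisfies Data<JDataVersionedWrapper>.value().
public record PendingWrite(JDataVersionedWrapper value,
                           long bundleId) implements PendingWriteEntry, Data<JDataVersionedWrapper> {
}

public record PendingDelete(JObjectKey key,
                            long bundleId) implements PendingWriteEntry, Tombstone<JDataVersionedWrapper> {
}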

View File

@@ -350,15 +350,9 @@ public class WritebackObjectPersistentStore {
@Override
public CloseableKvIterator<JObjectKey, JDataVersionedWrapper> getIterator(IteratorStart start, JObjectKey key) {
return new TombstoneMergingKvIterator<>("writeback-ps", start, key,
(tS, tK) -> new MappingKvIterator<>(
new NavigableMapKvIterator<>(_pendingWrites, tS, tK),
e -> switch (e) {
case PendingWrite pw -> new Data<>(pw.data());
case PendingDelete d -> new Tombstone<>();
default -> throw new IllegalStateException("Unexpected value: " + e);
}),
(tS, tK) -> new MappingKvIterator<>(_cache.getIterator(tS, tK), Data::new));
return new TombstoneMergingKvIterator<JObjectKey, JDataVersionedWrapper>("writeback-ps", start, key,
(tS, tK) -> new NavigableMapKvIterator<>(_pendingWrites, tS, tK),
(tS, tK) -> (CloseableKvIterator<JObjectKey, MaybeTombstone<JDataVersionedWrapper>>) (CloseableKvIterator<JObjectKey, ?>) _cache.getIterator(tS, tK));
}
@Nonnull
@@ -367,7 +361,7 @@ public class WritebackObjectPersistentStore {
var cached = _pendingWrites.get(name);
if (cached != null) {
return switch (cached) {
case PendingWrite c -> Optional.of(c.data());
case PendingWrite c -> Optional.of(c.value());
case PendingDelete d -> {
yield Optional.empty();
}

View File

@@ -21,10 +21,6 @@ import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
// Manages all access to com.usatiuk.objects.JData objects.
// In particular, it serves as a source of truth for what is committed to the backing storage.
// All data goes through it, it is responsible for transaction atomicity
// TODO: persistent tx id
@ApplicationScoped
public class JObjectManager {
private final List<PreCommitTxHook> _preCommitTxHooks;
@@ -219,7 +215,7 @@ public class JObjectManager {
// TODO: Every write gets a dependency due to hooks
continue;
// assert false;
// throw new TxCommitException("Serialization hazard: " + dep.isEmpty() + " vs " + read.getValue().data().isEmpty());
// throw new TxCommitException("Serialization hazard: " + dep.isEmpty() + " vs " + read.getValue().value().isEmpty());
}
if (current.get().version() > snapshotId) {
@@ -270,31 +266,4 @@ public class JObjectManager {
});
tx.close();
}
// private class TransactionObjectSourceImpl implements TransactionObjectSource {
// private final long _txId;
//
// private TransactionObjectSourceImpl(long txId) {
// _txId = txId;
// }
//
// @Override
// public <T extends JData> TransactionObject<T> get(Class<T> type, JObjectKey key) {
// var got = getObj(type, key);
// if (got.data().isPresent() && got.data().get().version() > _txId) {
// throw new TxCommitException("Serialization race for " + key + ": " + got.data().get().version() + " vs " + _txId);
// }
// return got;
// }
//
// @Override
// public <T extends JData> TransactionObject<T> getWriteLocked(Class<T> type, JObjectKey key) {
// var got = getObjLock(type, key);
// if (got.data().isPresent() && got.data().get().version() > _txId) {
// got.lock().close();
// throw new TxCommitException("Serialization race for " + key + ": " + got.data().get().version() + " vs " + _txId);
// }
// return got;
// }
// }
}

View File

@@ -7,7 +7,6 @@ import com.usatiuk.objects.iterators.*;
import com.usatiuk.objects.snapshot.Snapshot;
import com.usatiuk.objects.snapshot.SnapshotManager;
import io.quarkus.logging.Log;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.tuple.Pair;
@@ -166,12 +165,12 @@ public class TransactionFactoryImpl implements TransactionFactory {
(tS, tK) -> new MappingKvIterator<>(new NavigableMapKvIterator<>(_writes, tS, tK),
t -> switch (t) {
case TxRecord.TxObjectRecordWrite<?> write ->
new Data<>(new ReadTrackingInternalCrapTx(write.data()));
case TxRecord.TxObjectRecordDeleted deleted -> new Tombstone<>();
new DataWrapper<>(new ReadTrackingInternalCrapTx(write.data()));
case TxRecord.TxObjectRecordDeleted deleted -> new TombstoneImpl<>();
case null, default -> null;
}),
(tS, tK) -> new MappingKvIterator<>(_snapshot.getIterator(tS, tK),
d -> new Data<ReadTrackingInternalCrap>(new ReadTrackingInternalCrapSource(d)))));
d -> new DataWrapper<ReadTrackingInternalCrap>(new ReadTrackingInternalCrapSource(d)))));
}
@Override

View File

@@ -16,7 +16,6 @@
<module>dhfs-fuse</module>
<module>dhfs-app</module>
<module>kleppmanntree</module>
<module>supportlib</module>
<module>objects</module>
<module>utils</module>
</modules>
@@ -122,9 +121,6 @@
<systemPropertyVariables>
<java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
<maven.home>${maven.home}</maven.home>
<com.usatiuk.dhfs.supportlib.native-path>
${dhfs.native-libs-dir}
</com.usatiuk.dhfs.supportlib.native-path>
</systemPropertyVariables>
<argLine>
--add-exports java.base/sun.nio.ch=ALL-UNNAMED
@@ -192,7 +188,6 @@
--initialize-at-run-time=com.usatiuk.dhfs.utils.DataLocker$Lock,
--initialize-at-run-time=com.usatiuk.objects.stores.LmdbObjectPersistentStore$LmdbKvIterator,
--initialize-at-run-time=com.usatiuk.objects.stores.LmdbObjectPersistentStore,
--initialize-at-run-time=com.usatiuk.dhfs.supportlib.UninitializedByteBuffer,
--initialize-at-run-time=com.google.protobuf.UnsafeUtil
</quarkus.native.additional-build-args>
</properties>

View File

@@ -1,114 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>parent</artifactId>
<version>1.0-SNAPSHOT</version>
</parent>
<artifactId>supportlib</artifactId>
<properties>
<cmake.download>false</cmake.download>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>exec-maven-plugin</artifactId>
<groupId>org.codehaus.mojo</groupId>
<version>3.4.1</version>
<executions>
<execution>
<id>CMake Configure</id>
<phase>generate-sources</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>
${project.parent.basedir}/../libdhfs_support/builder/cross-build.sh
</executable>
<arguments>
<argument>configure</argument>
<argument>${project.build.outputDirectory}/native-build</argument>
<argument>${dhfs.native-libs-dir}</argument>
</arguments>
</configuration>
</execution>
<execution>
<id>CMake Build</id>
<phase>compile</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>
${project.parent.basedir}/../libdhfs_support/builder/cross-build.sh
</executable>
<arguments>
<argument>build</argument>
<argument>${project.build.outputDirectory}/native-build</argument>
<argument>${dhfs.native-libs-dir}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.googlecode.cmake-maven-project</groupId>
<artifactId>cmake-maven-plugin</artifactId>
<version>3.30.2-b1</version>
<executions>
<execution>
<id>cmake-generate</id>
<goals>
<goal>generate</goal>
</goals>
<configuration>
<sourcePath>
${project.parent.basedir}/../libdhfs_support
</sourcePath>
<targetPath>
${project.build.outputDirectory}/native-build-local
</targetPath>
<options>
<option>
-DJAVA_HOME=${java.home}
</option>
<option>
-DDHFS_LIB_INSTALL=${dhfs.native-libs-dir}
</option>
</options>
</configuration>
</execution>
<execution>
<id>cmake-compile</id>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<target>
install
</target>
<projectDirectory>
${project.build.outputDirectory}/native-build-local
</projectDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -1,15 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.nio.file.Path;
class DhfsNativeLibFinder {
static private final String LibName = "libdhfs_support";
static Path getLibPath() {
var override = System.getProperty("com.usatiuk.dhfs.supportlib.native-path-override");
if (override != null)
return Path.of(override);
return Path.of(System.getProperty("com.usatiuk.dhfs.supportlib.native-path"))
.resolve(SysUtils.getLibPlatform() + "-" + SysUtils.getLibArch()).resolve(LibName + "." + SysUtils.getLibExtension());
}
}

View File

@@ -1,35 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.nio.ByteBuffer;
import java.util.logging.Logger;
public class DhfsSupport {
public static final int PAGE_SIZE;
private static final Logger LOGGER = Logger.getLogger(DhfsSupport.class.getName());
private static final DhfsSupportImpl IMPLEMENTATION;
static {
DhfsSupportImpl tmp;
try {
System.load(DhfsNativeLibFinder.getLibPath().toAbsolutePath().toString());
tmp = new DhfsSupportImplNative();
} catch (Throwable e) {
LOGGER.warning("Failed to load native libraries, using fallback: \n" + e);
tmp = new DhfsSupportImplFallback();
}
IMPLEMENTATION = tmp;
PAGE_SIZE = getPageSizeInternal();
}
static long allocateUninitializedByteBuffer(ByteBuffer[] bb, int size) {
return IMPLEMENTATION.allocateUninitializedByteBuffer(bb, size);
}
static void releaseByteBuffer(long token) {
IMPLEMENTATION.releaseByteBuffer(token);
}
private static int getPageSizeInternal() {
return IMPLEMENTATION.getPageSizeInternal();
}
}

View File

@@ -1,11 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.nio.ByteBuffer;
interface DhfsSupportImpl {
long allocateUninitializedByteBuffer(ByteBuffer[] bb, int size);
void releaseByteBuffer(long token);
int getPageSizeInternal();
}

View File

@@ -1,21 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.nio.ByteBuffer;
class DhfsSupportImplFallback implements DhfsSupportImpl {
@Override
public long allocateUninitializedByteBuffer(ByteBuffer[] bb, int size) {
bb[0] = ByteBuffer.allocateDirect(size);
return -1;
}
@Override
public void releaseByteBuffer(long token) {
// GC
}
@Override
public int getPageSizeInternal() {
return 4096; // FIXME:?
}
}

View File

@@ -1,20 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.nio.ByteBuffer;
class DhfsSupportImplNative implements DhfsSupportImpl {
@Override
public long allocateUninitializedByteBuffer(ByteBuffer[] bb, int size) {
return DhfsSupportNative.allocateUninitializedByteBuffer(bb, size);
}
@Override
public void releaseByteBuffer(long token) {
DhfsSupportNative.releaseByteBuffer(token);
}
@Override
public int getPageSizeInternal() {
return DhfsSupportNative.PAGE_SIZE;
}
}

View File

@@ -1,20 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.nio.ByteBuffer;
class DhfsSupportNative {
static public final int PAGE_SIZE;
static {
System.load(DhfsNativeLibFinder.getLibPath().toAbsolutePath().toString());
PAGE_SIZE = getPageSizeInternal();
}
static native long allocateUninitializedByteBuffer(ByteBuffer[] bb, int size);
static native void releaseByteBuffer(long token);
private static native int getPageSizeInternal();
}

View File

@@ -1,43 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import org.apache.commons.lang3.SystemUtils;
class SysUtils {
static String getLibPlatform() {
if (SystemUtils.IS_OS_MAC) {
return "Darwin";
} else if (SystemUtils.IS_OS_LINUX) {
return "Linux";
} else {
throw new IllegalStateException("Unsupported OS: " + SystemUtils.OS_NAME);
}
}
static String getLibExtension() {
if (SystemUtils.IS_OS_MAC) {
return "dylib";
} else if (SystemUtils.IS_OS_LINUX) {
return "so";
} else {
throw new IllegalStateException("Unsupported OS: " + SystemUtils.OS_NAME);
}
}
static String getLibArch() {
if (SystemUtils.IS_OS_MAC) {
return switch (SystemUtils.OS_ARCH) {
case "aarch64" -> "arm64";
default -> throw new IllegalStateException("Unsupported architecture: " + SystemUtils.OS_ARCH);
};
} else if (SystemUtils.IS_OS_LINUX) {
return switch (SystemUtils.OS_ARCH) {
case "aarch64" -> "aarch64";
case "amd64" -> "x86_64";
default -> throw new IllegalStateException("Unsupported architecture: " + SystemUtils.OS_ARCH);
};
} else {
throw new IllegalStateException("Unsupported OS: " + SystemUtils.OS_NAME);
}
}
}

View File

@@ -1,28 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.lang.ref.Cleaner;
import java.nio.ByteBuffer;
import java.util.logging.Logger;
public class UninitializedByteBuffer {
private static final Cleaner CLEANER = Cleaner.create();
private static final Logger LOGGER = Logger.getLogger(UninitializedByteBuffer.class.getName());
public static ByteBuffer allocateUninitialized(int size) {
if (size < DhfsSupport.PAGE_SIZE)
return ByteBuffer.allocateDirect(size);
var bb = new ByteBuffer[1];
long token = DhfsSupport.allocateUninitializedByteBuffer(bb, size);
var ret = bb[0];
CLEANER.register(ret, () -> {
try {
DhfsSupport.releaseByteBuffer(token);
} catch (Throwable e) {
LOGGER.severe("Error releasing buffer: " + e);
System.exit(-1);
}
});
return ret;
}
}

View File

@@ -127,11 +127,6 @@
<artifactId>kleppmanntree</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>supportlib</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.usatiuk.dhfs</groupId>
<artifactId>objects</artifactId>

View File

@@ -5,7 +5,6 @@ dhfs.objects.ref_verification=true
dhfs.objects.deletion.delay=0
quarkus.log.category."com.usatiuk.dhfs".level=TRACE
quarkus.log.category."com.usatiuk.dhfs".min-level=TRACE
quarkus.class-loading.parent-first-artifacts=com.usatiuk.dhfs:supportlib
quarkus.http.test-port=0
quarkus.http.test-ssl-port=0
dhfs.local-discovery=false

View File

@@ -14,7 +14,6 @@ exec java \
-Ddhfs.fuse.root=/dhfs_root_fuse \
-Dquarkus.http.host=0.0.0.0 \
-Ddhfs.objects.ref_verification=false \
-Dcom.usatiuk.dhfs.supportlib.native-path=/usr/src/app/native-libs \
-Dquarkus.log.category.\"com.usatiuk.dhfs\".level=$DHFS_LOGLEVEL \
"$@" \
-jar quarkus-run.jar

View File

@@ -1,83 +0,0 @@
.DS_Store
/toolchain
/cmake-build-debug
/sysroot
/mvn-build
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# SonarLint plugin
.idea/sonarlint/
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

View File

@@ -1,39 +0,0 @@
cmake_minimum_required(VERSION 3.24)
project(libdhfs_support CXX)
if (CMAKE_BUILD_TYPE STREQUAL "Debug")
if (NOT SANITIZE)
set(SANITIZE YES)
endif ()
endif ()
include(CheckCXXCompilerFlag)
if (SANITIZE STREQUAL "YES")
message(STATUS "Enabling sanitizers!")
add_compile_options(-Werror -Wall -Wextra -pedantic -Wshadow -Wformat=2 -Wfloat-equal -D_GLIBCXX_DEBUG -Wconversion)
check_cxx_compiler_flag(-fsanitize-trap=all CAN_TRAP)
if (CAN_TRAP)
add_compile_options(-fsanitize=undefined -fsanitize-trap=all -fno-sanitize-recover)
add_link_options(-fsanitize=undefined -fsanitize-trap=all -fno-sanitize-recover)
else ()
message(WARNING "Sanitizers not supported!")
endif ()
endif ()
if (CMAKE_BUILD_TYPE STREQUAL "Release")
add_compile_options(-flto)
add_link_options(-flto)
endif ()
if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
add_compile_options(-O3)
add_link_options(-O3)
endif ()
message(STATUS "Build type: ${CMAKE_BUILD_TYPE}")
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
add_subdirectory(helpers)
add_subdirectory(DhfsSupportNative)

View File

@@ -1,26 +0,0 @@
set(CMAKE_CXX_STANDARD 17)
find_package(JNI REQUIRED COMPONENTS JVM)
find_package(Java REQUIRED)
include(UseJava)
add_jar(DhfsSupportNative
"${PROJECT_SOURCE_DIR}/../dhfs-parent/supportlib/src/main/java/com/usatiuk/dhfs/supportlib/DhfsSupportNative.java"
"LibPathDummy.java"
GENERATE_NATIVE_HEADERS DhfsSupportNative-native
)
add_library(dhfs_support SHARED
src/DhfsSupportNative.cpp
)
target_compile_options(dhfs_support PRIVATE
-Wno-unused-parameter
)
target_link_libraries(dhfs_support PRIVATE
helpers
DhfsSupportNative-native
)
install(TARGETS dhfs_support LIBRARY DESTINATION "${DHFS_LIB_INSTALL}/${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}")

View File

@@ -1,9 +0,0 @@
package com.usatiuk.dhfs.supportlib;
import java.nio.file.Path;
class DhfsNativeLibFinder {
static Path getLibPath() {
return null;
}
}

View File

@@ -1,55 +0,0 @@
#include <cstdio>
#include <cstdlib>
#include <cstdint>
#include <cassert>
#include "com_usatiuk_dhfs_supportlib_DhfsSupportNative.h"
#include "Utils.h"
#include "MemoryHelpers.h"
extern "C" {
JNIEXPORT jlong JNICALL Java_com_usatiuk_dhfs_supportlib_DhfsSupportNative_allocateUninitializedByteBuffer
(JNIEnv* env, jclass klass, jobjectArray bb, jint size) {
if (size < 0) {
env->ThrowNew(env->FindClass("java/lang/IllegalArgumentException"), "Size less than 0?");
return 0;
}
size_t checked_size = checked_cast<size_t>(size);
void* buf;
if (checked_size < MemoryHelpers::get_page_size())
buf = malloc(checked_size);
else
buf = std::aligned_alloc(MemoryHelpers::get_page_size(),
align_up(checked_size, MemoryHelpers::get_page_size()));
if (buf == nullptr) {
env->ThrowNew(env->FindClass("java/lang/OutOfMemoryError"), "Buffer memory allocation failed");
return 0;
}
env->SetObjectArrayElement(bb, 0, env->NewDirectByteBuffer(buf, checked_cast<jlong>(checked_size)));
jlong token = checked_cast<jlong>((uintptr_t) buf);
return token;
}
JNIEXPORT void JNICALL Java_com_usatiuk_dhfs_supportlib_DhfsSupportNative_releaseByteBuffer
(JNIEnv* env, jclass klass, jlong token) {
const auto addr = checked_cast<uintptr_t>(token);
if (addr == 0) {
env->ThrowNew(env->FindClass("java/lang/IllegalArgumentException"), "Trying to free null pointer");
return;
}
free((void*) addr);
}
JNIEXPORT jint JNICALL Java_com_usatiuk_dhfs_supportlib_DhfsSupportNative_getPageSizeInternal
(JNIEnv*, jclass) {
return checked_cast<jint>(MemoryHelpers::get_page_size());
}
}

View File

@@ -1,3 +0,0 @@
FROM rockylinux:8
RUN dnf install -y java-21-openjdk-headless java-21-openjdk-devel cmake gcc gcc-c++

View File

@@ -1,56 +0,0 @@
#!/usr/bin/env bash
CMAKE_ARGS="${CMAKE_ARGS:--DCMAKE_BUILD_TYPE=Debug}"
export SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
cd "$SCRIPT_DIR"
if [[ "${DO_LOCAL_BUILD^^}" != "TRUE" ]]; then
if [[ "$(uname)" == "Linux" ]]; then
if [[ -z "${DOCKER_PLATFORM}" ]]; then
echo "Already on linux"
exit 0
fi
fi
exec "$SCRIPT_DIR"/docker-launch.sh "$@"
fi
set -euxo pipefail
if [ $# -lt 3 ]; then
echo "Not enough arguments supplied: (build/configure) (build dir) (output dir)"
exit 1
fi
PROJECT_DIR="$SCRIPT_DIR/.."
CONFIGURE_DIR="$2"
INSTALL_DIR="$3"
function configure() {
cmake -B"$CONFIGURE_DIR" -S"$PROJECT_DIR" -DDHFS_LIB_INSTALL="$INSTALL_DIR" $CMAKE_ARGS
}
function build() {
cmake --build "$CONFIGURE_DIR" --target install
}
mkdir -p "$2"
mkdir -p "$3"
case "$1" in
"configure")
configure
;;
"build")
build
;;
"both")
configure
build
;;
*)
echo "Unknown command"
exit 1
;;
esac

View File

@@ -1,21 +0,0 @@
#!/usr/bin/env bash
set -exo pipefail
export SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
cd "$SCRIPT_DIR"
PLATFORM_ARG=""
if [[ ! -z "${DOCKER_PLATFORM}" ]]; then
PLATFORM_ARG="--platform $DOCKER_PLATFORM"
fi
if [[ -z "${DOCKER_BUILDER_IMAGE}" ]]; then
DOCKER_IMG_FILE=$(mktemp)
docker build $PLATFORM_ARG --iidfile "$DOCKER_IMG_FILE" .
DOCKER_BUILDER_IMAGE="$(cat "$DOCKER_IMG_FILE")"
fi
ROOT_DIR="$(dirname "$(dirname "$SCRIPT_DIR")")"
docker run $PLATFORM_ARG --rm -v "$ROOT_DIR:$ROOT_DIR" -e DO_LOCAL_BUILD=TRUE "$DOCKER_BUILDER_IMAGE" \
"$SCRIPT_DIR/cross-build.sh" "$@"

View File

@@ -1,9 +0,0 @@
set(CMAKE_CXX_STANDARD 17)
add_library(helpers
include/MemoryHelpers.h
src/MemoryHelpers.cpp
include/Utils.h
)
target_include_directories(helpers PUBLIC include)

View File

@@ -1,12 +0,0 @@
//
// Created by stepus53 on 24.8.24.
//
#ifndef MEMORYHELPERS_H
#define MEMORYHELPERS_H
namespace MemoryHelpers {
unsigned int get_page_size();
}
#endif //MEMORYHELPERS_H

View File

@@ -1,41 +0,0 @@
//
// Created by stepus53 on 24.8.24.
//
#ifndef UTILS_H
#define UTILS_H
#include <cassert>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wsign-compare"
template<typename To, typename From>
constexpr To checked_cast(const From& f) {
To result = static_cast<To>(f);
assert(f == result);
return result;
}
#pragma GCC diagnostic pop
template<typename T, typename A>
T align_up(T what, A alignment) {
assert(__builtin_popcount(alignment) == 1);
const T mask = checked_cast<T>(alignment - 1);
T ret;
if (what & mask)
ret = (what + mask) & ~mask;
else
ret = what;
assert((ret & mask) == 0);
return ret;
}
#endif //UTILS_H

View File

@@ -1,14 +0,0 @@
//
// Created by stepus53 on 24.8.24.
//
#include "MemoryHelpers.h"
#include <unistd.h>
#include "Utils.h"
unsigned int MemoryHelpers::get_page_size() {
static const auto PAGE_SIZE = checked_cast<unsigned int>(sysconf(_SC_PAGESIZE));
return PAGE_SIZE;
}

View File

@@ -36,7 +36,6 @@ java \
-Dquarkus.http.host=0.0.0.0 \
-Dquarkus.log.category.\"com.usatiuk\".level=INFO \
-Dquarkus.log.category.\"com.usatiuk.dhfs\".level=INFO \
-Dcom.usatiuk.dhfs.supportlib.native-path="$SCRIPT_DIR"/NativeLibs \
-Ddhfs.webui.root="$SCRIPT_DIR"/Webui $EXTRAOPTS_PARSED \
-jar "$SCRIPT_DIR"/"DHFS Package"/quarkus-run.jar >quarkus.log 2>&1 &