Make locking usage a little more consistent

This commit is contained in:
2024-06-15 14:06:39 +02:00
parent 3475a512cf
commit 6b52ce182c
13 changed files with 180 additions and 94 deletions

2
server/src/lombok.config Normal file
View File

@@ -0,0 +1,2 @@
lombok.accessors.prefix += _
lombok.accessors.prefix +=

View File

@@ -8,16 +8,16 @@ import java.util.Arrays;
@Getter
public class Chunk extends JObject {
final String hash;
final byte[] bytes;
final String _hash;
final byte[] _bytes;
public Chunk(byte[] bytes) {
this.bytes = Arrays.copyOf(bytes, bytes.length);
this.hash = DigestUtils.sha512Hex(bytes);
this._bytes = Arrays.copyOf(bytes, bytes.length);
this._hash = DigestUtils.sha512Hex(bytes);
}
@Override
public String getName() {
return hash;
return _hash;
}
}

View File

@@ -1,31 +0,0 @@
package com.usatiuk.dhfs.storage.files.objects;
import com.usatiuk.dhfs.storage.objects.jrepository.JObject;
import lombok.Getter;
import java.util.UUID;
// Base class for entries in a directory tree, each identified by a UUID.
public abstract class DirEntry extends JObject {
    @Getter
    final UUID uuid;

    // POSIX-style mode bits; guarded by this object's monitor.
    long mode;

    protected DirEntry(UUID uuid) {
        this.uuid = uuid;
    }

    // The storage name of a directory entry is simply its UUID in string form.
    @Override
    public String getName() {
        return uuid.toString();
    }

    // Synchronized so reads see the latest value written by setMode.
    public synchronized long getMode() {
        return mode;
    }

    // Returns this to allow chaining, e.g. new Directory(id).setMode(0755).
    public synchronized DirEntry setMode(long mode) {
        this.mode = mode;
        return this;
    }
}

View File

@@ -1,38 +1,75 @@
package com.usatiuk.dhfs.storage.files.objects;
import lombok.Getter;
import java.io.Serializable;
import java.util.*;
public class Directory extends DirEntry {
public class Directory extends FsNode {
public Directory(UUID uuid) {
super(uuid);
}
final Map<String, UUID> _children = new TreeMap<>();
public synchronized Map<String, UUID> getChildrenMap() {
return new TreeMap<>(_children);
public Directory(UUID uuid, long mode) {
super(uuid, mode);
}
public synchronized Optional<UUID> getKid(String name) {
if (_children.containsKey(name))
return Optional.of(_children.get(name));
else
return Optional.empty();
@Getter
public static class DirectoryData implements Serializable {
private final Map<String, UUID> _children = new TreeMap<>();
}
public synchronized boolean removeKid(String name) {
return _children.remove(name) != null;
final DirectoryData _directoryData = new DirectoryData();
@FunctionalInterface
public interface DirectoryFunction<R> {
R apply(FsNodeData fsNodeData, DirectoryData DirectoryData);
}
public synchronized boolean putKid(String name, UUID uuid) {
if (_children.containsKey(name))
return false;
_children.put(name, uuid);
return true;
public <R> R runReadLocked(DirectoryFunction<R> fn) {
lock.readLock().lock();
try {
return fn.apply(_fsNodeData, _directoryData);
} finally {
lock.readLock().unlock();
}
}
public synchronized List<String> getChildrenList() {
return _children.keySet().stream().toList();
public <R> R runWriteLocked(DirectoryFunction<R> fn) {
lock.writeLock().lock();
try {
return fn.apply(_fsNodeData, _directoryData);
} finally {
lock.writeLock().unlock();
}
}
public Map<String, UUID> getChildrenMap() {
return runReadLocked(((fsNodeData, directoryData) -> new TreeMap<>(directoryData.getChildren())));
}
public Optional<UUID> getKid(String name) {
return runReadLocked(((fsNodeData, directoryData) -> {
if (directoryData.getChildren().containsKey(name))
return Optional.of(directoryData.getChildren().get(name));
else return Optional.empty();
}));
}
public boolean removeKid(String name) {
return runWriteLocked((fsNodeData, directoryData) -> directoryData.getChildren().remove(name) != null);
}
public boolean putKid(String name, UUID uuid) {
return runWriteLocked((fsNodeData, directoryData) -> {
if (directoryData.getChildren().containsKey(name)) return false;
directoryData.getChildren().put(name, uuid);
return true;
});
}
public List<String> getChildrenList() {
return runReadLocked((fsNodeData, directoryData) -> directoryData.getChildren().keySet().stream().toList());
}
}

View File

@@ -6,33 +6,41 @@ import java.io.Serializable;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.UUID;
import java.util.function.Function;
public class File extends DirEntry {
public class File extends FsNode {
public File(UUID uuid) {
super(uuid);
}
@Getter
public static class FileData implements Serializable {
final NavigableMap<Long, String> chunks = new TreeMap<>();
public File(UUID uuid, long mode) {
super(uuid, mode);
}
final FileData fileData = new FileData();
@Getter
public static class FileData implements Serializable {
private final NavigableMap<Long, String> _chunks = new TreeMap<>();
}
public <T> T runReadLocked(Function<FileData, T> fn) throws Exception {
final FileData _fileData = new FileData();
@FunctionalInterface
public interface FileFunction<R> {
R apply(FsNodeData fsNodeData, FileData fileData);
}
public <R> R runReadLocked(FileFunction<R> fn) {
lock.readLock().lock();
try {
return fn.apply(fileData);
return fn.apply(_fsNodeData, _fileData);
} finally {
lock.readLock().unlock();
}
}
public <T> T runWriteLocked(Function<FileData, T> fn) throws Exception {
public <R> R runWriteLocked(FileFunction<R> fn) {
lock.writeLock().lock();
try {
return fn.apply(fileData);
return fn.apply(_fsNodeData, _fileData);
} finally {
lock.writeLock().unlock();
}

View File

@@ -0,0 +1,70 @@
package com.usatiuk.dhfs.storage.files.objects;
import com.usatiuk.dhfs.storage.objects.jrepository.JObject;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
import java.util.UUID;
import java.util.function.Function;
// Base class for filesystem nodes (files and directories), identified by a UUID.
// Mutable state lives in FsNodeData and must only be accessed through
// runReadLocked/runWriteLocked, which guard it with the inherited lock
// (NOTE(review): lock is declared in JObject — presumably a ReadWriteLock; confirm).
public abstract class FsNode extends JObject {
    @Getter
    final UUID _uuid;

    protected FsNode(UUID uuid) {
        this._uuid = uuid;
    }

    protected FsNode(UUID uuid, long mode) {
        this._uuid = uuid;
        // Use the generated accessor rather than writing the private field
        // directly, for consistency with the locked setMode path below.
        // (No lock needed here: the object is not yet published.)
        this._fsNodeData.setMode(mode);
    }

    // The storage name of a node is simply its UUID in string form.
    @Override
    public String getName() {
        return _uuid.toString();
    }

    // Lock-protected mutable state shared by all node kinds.
    public static class FsNodeData implements Serializable {
        @Getter
        @Setter
        private long _mode;
    }

    final FsNodeData _fsNodeData = new FsNodeData();

    // Callback type used by both the read- and write-locked runners.
    @FunctionalInterface
    public interface FsNodeFunction<R> {
        R apply(FsNodeData fsNodeData);
    }

    // Runs fn under the read lock; fn must not mutate the data.
    public <R> R runReadLocked(FsNodeFunction<R> fn) {
        lock.readLock().lock();
        try {
            return fn.apply(_fsNodeData);
        } finally {
            lock.readLock().unlock();
        }
    }

    // Runs fn under the write lock. Takes FsNodeFunction (not
    // java.util.function.Function) so the read and write paths share one
    // functional-interface type; lambda call sites compile unchanged.
    public <R> R runWriteLocked(FsNodeFunction<R> fn) {
        lock.writeLock().lock();
        try {
            return fn.apply(_fsNodeData);
        } finally {
            lock.writeLock().unlock();
        }
    }

    // Sets the mode bits under the write lock.
    public void setMode(long mode) {
        runWriteLocked((fsNodeData) -> {
            fsNodeData.setMode(mode);
            return null;
        });
    }

    // Reads the mode bits under the read lock.
    public long getMode() {
        return runReadLocked(FsNodeData::getMode);
    }
}

View File

@@ -1,6 +1,6 @@
package com.usatiuk.dhfs.storage.files.service;
import com.usatiuk.dhfs.storage.files.objects.DirEntry;
import com.usatiuk.dhfs.storage.files.objects.FsNode;
import com.usatiuk.dhfs.storage.files.objects.Directory;
import com.usatiuk.dhfs.storage.files.objects.File;
import io.smallrye.mutiny.Uni;
@@ -8,7 +8,7 @@ import io.smallrye.mutiny.Uni;
import java.util.Optional;
public interface DhfsFileService {
Uni<Optional<DirEntry>> getDirEntry(String name);
Uni<Optional<FsNode>> getDirEntry(String name);
Uni<Optional<File>> open(String name);
Uni<Optional<File>> create(String name, long mode);
Uni<Optional<Directory>> mkdir(String name, long mode);

View File

@@ -1,9 +1,9 @@
package com.usatiuk.dhfs.storage.files.service;
import com.usatiuk.dhfs.storage.files.objects.Chunk;
import com.usatiuk.dhfs.storage.files.objects.DirEntry;
import com.usatiuk.dhfs.storage.files.objects.Directory;
import com.usatiuk.dhfs.storage.files.objects.File;
import com.usatiuk.dhfs.storage.files.objects.FsNode;
import com.usatiuk.dhfs.storage.objects.jrepository.JObjectManager;
import com.usatiuk.dhfs.storage.objects.repository.ObjectRepository;
import io.quarkus.logging.Log;
@@ -37,7 +37,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
Log.info("Initializing file service");
if (!objectRepository.existsObject(namespace, new UUID(0, 0).toString()).await().indefinitely()) {
objectRepository.createNamespace(namespace).await().indefinitely();
jObjectManager.put(namespace, new Directory(new UUID(0, 0)).setMode(0755)).await().indefinitely();
jObjectManager.put(namespace, new Directory(new UUID(0, 0), 0755)).await().indefinitely();
}
getRoot().await().indefinitely();
}
@@ -47,7 +47,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
Log.info("Shutdown file service");
}
private Uni<Optional<DirEntry>> traverse(DirEntry from, Path path) {
private Uni<Optional<FsNode>> traverse(FsNode from, Path path) {
if (path.getNameCount() == 0) return Uni.createFrom().item(Optional.of(from));
if (!(from instanceof Directory dir))
@@ -56,13 +56,13 @@ public class DhfsFileServiceImpl implements DhfsFileService {
var pathFirstPart = path.getName(0).toString();
var found = dir.getKid(pathFirstPart);
if (found == null)
if (found.isEmpty())
return Uni.createFrom().item(Optional.empty());
var ref = jObjectManager.get(namespace, found.get().toString(), DirEntry.class)
var ref = jObjectManager.get(namespace, found.get().toString(), FsNode.class)
.await().indefinitely();
if (!ref.isPresent()) {
if (ref.isEmpty()) {
Log.error("File missing when traversing directory " + from.getName() + ": " + found);
return Uni.createFrom().item(Optional.empty());
}
@@ -73,7 +73,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
}
@Override
public Uni<Optional<DirEntry>> getDirEntry(String name) {
public Uni<Optional<FsNode>> getDirEntry(String name) {
var root = getRoot().await().indefinitely();
var found = traverse(root, Path.of(name)).await().indefinitely();
return Uni.createFrom().item(found);
@@ -216,7 +216,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
AtomicReference<List<Map.Entry<Long, String>>> chunksList = new AtomicReference<>();
try {
file.runReadLocked(fileData -> {
file.runReadLocked((fsNodeData, fileData) -> {
var chunksAll = fileData.getChunks();
chunksList.set(chunksAll.tailMap(chunksAll.floorKey(offset)).entrySet().stream().toList());
return null;
@@ -282,7 +282,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
// FIXME:
try {
file.runReadLocked(fileData -> {
file.runReadLocked((fsNodeData, fileData) -> {
chunksAllRef.set(new TreeMap<>(fileData.getChunks()));
return null;
});
@@ -344,7 +344,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
}
try {
file.runWriteLocked(fileData -> {
file.runWriteLocked((fsNodeData, fileData) -> {
fileData.getChunks().clear();
fileData.getChunks().putAll(newChunks);
return null;
@@ -370,7 +370,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
if (length == 0) {
try {
file.runWriteLocked(fileData -> {
file.runWriteLocked((fsNodeData, fileData) -> {
fileData.getChunks().clear();
return null;
});
@@ -385,7 +385,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
AtomicReference<TreeMap<Long, String>> chunksAllRef = new AtomicReference<>();
try {
file.runReadLocked(fileData -> {
file.runReadLocked((fsNodeData, fileData) -> {
chunksAllRef.set(new TreeMap<>(fileData.getChunks()));
return null;
});
@@ -421,7 +421,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
}
try {
file.runWriteLocked(fileData -> {
file.runWriteLocked((fsNodeData, fileData) -> {
fileData.getChunks().clear();
fileData.getChunks().putAll(newChunks);
return null;
@@ -443,7 +443,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
AtomicReference<TreeMap<Long, String>> chunksAllRef = new AtomicReference<>();
try {
f.runReadLocked(fileData -> {
f.runReadLocked((fsNodeData, fileData) -> {
chunksAllRef.set(new TreeMap<>(fileData.getChunks()));
return null;
});
@@ -471,7 +471,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
@Override
public Uni<Directory> getRoot() {
var read = jObjectManager.get(namespace, new UUID(0, 0).toString(), DirEntry.class).await().indefinitely();
var read = jObjectManager.get(namespace, new UUID(0, 0).toString(), FsNode.class).await().indefinitely();
if (read.isEmpty() || !(read.get() instanceof Directory)) {
Log.error("Root directory not found");
}

View File

@@ -1,9 +1,9 @@
package com.usatiuk.dhfs.storage.fuse;
import com.sun.security.auth.module.UnixSystem;
import com.usatiuk.dhfs.storage.files.objects.DirEntry;
import com.usatiuk.dhfs.storage.files.objects.Directory;
import com.usatiuk.dhfs.storage.files.objects.File;
import com.usatiuk.dhfs.storage.files.objects.FsNode;
import com.usatiuk.dhfs.storage.files.service.DhfsFileService;
import io.quarkus.logging.Log;
import io.quarkus.runtime.Shutdown;
@@ -60,7 +60,7 @@ public class DhfsFuse extends FuseStubFS {
@Override
public int getattr(String path, FileStat stat) {
Optional<DirEntry> found;
Optional<FsNode> found;
try {
found = fileService.getDirEntry(path).await().indefinitely();
} catch (Exception e) {

View File

@@ -10,5 +10,5 @@ import lombok.experimental.Accessors;
@Setter
@AllArgsConstructor
public class Namespace {
final String name;
final String _name;
}

View File

@@ -10,8 +10,8 @@ import lombok.experimental.Accessors;
@Setter
@AllArgsConstructor
public class Object {
final Namespace namespace;
final Namespace _namespace;
final String name;
final byte[] data;
final String _name;
final byte[] _data;
}

View File

@@ -19,11 +19,11 @@ public class JObjectManagerImpl implements JObjectManager {
private static class NamedSoftReference extends SoftReference<JObject> {
public NamedSoftReference(JObject target, ReferenceQueue<? super JObject> q) {
super(target, q);
this.key = target.getName();
this._key = target.getName();
}
@Getter
final String key;
final String _key;
}
private final HashMap<String, NamedSoftReference> _map = new HashMap<>();
@@ -33,8 +33,8 @@ public class JObjectManagerImpl implements JObjectManager {
NamedSoftReference cur;
while ((cur = (NamedSoftReference) _refQueue.poll()) != null) {
synchronized (_map) {
if (_map.containsKey(cur.key) && (_map.get(cur.key).get() == null))
_map.remove(cur.key);
if (_map.containsKey(cur._key) && (_map.get(cur._key).get() == null))
_map.remove(cur._key);
}
}
}

View File

@@ -39,7 +39,7 @@ public class DhfsFileServiceSimpleTest extends SimpleFileRepoTest {
Chunk c2 = new Chunk("678".getBytes());
Chunk c3 = new Chunk("91011".getBytes());
File f = new File(fuuid);
Assertions.assertDoesNotThrow(() -> f.runWriteLocked(fileData -> {
Assertions.assertDoesNotThrow(() -> f.runWriteLocked((fsNodeData, fileData) -> {
fileData.getChunks().put(0L, c1.getHash());
fileData.getChunks().put((long) c1.getBytes().length, c2.getHash());
fileData.getChunks().put((long) c1.getBytes().length + c2.getBytes().length, c3.getHash());