Mirror of https://github.com/usatiuk/dhfs.git (synced 2025-10-29 04:57:48 +01:00)
Server: don't write-lock fs tree when opening files
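
open() only reads the filesystem tree, so it no longer resolves paths through the write-locked tree handle. DhfsFileServiceImpl gains a read-side accessor, getTreeR(), which obtains the tree with LockingStrategy.OPTIMISTIC, plus a matching getDirEntryR(); the remaining operations in this diff (create, mkdir, rename, unlink, symlink, readDir, inoToParent) keep the write-locked path, now named getTreeW()/getDirEntryW(). A condensed sketch of the open() call path before and after, using only names that appear in the diff below:

    // Before: open(name) -> getDirEntry(name) -> getTree()
    //         -> jKleppmannTreeManager.getTree(new JObjectKey("fs"))                          // LockingStrategy.WRITE
    // After:  open(name) -> getDirEntryR(name) -> getTreeR()
    //         -> jKleppmannTreeManager.getTree(new JObjectKey("fs"), LockingStrategy.OPTIMISTIC)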
@@ -75,10 +75,14 @@ public class DhfsFileServiceImpl implements DhfsFileService {
     @Inject
     JMapHelper jMapHelper;
 
-    private JKleppmannTreeManager.JKleppmannTree getTree() {
+    private JKleppmannTreeManager.JKleppmannTree getTreeW() {
         return jKleppmannTreeManager.getTree(new JObjectKey("fs"));
     }
 
+    private JKleppmannTreeManager.JKleppmannTree getTreeR() {
+        return jKleppmannTreeManager.getTree(new JObjectKey("fs"), LockingStrategy.OPTIMISTIC);
+    }
+
     private ChunkData createChunk(ByteString bytes) {
         var newChunk = new ChunkData(JObjectKey.of(UUID.randomUUID().toString()), bytes);
         remoteTx.putData(newChunk);
@@ -87,18 +91,25 @@ public class DhfsFileServiceImpl implements DhfsFileService {
 
     void init(@Observes @Priority(500) StartupEvent event) {
         Log.info("Initializing file service");
-        getTree();
+        getTreeW();
     }
 
-    private JKleppmannTreeNode getDirEntry(String name) {
-        var res = getTree().traverse(StreamSupport.stream(Path.of(name).spliterator(), false).map(p -> p.toString()).toList());
+    private JKleppmannTreeNode getDirEntryW(String name) {
+        var res = getTreeW().traverse(StreamSupport.stream(Path.of(name).spliterator(), false).map(p -> p.toString()).toList());
+        if (res == null) throw new StatusRuntimeExceptionNoStacktrace(Status.NOT_FOUND);
+        var ret = curTx.get(JKleppmannTreeNode.class, res).orElseThrow(() -> new StatusRuntimeException(Status.NOT_FOUND.withDescription("Tree node exists but not found as jObject: " + name)));
+        return ret;
+    }
+
+    private JKleppmannTreeNode getDirEntryR(String name) {
+        var res = getTreeR().traverse(StreamSupport.stream(Path.of(name).spliterator(), false).map(p -> p.toString()).toList());
         if (res == null) throw new StatusRuntimeExceptionNoStacktrace(Status.NOT_FOUND);
         var ret = curTx.get(JKleppmannTreeNode.class, res).orElseThrow(() -> new StatusRuntimeException(Status.NOT_FOUND.withDescription("Tree node exists but not found as jObject: " + name)));
         return ret;
     }
 
     private Optional<JKleppmannTreeNode> getDirEntryOpt(String name) {
-        var res = getTree().traverse(StreamSupport.stream(Path.of(name).spliterator(), false).map(p -> p.toString()).toList());
+        var res = getTreeW().traverse(StreamSupport.stream(Path.of(name).spliterator(), false).map(p -> p.toString()).toList());
         if (res == null) return Optional.empty();
         var ret = curTx.get(JKleppmannTreeNode.class, res);
         return ret;
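
getDirEntryW() and getDirEntryR() now carry identical bodies and differ only in which tree handle they traverse. Not part of the commit, but as an illustration of that symmetry, the shared body could be factored through a helper that takes the handle (helper name and signature invented here):

    // Hypothetical shared helper, not in the commit; getDirEntryW and getDirEntryR
    // could both delegate to it with the tree handle they obtained.
    private JKleppmannTreeNode getDirEntry(JKleppmannTreeManager.JKleppmannTree tree, String name) {
        var res = tree.traverse(StreamSupport.stream(Path.of(name).spliterator(), false).map(p -> p.toString()).toList());
        if (res == null) throw new StatusRuntimeExceptionNoStacktrace(Status.NOT_FOUND);
        return curTx.get(JKleppmannTreeNode.class, res)
                .orElseThrow(() -> new StatusRuntimeException(Status.NOT_FOUND.withDescription("Tree node exists but not found as jObject: " + name)));
    }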
@@ -130,7 +141,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
     public Optional<JObjectKey> open(String name) {
         return jObjectTxManager.executeTx(() -> {
             try {
-                var ret = getDirEntry(name);
+                var ret = getDirEntryR(name);
                 return switch (ret.meta()) {
                     case JKleppmannTreeNodeMetaFile f -> Optional.of(f.getFileIno());
                     case JKleppmannTreeNodeMetaDirectory f -> Optional.of(ret.key());
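
For callers, open() behaves as before: it returns the file's inode key for files and the node's own key for directories (see the switch above), just without taking the fs-tree write lock. A minimal usage sketch, assuming fileService is an injected DhfsFileService:

    // Files resolve to f.getFileIno(); directories resolve to the tree node's own key.
    Optional<JObjectKey> ino = fileService.open("/some/dir/file.txt");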
@@ -154,7 +165,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
     public Optional<JObjectKey> create(String name, long mode) {
         return jObjectTxManager.executeTx(() -> {
             Path path = Path.of(name);
-            var parent = getDirEntry(path.getParent().toString());
+            var parent = getDirEntryW(path.getParent().toString());
 
             ensureDir(parent);
 
@@ -166,7 +177,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
             remoteTx.putData(f);
 
             try {
-                getTree().move(parent.key(), new JKleppmannTreeNodeMetaFile(fname, f.key()), getTree().getNewNodeId());
+                getTreeW().move(parent.key(), new JKleppmannTreeNodeMetaFile(fname, f.key()), getTreeW().getNewNodeId());
             } catch (Exception e) {
                 // fobj.getMeta().removeRef(newNodeId);
                 throw e;
@@ -179,7 +190,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
     @Override
     public Pair<String, JObjectKey> inoToParent(JObjectKey ino) {
         return jObjectTxManager.executeTx(() -> {
-            return getTree().findParent(w -> {
+            return getTreeW().findParent(w -> {
                 if (w.meta() instanceof JKleppmannTreeNodeMetaFile f)
                     return f.getFileIno().equals(ino);
                 return false;
@@ -191,14 +202,14 @@ public class DhfsFileServiceImpl implements DhfsFileService {
     public void mkdir(String name, long mode) {
         jObjectTxManager.executeTx(() -> {
             Path path = Path.of(name);
-            var parent = getDirEntry(path.getParent().toString());
+            var parent = getDirEntryW(path.getParent().toString());
             ensureDir(parent);
 
             String dname = path.getFileName().toString();
 
             Log.debug("Creating directory " + name);
 
-            getTree().move(parent.key(), new JKleppmannTreeNodeMetaDirectory(dname), getTree().getNewNodeId());
+            getTreeW().move(parent.key(), new JKleppmannTreeNodeMetaDirectory(dname), getTreeW().getNewNodeId());
         });
     }
 
@@ -210,21 +221,21 @@ public class DhfsFileServiceImpl implements DhfsFileService {
                 if (!allowRecursiveDelete && !node.children().isEmpty())
                     throw new DirectoryNotEmptyException();
             }
-            getTree().trash(node.meta(), node.key());
+            getTreeW().trash(node.meta(), node.key());
         });
     }
 
     @Override
     public Boolean rename(String from, String to) {
         return jObjectTxManager.executeTx(() -> {
-            var node = getDirEntry(from);
+            var node = getDirEntryW(from);
             JKleppmannTreeNodeMeta meta = node.meta();
 
             var toPath = Path.of(to);
-            var toDentry = getDirEntry(toPath.getParent().toString());
+            var toDentry = getDirEntryW(toPath.getParent().toString());
             ensureDir(toDentry);
 
-            getTree().move(toDentry.key(), meta.withName(toPath.getFileName().toString()), node.key());
+            getTreeW().move(toDentry.key(), meta.withName(toPath.getFileName().toString()), node.key());
             return true;
         });
     }
@@ -253,7 +264,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
     @Override
     public Iterable<String> readDir(String name) {
         return jObjectTxManager.executeTx(() -> {
-            var found = getDirEntry(name);
+            var found = getDirEntryW(name);
 
             if (!(found.meta() instanceof JKleppmannTreeNodeMetaDirectory md))
                 throw new StatusRuntimeException(Status.INVALID_ARGUMENT);
@@ -696,7 +707,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
     public JObjectKey symlink(String oldpath, String newpath) {
         return jObjectTxManager.executeTx(() -> {
             Path path = Path.of(newpath);
-            var parent = getDirEntry(path.getParent().toString());
+            var parent = getDirEntryW(path.getParent().toString());
 
             ensureDir(parent);
 
@@ -710,7 +721,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
             jMapHelper.put(f, JMapLongKey.of(0), newChunkData.key());
 
             remoteTx.putData(f);
-            getTree().move(parent.key(), new JKleppmannTreeNodeMetaFile(fname, f.key()), getTree().getNewNodeId());
+            getTreeW().move(parent.key(), new JKleppmannTreeNodeMetaFile(fname, f.key()), getTreeW().getNewNodeId());
             return f.key();
         });
     }
@@ -35,9 +35,9 @@ public class JKleppmannTreeManager {
     @Inject
     PeerInfoService peerInfoService;
 
-    public JKleppmannTree getTree(JObjectKey name) {
+    public JKleppmannTree getTree(JObjectKey name, LockingStrategy lockingStrategy) {
         return txManager.executeTx(() -> {
-            var data = curTx.get(JKleppmannTreePersistentData.class, name, LockingStrategy.WRITE).orElse(null);
+            var data = curTx.get(JKleppmannTreePersistentData.class, name, lockingStrategy).orElse(null);
             if (data == null) {
                 data = new JKleppmannTreePersistentData(
                         name,
@@ -59,6 +59,10 @@ public class JKleppmannTreeManager {
         });
     }
 
+    public JKleppmannTree getTree(JObjectKey name) {
+        return getTree(name, LockingStrategy.WRITE);
+    }
+
     public class JKleppmannTree {
         private final KleppmannTree<Long, PeerId, JKleppmannTreeNodeMeta, JObjectKey> _tree;
         private final JKleppmannTreeStorageInterface _storageInterface;
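
JKleppmannTreeManager now lets callers choose the locking strategy: the two-argument getTree() threads it into curTx.get() for the tree's persistent data, and the one-argument overload preserves the old behaviour by delegating with LockingStrategy.WRITE. A short usage sketch with the same names as in the diff:

    // Read-mostly callers (e.g. DhfsFileServiceImpl.getTreeR()) skip the write lock:
    var treeForReads = jKleppmannTreeManager.getTree(new JObjectKey("fs"), LockingStrategy.OPTIMISTIC);
    // Existing call sites are unchanged; the one-argument overload still write-locks:
    var treeForWrites = jKleppmannTreeManager.getTree(new JObjectKey("fs")); // == getTree(name, LockingStrategy.WRITE)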