slight cleanup: use persistent maps (pcollections PMap) for tree node children instead of copying HashMaps, and back PeerId by JObjectKey instead of UUID

This commit is contained in:
2025-02-16 21:45:51 +01:00
parent de19705531
commit 73f5b9cdd9
9 changed files with 30 additions and 33 deletions

View File

@@ -26,5 +26,9 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.pcollections</groupId>
<artifactId>pcollections</artifactId>
</dependency>
</dependencies>
</project>

View File

@@ -53,8 +53,7 @@ public class KleppmannTree<TimestampT extends Comparable<TimestampT>, PeerIdT ex
var node = _storage.getById(effect.childId());
var curParent = _storage.getById(effect.newParentId());
{
var newCurParentChildren = new HashMap<>(curParent.children());
newCurParentChildren.remove(node.meta().getName());
var newCurParentChildren = curParent.children().minus(node.meta().getName());
curParent = curParent.withChildren(newCurParentChildren);
_storage.putNode(curParent);
}
@@ -65,8 +64,7 @@ public class KleppmannTree<TimestampT extends Comparable<TimestampT>, PeerIdT ex
// Needs to be read after changing curParent, as it might be the same node
var oldParent = _storage.getById(effect.oldInfo().oldParent());
{
var newOldParentChildren = new HashMap<>(oldParent.children());
newOldParentChildren.put(node.meta().getName(), node.key());
var newOldParentChildren = oldParent.children().plus(node.meta().getName(), node.key());
oldParent = oldParent.withChildren(newOldParentChildren);
_storage.putNode(oldParent);
}
@@ -79,8 +77,7 @@ public class KleppmannTree<TimestampT extends Comparable<TimestampT>, PeerIdT ex
var node = _storage.getById(effect.childId());
var curParent = _storage.getById(effect.newParentId());
{
var newCurParentChildren = new HashMap<>(curParent.children());
newCurParentChildren.remove(node.meta().getName());
var newCurParentChildren = curParent.children().minus(node.meta().getName());
curParent = curParent.withChildren(newCurParentChildren);
_storage.putNode(curParent);
}
@@ -149,10 +146,9 @@ public class KleppmannTree<TimestampT extends Comparable<TimestampT>, PeerIdT ex
for (var n : inTrash) {
var node = _storage.getById(n);
{
var newTrashChildren = new HashMap<>(trash.children());
if (newTrashChildren.remove(n.toString()) == null)
if (!trash.children().containsKey(n.toString()))
LOGGER.severe("Node " + node.key() + " not found in trash but should be there");
trash = trash.withChildren(newTrashChildren);
trash = trash.withChildren(trash.children().minus(n.toString()));
_storage.putNode(trash);
}
_storage.removeNode(n);
@@ -307,8 +303,7 @@ public class KleppmannTree<TimestampT extends Comparable<TimestampT>, PeerIdT ex
node = _storage.getById(effect.childId());
}
if (oldParentNode != null) {
var newOldParentChildren = new HashMap<>(oldParentNode.children());
newOldParentChildren.remove(effect.oldInfo().oldMeta().getName());
var newOldParentChildren = oldParentNode.children().minus(effect.oldInfo().oldMeta().getName());
oldParentNode = oldParentNode.withChildren(newOldParentChildren);
_storage.putNode(oldParentNode);
}
@@ -317,8 +312,7 @@ public class KleppmannTree<TimestampT extends Comparable<TimestampT>, PeerIdT ex
newParentNode = _storage.getById(effect.newParentId());
{
var newNewParentChildren = new HashMap<>(newParentNode.children());
newNewParentChildren.put(effect.newMeta().getName(), effect.childId());
var newNewParentChildren = newParentNode.children().plus(effect.newMeta().getName(), effect.childId());
newParentNode = newParentNode.withChildren(newNewParentChildren);
_storage.putNode(newParentNode);
}

View File

@@ -1,5 +1,7 @@
package com.usatiuk.kleppmanntree;
import org.pcollections.PMap;
import java.io.Serializable;
import java.util.Map;
@@ -12,7 +14,7 @@ public interface TreeNode<TimestampT extends Comparable<TimestampT>, PeerIdT ext
MetaT meta();
Map<String, NodeIdT> children();
PMap<String, NodeIdT> children();
TreeNode<TimestampT, PeerIdT, MetaT, NodeIdT> withParent(NodeIdT parent);
@@ -20,5 +22,5 @@ public interface TreeNode<TimestampT extends Comparable<TimestampT>, PeerIdT ext
TreeNode<TimestampT, PeerIdT, MetaT, NodeIdT> withMeta(MetaT meta);
TreeNode<TimestampT, PeerIdT, MetaT, NodeIdT> withChildren(Map<String, NodeIdT> children);
TreeNode<TimestampT, PeerIdT, MetaT, NodeIdT> withChildren(PMap<String, NodeIdT> children);
}

View File

@@ -73,7 +73,7 @@ public class KleppmanTreeSimpleTest {
// Second node wins as it has smaller timestamp
Assertions.assertIterableEquals(List.of("Test2"), testNode1._storageInterface.getById(testNode2._storageInterface.getRootId()).children().keySet());
Assertions.assertIterableEquals(List.of("Test1", "TestFile"), testNode1._storageInterface.getById(d2id).children().keySet());
Assertions.assertIterableEquals(List.of("Test1", "TestFile"), testNode1._storageInterface.getById(d2id).children().keySet().stream().sorted().toList());
Assertions.assertEquals(d2id, testNode1._tree.traverse(List.of("Test2")));
Assertions.assertEquals(d1id, testNode1._tree.traverse(List.of("Test2", "Test1")));
Assertions.assertEquals(f1id, testNode1._tree.traverse(List.of("Test2", "TestFile")));

View File

@@ -1,14 +1,14 @@
package com.usatiuk.kleppmanntree;
import java.util.Collections;
import java.util.Map;
import org.pcollections.HashTreePMap;
import org.pcollections.PMap;
public record TestTreeNode(Long key, Long parent, OpMove<Long, Long, TestNodeMeta, Long> lastEffectiveOp,
TestNodeMeta meta,
Map<String, Long> children) implements TreeNode<Long, Long, TestNodeMeta, Long> {
PMap<String, Long> children) implements TreeNode<Long, Long, TestNodeMeta, Long> {
public TestTreeNode(Long id, Long parent, TestNodeMeta meta) {
this(id, parent, null, meta, Collections.emptyMap());
this(id, parent, null, meta, HashTreePMap.empty());
}
@Override
@@ -27,7 +27,7 @@ public record TestTreeNode(Long key, Long parent, OpMove<Long, Long, TestNodeMet
}
@Override
public TreeNode<Long, Long, TestNodeMeta, Long> withChildren(Map<String, Long> children) {
public TreeNode<Long, Long, TestNodeMeta, Long> withChildren(PMap<String, Long> children) {
return new TestTreeNode(key, parent, lastEffectiveOp, meta, children);
}
}

View File

@@ -1,15 +1,10 @@
package com.usatiuk.dhfs.objects;
import java.io.Serializable;
import java.util.UUID;
public record PeerId(UUID id) implements Serializable, Comparable<PeerId> {
public static PeerId of(UUID id) {
return new PeerId(id);
}
public record PeerId(JObjectKey id) implements Serializable, Comparable<PeerId> {
public static PeerId of(String id) {
return new PeerId(UUID.fromString(id));
return new PeerId(JObjectKey.of(id));
}
@Override

View File

@@ -6,7 +6,9 @@ import com.usatiuk.dhfs.objects.PeerId;
import com.usatiuk.dhfs.objects.repository.peersync.structs.JKleppmannTreeNodeMetaPeer;
import com.usatiuk.kleppmanntree.OpMove;
import com.usatiuk.kleppmanntree.TreeNode;
import org.pcollections.HashTreePMap;
import org.pcollections.PCollection;
import org.pcollections.PMap;
import org.pcollections.TreePSet;
import java.io.Serializable;
@@ -20,10 +22,10 @@ import java.util.stream.Stream;
public record JKleppmannTreeNode(JObjectKey key, PCollection<JObjectKey> refsFrom, boolean frozen, JObjectKey parent,
OpMove<Long, PeerId, JKleppmannTreeNodeMeta, JObjectKey> lastEffectiveOp,
JKleppmannTreeNodeMeta meta,
Map<String, JObjectKey> children) implements TreeNode<Long, PeerId, JKleppmannTreeNodeMeta, JObjectKey>, JDataRefcounted, Serializable {
PMap<String, JObjectKey> children) implements TreeNode<Long, PeerId, JKleppmannTreeNodeMeta, JObjectKey>, JDataRefcounted, Serializable {
public JKleppmannTreeNode(JObjectKey id, JObjectKey parent, JKleppmannTreeNodeMeta meta) {
this(id, TreePSet.empty(), false, parent, null, meta, Collections.emptyMap());
this(id, TreePSet.empty(), false, parent, null, meta, HashTreePMap.empty());
}
@Override
@@ -42,7 +44,7 @@ public record JKleppmannTreeNode(JObjectKey key, PCollection<JObjectKey> refsFro
}
@Override
public JKleppmannTreeNode withChildren(Map<String, JObjectKey> children) {
public JKleppmannTreeNode withChildren(PMap<String, JObjectKey> children) {
return new JKleppmannTreeNode(key, refsFrom, frozen, parent, lastEffectiveOp, meta, children);
}

View File

@@ -59,7 +59,7 @@ public class PersistentPeerDataService {
return;
} else {
try {
_selfUuid = presetUuid.map(s -> PeerId.of(UUID.fromString(s))).orElseGet(() -> PeerId.of(UUID.randomUUID()));
_selfUuid = presetUuid.map(PeerId::of).orElseGet(() -> PeerId.of(UUID.randomUUID().toString()));
Log.info("Generating a key pair, please wait");
_selfKeyPair = CertificateTools.generateKeyPair();
_selfCertificate = CertificateTools.generateCertificate(_selfKeyPair, _selfUuid.toString());

View File

@@ -72,7 +72,7 @@ public class LocalPeerDiscoveryClient {
var got = PeerDiscoveryInfo.parseFrom(ByteBuffer.wrap(buf, 0, packet.getLength()));
peerDiscoveryDirectory.notifyAddr(
new IpPeerAddress(
PeerId.of(UUID.fromString(got.getUuid())),
PeerId.of(got.getUuid()),
PeerAddressType.LAN,
packet.getAddress(),
got.getPort(),