truncate fix

How was this working before?
This commit is contained in:
2024-06-29 18:48:14 +02:00
parent 044dbdb441
commit 4c76b889c3
9 changed files with 312 additions and 210 deletions

View File

@@ -417,12 +417,15 @@ public class DhfsFileServiceImpl implements DhfsFileService {
});
}
private void cleanupChunks(String fileUuid, Collection<String> uuids) {
private void cleanupChunks(File f, Collection<String> uuids) {
// FIXME:
var inFile = new HashSet<>(f.getChunks().values());
for (var cuuid : uuids) {
if (inFile.contains(cuuid)) continue;
var ci = jObjectManager.get(ChunkInfo.getNameFromHash(cuuid));
if (ci.isPresent()) {
ci.get().runWriteLocked(JObject.ResolutionStrategy.NO_RESOLUTION, (m, d, b, v) -> {
m.removeRef(fileUuid);
m.removeRef(f.getName());
return null;
});
jObjectManager.tryQuickDelete(ci.get());
@@ -440,14 +443,13 @@ public class DhfsFileServiceImpl implements DhfsFileService {
var file = fileOpt.get();
// FIXME:
var removedChunksOuter = file.runWriteLocked(JObject.ResolutionStrategy.REMOTE, (meta, fDataU, bump, i) -> {
if (!(fDataU instanceof File))
file.runWriteLocked(JObject.ResolutionStrategy.REMOTE, (meta, fDataU, bump, i) -> {
if (!(fDataU instanceof File fData))
throw new StatusRuntimeException(Status.INVALID_ARGUMENT);
var fData = (File) fDataU;
var chunksAll = fData.getChunks();
var first = chunksAll.floorEntry(offset);
var last = chunksAll.floorEntry((offset + data.length) - 1);
var last = chunksAll.lowerEntry(offset + data.length);
TreeSet<String> removedChunks = new TreeSet<>();
@@ -482,45 +484,47 @@ public class DhfsFileServiceImpl implements DhfsFileService {
int combinedSize = pendingWrites.size();
if (Math.abs(combinedSize - targetChunkSize) > targetChunkSize * 0.1) {
if (combinedSize < targetChunkSize) {
boolean leftDone = false;
boolean rightDone = false;
while (!leftDone && !rightDone) {
if (beforeFirst.isEmpty()) leftDone = true;
if (!beforeFirst.isEmpty() && !leftDone) {
var takeLeft = beforeFirst.lastEntry();
if (targetChunkSize > 0) {
if (Math.abs(combinedSize - targetChunkSize) > targetChunkSize * 0.1) {
if (combinedSize < targetChunkSize) {
boolean leftDone = false;
boolean rightDone = false;
while (!leftDone && !rightDone) {
if (beforeFirst.isEmpty()) leftDone = true;
if (!beforeFirst.isEmpty() && !leftDone) {
var takeLeft = beforeFirst.lastEntry();
var cuuid = takeLeft.getValue();
var cuuid = takeLeft.getValue();
if ((combinedSize + getChunkSize(cuuid)) > (targetChunkSize * 1.2)) {
leftDone = true;
continue;
if ((combinedSize + getChunkSize(cuuid)) > (targetChunkSize * 1.2)) {
leftDone = true;
continue;
}
beforeFirst.pollLastEntry();
start = takeLeft.getKey();
pendingWrites = readChunk(cuuid).concat(pendingWrites);
combinedSize += getChunkSize(cuuid);
chunksAll.remove(takeLeft.getKey());
removedChunks.add(cuuid);
}
if (afterLast.isEmpty()) rightDone = true;
if (!afterLast.isEmpty() && !rightDone) {
var takeRight = afterLast.firstEntry();
beforeFirst.pollLastEntry();
start = takeLeft.getKey();
pendingWrites = pendingWrites.concat(readChunk(cuuid));
combinedSize += getChunkSize(cuuid);
chunksAll.remove(takeLeft.getKey());
removedChunks.add(cuuid);
}
if (afterLast.isEmpty()) rightDone = true;
if (!afterLast.isEmpty() && !rightDone) {
var takeRight = afterLast.firstEntry();
var cuuid = takeRight.getValue();
var cuuid = takeRight.getValue();
if ((combinedSize + getChunkSize(cuuid)) > (targetChunkSize * 1.2)) {
rightDone = true;
continue;
}
if ((combinedSize + getChunkSize(cuuid)) > (targetChunkSize * 1.2)) {
rightDone = true;
continue;
afterLast.pollFirstEntry();
pendingWrites = pendingWrites.concat(readChunk(cuuid));
combinedSize += getChunkSize(cuuid);
chunksAll.remove(takeRight.getKey());
removedChunks.add(cuuid);
}
afterLast.pollFirstEntry();
pendingWrites = readChunk(cuuid).concat(pendingWrites);
combinedSize += getChunkSize(cuuid);
chunksAll.remove(takeRight.getKey());
removedChunks.add(cuuid);
}
}
}
@@ -530,10 +534,15 @@ public class DhfsFileServiceImpl implements DhfsFileService {
int cur = 0;
while (cur < combinedSize) {
int end;
if ((combinedSize - cur) > (targetChunkSize * 1.5)) {
end = cur + targetChunkSize;
} else {
if (targetChunkSize <= 0)
end = combinedSize;
else {
if ((combinedSize - cur) > (targetChunkSize * 1.5)) {
end = cur + targetChunkSize;
} else {
end = combinedSize;
}
}
var thisChunk = pendingWrites.substring(cur, end);
@@ -553,10 +562,10 @@ public class DhfsFileServiceImpl implements DhfsFileService {
bump.apply();
fData.setMtime(System.currentTimeMillis());
return removedChunks;
cleanupChunks(fData, removedChunks);
return null;
});
cleanupChunks(fileUuid, removedChunksOuter);
return (long) data.length;
}
@@ -570,24 +579,21 @@ public class DhfsFileServiceImpl implements DhfsFileService {
}
var file = fileOpt.get();
TreeSet<String> removedChunks = new TreeSet<>();
if (length == 0) {
try {
file.runWriteLocked(JObject.ResolutionStrategy.REMOTE, (m, fileData, bump, i) -> {
if (!(fileData instanceof File f))
throw new StatusRuntimeException(Status.INVALID_ARGUMENT);
bump.apply();
removedChunks.addAll(f.getChunks().values());
f.getChunks().clear();
f.setMtime(System.currentTimeMillis());
cleanupChunks(f, new LinkedHashSet<>(f.getChunks().values()));
return null;
});
} catch (Exception e) {
Log.error("Error writing file chunks: " + fileUuid, e);
return false;
}
cleanupChunks(fileUuid, removedChunks);
return true;
}
@@ -595,36 +601,77 @@ public class DhfsFileServiceImpl implements DhfsFileService {
file.runWriteLocked(JObject.ResolutionStrategy.REMOTE, (m, fDataU, bump, i) -> {
if (!(fDataU instanceof File fData))
throw new StatusRuntimeException(Status.INVALID_ARGUMENT);
var curSize = size(fileUuid);
if (curSize == length) return null;
var chunksAll = fData.getChunks();
var lastChunk = chunksAll.lastEntry();
//FIXME!
var removedChunks = new LinkedHashSet<String>();
if (lastChunk != null) {
var size = getChunkSize(lastChunk.getValue());
var chunkEnd = size + lastChunk.getKey();
if (curSize < length) {
int combinedSize = (int) (length - curSize);
if (chunkEnd == length) return null;
long start = curSize;
if (chunkEnd > length) {
var chunkData = readChunk(lastChunk.getValue());
// Hack
HashMap<Integer, ByteString> zeroCache = new HashMap<>();
ChunkData newChunkData = new ChunkData(chunkData.substring(0, (int) (length - lastChunk.getKey())));
ChunkInfo newChunkInfo = new ChunkInfo(newChunkData.getHash(), newChunkData.getBytes().size());
jObjectManager.put(newChunkData, Optional.of(newChunkInfo.getName()));
jObjectManager.put(newChunkInfo, Optional.of(m.getName()));
jObjectManager.put(newChunkData, Optional.of(newChunkInfo.getName()));
{
int cur = 0;
while (cur < combinedSize) {
int end;
removedChunks.add(lastChunk.getValue());
if (targetChunkSize <= 0)
end = combinedSize;
else {
if ((combinedSize - cur) > (targetChunkSize * 1.5)) {
end = cur + targetChunkSize;
} else {
end = combinedSize;
}
}
chunksAll.put(lastChunk.getKey(), newChunkData.getHash());
} else {
write(fileUuid, chunkEnd, new byte[(int) (length - chunkEnd)]);
if (!zeroCache.containsKey(end - cur))
zeroCache.put(end - cur, UnsafeByteOperations.unsafeWrap(new byte[end - cur]));
ChunkData newChunkData = new ChunkData(zeroCache.get(end - cur));
ChunkInfo newChunkInfo = new ChunkInfo(newChunkData.getHash(), newChunkData.getBytes().size());
//FIXME:
jObjectManager.put(newChunkData, Optional.of(newChunkInfo.getName()));
jObjectManager.put(newChunkInfo, Optional.of(m.getName()));
jObjectManager.put(newChunkData, Optional.of(newChunkInfo.getName()));
chunksAll.put(start, newChunkInfo.getHash());
start += newChunkInfo.getSize();
cur = end;
}
}
} else {
var tail = chunksAll.lowerEntry(length);
var afterTail = chunksAll.tailMap(tail.getKey(), false);
removedChunks.addAll(afterTail.values());
afterTail.clear();
var tailBytes = readChunk(tail.getValue());
var newChunk = tailBytes.substring(0, (int) (length - tail.getKey()));
chunksAll.remove(tail.getKey());
removedChunks.add(tail.getValue());
ChunkData newChunkData = new ChunkData(newChunk);
ChunkInfo newChunkInfo = new ChunkInfo(newChunkData.getHash(), newChunkData.getBytes().size());
//FIXME:
jObjectManager.put(newChunkData, Optional.of(newChunkInfo.getName()));
jObjectManager.put(newChunkInfo, Optional.of(m.getName()));
jObjectManager.put(newChunkData, Optional.of(newChunkInfo.getName()));
chunksAll.put(tail.getKey(), newChunkInfo.getHash());
}
bump.apply();
fData.setMtime(System.currentTimeMillis());
cleanupChunks(fData, removedChunks);
return null;
});
@@ -632,7 +679,6 @@ public class DhfsFileServiceImpl implements DhfsFileService {
Log.error("Error reading file: " + fileUuid, e);
return false;
}
cleanupChunks(fileUuid, removedChunks);
return true;
}

View File

@@ -36,6 +36,9 @@ public class DhfsFuse extends FuseStubFS {
@ConfigProperty(name = "dhfs.fuse.root")
String root;
@ConfigProperty(name = "dhfs.fuse.enabled")
boolean enabled;
@ConfigProperty(name = "dhfs.fuse.debug")
Boolean debug;
@@ -43,6 +46,7 @@ public class DhfsFuse extends FuseStubFS {
DhfsFileService fileService;
void init(@Observes @Priority(100000) StartupEvent event) {
if (!enabled) return;
Paths.get(root).toFile().mkdirs();
Log.info("Mounting with root " + root);
@@ -54,6 +58,7 @@ public class DhfsFuse extends FuseStubFS {
}
void shutdown(@Observes @Priority(1) ShutdownEvent event) {
if (!enabled) return;
Log.info("Unmounting");
umount();
Log.info("Unmounted");

View File

@@ -33,9 +33,9 @@ public class ObjectMetadata implements Serializable {
@Getter
private boolean _locked = false;
private AtomicBoolean _seen = new AtomicBoolean(false);
private final AtomicBoolean _seen = new AtomicBoolean(false);
private AtomicBoolean _deleted = new AtomicBoolean(false);
private final AtomicBoolean _deleted = new AtomicBoolean(false);
public boolean isSeen() {
return _seen.get();

View File

@@ -140,7 +140,7 @@ public class SyncHandler {
Log.info("Outdated update of " + u.getName() + " from " + request.getSelfUuid());
} catch (Exception ex) {
Log.info("Error when handling update from " + request.getSelfUuid() + " of " + u.getName(), ex);
builder.addErrors(IndexUpdateError.newBuilder().setObjectName(u.getName()).setError(ex.toString() + Arrays.toString(ex.getStackTrace())).build());
builder.addErrors(IndexUpdateError.newBuilder().setObjectName(u.getName()).setError(ex + Arrays.toString(ex.getStackTrace())).build());
}
}
return builder.build();

View File

@@ -11,6 +11,7 @@ dhfs.objects.distributed.invalidation.delay=100
dhfs.objects.distributed.reconnect_interval=1s
dhfs.fuse.root=${HOME}/dhfs_data/dhfs_fuse_root
dhfs.fuse.debug=false
dhfs.fuse.enabled=true
dhfs.storage.files.target_chunk_size=1048576
dhfs.objects.writeback.delay=500
dhfs.objects.writeback.limit=10000

View File

@@ -1,150 +1,9 @@
package com.usatiuk.dhfs.files;
import com.google.protobuf.ByteString;
import com.usatiuk.dhfs.storage.files.objects.ChunkData;
import com.usatiuk.dhfs.storage.files.objects.ChunkInfo;
import com.usatiuk.dhfs.storage.files.objects.File;
import com.usatiuk.dhfs.storage.files.service.DhfsFileService;
import com.usatiuk.dhfs.storage.objects.jrepository.JObjectManager;
import io.quarkus.test.junit.QuarkusTest;
import io.quarkus.test.junit.QuarkusTestProfile;
import io.quarkus.test.junit.TestProfile;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import java.util.UUID;
class Profiles {
public static class DhfsFileServiceSimpleTestProfile implements QuarkusTestProfile {
}
}
@QuarkusTest
@TestProfile(Profiles.DhfsFileServiceSimpleTestProfile.class)
public class DhfsFileServiceSimpleTest {
@Inject
DhfsFileService fileService;
@Inject
JObjectManager jObjectManager;
@Test
void readTest() {
var fuuid = UUID.randomUUID();
{
ChunkData c1 = new ChunkData(ByteString.copyFrom("12345".getBytes()));
ChunkInfo c1i = new ChunkInfo(c1.getHash(), c1.getBytes().size());
ChunkData c2 = new ChunkData(ByteString.copyFrom("678".getBytes()));
ChunkInfo c2i = new ChunkInfo(c2.getHash(), c2.getBytes().size());
ChunkData c3 = new ChunkData(ByteString.copyFrom("91011".getBytes()));
ChunkInfo c3i = new ChunkInfo(c3.getHash(), c3.getBytes().size());
File f = new File(fuuid, 777, null);
f.getChunks().put(0L, c1.getHash());
f.getChunks().put((long) c1.getBytes().size(), c2.getHash());
f.getChunks().put((long) c1.getBytes().size() + c2.getBytes().size(), c3.getHash());
// FIXME: dhfs_files
jObjectManager.put(c1, Optional.of(c1i.getName()));
jObjectManager.put(c2, Optional.of(c2i.getName()));
jObjectManager.put(c3, Optional.of(c3i.getName()));
jObjectManager.put(c1i, Optional.of(f.getName()));
jObjectManager.put(c2i, Optional.of(f.getName()));
jObjectManager.put(c3i, Optional.of(f.getName()));
jObjectManager.put(f, Optional.empty());
}
String all = "1234567891011";
{
for (int start = 0; start < all.length(); start++) {
for (int end = start; end <= all.length(); end++) {
var read = fileService.read(fuuid.toString(), start, end - start);
Assertions.assertArrayEquals(all.substring(start, end).getBytes(), read.get().toByteArray());
}
}
}
}
@Test
void writeTest() {
var ret = fileService.create("/writeTest", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.write(uuid, 4, new byte[]{10, 11, 12});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 10, 11, 12, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.write(uuid, 10, new byte[]{13, 14});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 10, 11, 12, 7, 8, 9, 13, 14}, fileService.read(uuid, 0, 12).get().toByteArray());
fileService.write(uuid, 6, new byte[]{15, 16});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 10, 11, 15, 16, 8, 9, 13, 14}, fileService.read(uuid, 0, 12).get().toByteArray());
fileService.write(uuid, 3, new byte[]{17, 18});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 17, 18, 11, 15, 16, 8, 9, 13, 14}, fileService.read(uuid, 0, 12).get().toByteArray());
}
@Test
void truncateTest1() {
var ret = fileService.create("/truncateTest1", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.truncate(uuid, 20);
fileService.write(uuid, 5, new byte[]{10, 11, 12, 13, 14, 15, 16, 17});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16, 17, 0, 0, 0, 0, 0, 0, 0}, fileService.read(uuid, 0, 20).get().toByteArray());
}
@Test
void truncateTest2() {
var ret = fileService.create("/truncateTest2", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.truncate(uuid, 20);
fileService.write(uuid, 10, new byte[]{11, 12, 13, 14, 15, 16, 17, 18, 19, 20});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, fileService.read(uuid, 0, 20).get().toByteArray());
}
@Test
void truncateTest3() {
var ret = fileService.create("/truncateTest3", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.truncate(uuid, 7);
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6,}, fileService.read(uuid, 0, 20).get().toByteArray());
}
@Test
void moveTest() {
var ret = fileService.create("/moveTest", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
Assertions.assertTrue(fileService.rename("/moveTest", "/movedTest"));
Assertions.assertFalse(fileService.open("/moveTest").isPresent());
Assertions.assertTrue(fileService.open("/movedTest").isPresent());
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
fileService.read(fileService.open("/movedTest").get(), 0, 10).get().toByteArray());
}
// Default-profile variant of the shared file-service test suite; all test
// methods are inherited from DhfsFileServiceSimpleTestImpl unchanged.
public class DhfsFileServiceSimpleTest extends DhfsFileServiceSimpleTestImpl {
}

View File

@@ -0,0 +1,173 @@
package com.usatiuk.dhfs.files;
import com.google.protobuf.ByteString;
import com.usatiuk.dhfs.storage.files.objects.ChunkData;
import com.usatiuk.dhfs.storage.files.objects.ChunkInfo;
import com.usatiuk.dhfs.storage.files.objects.File;
import com.usatiuk.dhfs.storage.files.service.DhfsFileService;
import com.usatiuk.dhfs.storage.objects.jrepository.JObjectManager;
import io.quarkus.test.junit.QuarkusTestProfile;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
// Quarkus test profiles used by the file-service test suite. Each profile
// disables the FUSE mount (dhfs.fuse.enabled=false) so tests exercise the
// service layer directly, and optionally overrides the target chunk size.
class Profiles {
// Default profile: FUSE disabled, default chunking configuration.
public static class DhfsFileServiceSimpleTestProfile implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
var ret = new HashMap<String, String>();
ret.put("dhfs.fuse.enabled", "false");
return ret;
}
}
// Chunking disabled: target_chunk_size=-1 (a non-positive size; presumably
// the write path then stores data as a single chunk — confirm against
// DhfsFileServiceImpl, which guards on targetChunkSize <= 0).
public static class DhfsFileServiceSimpleTestProfileNoChunking implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
var ret = new HashMap<String, String>();
ret.put("dhfs.fuse.enabled", "false");
ret.put("dhfs.storage.files.target_chunk_size", "-1");
return ret;
}
}
// Tiny chunks (3 bytes) so multi-chunk splitting/merging paths are hit
// even by the small byte arrays used in these tests.
public static class DhfsFileServiceSimpleTestProfileSmallChunking implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
var ret = new HashMap<String, String>();
ret.put("dhfs.fuse.enabled", "false");
ret.put("dhfs.storage.files.target_chunk_size", "3");
return ret;
}
}
}
// Shared test suite for DhfsFileService. Subclasses (one per Quarkus test
// profile) inherit these methods unchanged; the profile only varies the
// configuration (FUSE off, target chunk size) under which they run.
public class DhfsFileServiceSimpleTestImpl {
// Service under test, injected by the Quarkus test container.
@Inject
DhfsFileService fileService;
// Used to seed chunk/file objects directly into the object store.
@Inject
JObjectManager jObjectManager;
// Builds a file out of three manually-created chunks ("12345", "678",
// "91011") and verifies that read() returns the correct bytes for every
// possible (start, end) window over the concatenated content.
@Test
void readTest() {
var fuuid = UUID.randomUUID();
{
// NOTE(review): getBytes() uses the platform default charset here;
// fine for ASCII fixtures, but StandardCharsets.UTF_8 would be safer.
ChunkData c1 = new ChunkData(ByteString.copyFrom("12345".getBytes()));
ChunkInfo c1i = new ChunkInfo(c1.getHash(), c1.getBytes().size());
ChunkData c2 = new ChunkData(ByteString.copyFrom("678".getBytes()));
ChunkInfo c2i = new ChunkInfo(c2.getHash(), c2.getBytes().size());
ChunkData c3 = new ChunkData(ByteString.copyFrom("91011".getBytes()));
ChunkInfo c3i = new ChunkInfo(c3.getHash(), c3.getBytes().size());
File f = new File(fuuid, 777, null);
// Chunk map is keyed by the starting byte offset of each chunk.
f.getChunks().put(0L, c1.getHash());
f.getChunks().put((long) c1.getBytes().size(), c2.getHash());
f.getChunks().put((long) c1.getBytes().size() + c2.getBytes().size(), c3.getHash());
// FIXME: dhfs_files
// Each ChunkData is parented to its ChunkInfo, each ChunkInfo to the file.
jObjectManager.put(c1, Optional.of(c1i.getName()));
jObjectManager.put(c2, Optional.of(c2i.getName()));
jObjectManager.put(c3, Optional.of(c3i.getName()));
jObjectManager.put(c1i, Optional.of(f.getName()));
jObjectManager.put(c2i, Optional.of(f.getName()));
jObjectManager.put(c3i, Optional.of(f.getName()));
jObjectManager.put(f, Optional.empty());
}
String all = "1234567891011";
{
// Exhaustively check every substring window, including empty reads
// (end == start) and reads crossing chunk boundaries.
for (int start = 0; start < all.length(); start++) {
for (int end = start; end <= all.length(); end++) {
var read = fileService.read(fuuid.toString(), start, end - start);
Assertions.assertArrayEquals(all.substring(start, end).getBytes(), read.get().toByteArray());
}
}
}
}
// Overlapping writes: verifies overwrite in the middle, append past the
// end, and writes spanning existing chunk boundaries.
@Test
void writeTest() {
var ret = fileService.create("/writeTest", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.write(uuid, 4, new byte[]{10, 11, 12});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 10, 11, 12, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.write(uuid, 10, new byte[]{13, 14});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 10, 11, 12, 7, 8, 9, 13, 14}, fileService.read(uuid, 0, 12).get().toByteArray());
fileService.write(uuid, 6, new byte[]{15, 16});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 10, 11, 15, 16, 8, 9, 13, 14}, fileService.read(uuid, 0, 12).get().toByteArray());
fileService.write(uuid, 3, new byte[]{17, 18});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 17, 18, 11, 15, 16, 8, 9, 13, 14}, fileService.read(uuid, 0, 12).get().toByteArray());
}
// Truncate-extend: growing a 10-byte file to 20 must zero-fill the tail,
// and a subsequent partial write must leave the zero padding intact.
@Test
void truncateTest1() {
var ret = fileService.create("/truncateTest1", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.truncate(uuid, 20);
fileService.write(uuid, 5, new byte[]{10, 11, 12, 13, 14, 15, 16, 17});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16, 17, 0, 0, 0, 0, 0, 0, 0}, fileService.read(uuid, 0, 20).get().toByteArray());
}
// Truncate-extend followed by a write that exactly covers the new region.
@Test
void truncateTest2() {
var ret = fileService.create("/truncateTest2", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.truncate(uuid, 20);
fileService.write(uuid, 10, new byte[]{11, 12, 13, 14, 15, 16, 17, 18, 19, 20});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, fileService.read(uuid, 0, 20).get().toByteArray());
}
// Truncate-shrink: cutting a 10-byte file to 7 must drop the tail, and a
// read past EOF must return only the remaining bytes.
@Test
void truncateTest3() {
var ret = fileService.create("/truncateTest3", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
fileService.truncate(uuid, 7);
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6,}, fileService.read(uuid, 0, 20).get().toByteArray());
}
// Rename: the old path must stop resolving, the new path must resolve,
// and the content must be preserved across the move.
@Test
void moveTest() {
var ret = fileService.create("/moveTest", 777);
Assertions.assertTrue(ret.isPresent());
var uuid = ret.get();
fileService.write(uuid, 0, new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, fileService.read(uuid, 0, 10).get().toByteArray());
Assertions.assertTrue(fileService.rename("/moveTest", "/movedTest"));
Assertions.assertFalse(fileService.open("/moveTest").isPresent());
Assertions.assertTrue(fileService.open("/movedTest").isPresent());
Assertions.assertArrayEquals(new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
fileService.read(fileService.open("/movedTest").get(), 0, 10).get().toByteArray());
}
}

View File

@@ -0,0 +1,9 @@
package com.usatiuk.dhfs.files;
import io.quarkus.test.junit.QuarkusTest;
import io.quarkus.test.junit.TestProfile;
// Runs the shared file-service suite with chunking disabled
// (target_chunk_size=-1 via DhfsFileServiceSimpleTestProfileNoChunking).
@QuarkusTest
@TestProfile(Profiles.DhfsFileServiceSimpleTestProfileNoChunking.class)
public class DhfsFileServiceSimpleTestNoChunkingTest extends DhfsFileServiceSimpleTestImpl {
}

View File

@@ -0,0 +1,9 @@
package com.usatiuk.dhfs.files;
import io.quarkus.test.junit.QuarkusTest;
import io.quarkus.test.junit.TestProfile;
// Runs the shared file-service suite with a 3-byte target chunk size so
// the small test payloads exercise multi-chunk split/merge code paths.
@QuarkusTest
@TestProfile(Profiles.DhfsFileServiceSimpleTestProfileSmallChunking.class)
public class DhfsFileServiceSimpleTestSmallChunkingTest extends DhfsFileServiceSimpleTestImpl {
}