Mirror of https://github.com/usatiuk/dhfs.git (synced 2025-10-29 04:57:48 +01:00)
add some checks
why does it break?
@@ -339,6 +339,11 @@ public class DhfsFileServiceImpl implements DhfsFileService {
 
     @Override
     public Optional<ByteString> read(String fileUuid, long offset, int length) {
+        if (length < 0)
+            throw new StatusRuntimeException(Status.INVALID_ARGUMENT.withDescription("Length should be more than zero: " + length));
+        if (offset < 0)
+            throw new StatusRuntimeException(Status.INVALID_ARGUMENT.withDescription("Offset should be more than zero: " + offset));
+
         var fileOpt = jObjectManager.get(fileUuid);
         if (fileOpt.isEmpty()) {
             Log.error("File not found when trying to read: " + fileUuid);
@@ -453,6 +458,9 @@ public class DhfsFileServiceImpl implements DhfsFileService {
 
     @Override
     public Long write(String fileUuid, long offset, byte[] data) {
+        if (offset < 0)
+            throw new StatusRuntimeException(Status.INVALID_ARGUMENT.withDescription("Offset should be more than zero: " + offset));
+
         var fileOpt = jObjectManager.get(fileUuid);
         if (fileOpt.isEmpty()) {
             Log.error("File not found when trying to read: " + fileUuid);
@@ -590,6 +598,9 @@ public class DhfsFileServiceImpl implements DhfsFileService {
 
     @Override
     public Boolean truncate(String fileUuid, long length) {
+        if (length < 0)
+            throw new StatusRuntimeException(Status.INVALID_ARGUMENT.withDescription("Length should be more than zero: " + length));
+
         var fileOpt = jObjectManager.get(fileUuid);
         if (fileOpt.isEmpty()) {
             Log.error("File not found when trying to read: " + fileUuid);
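Note on the three DhfsFileServiceImpl hunks above: each new guard rejects a negative offset or length before the jObjectManager lookup and surfaces it as a gRPC INVALID_ARGUMENT. A minimal test sketch of that contract follows; the test harness and the TestServices helper are hypothetical, only read/write/truncate and the expected status code come from the diff.

import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;

class DhfsFileServiceArgumentTest {
    // Hypothetical helper: assumes some test-scoped way to obtain the service.
    DhfsFileService fileService = TestServices.fileService();

    @Test
    void negativeArgumentsAreRejected() {
        var uuid = "00000000-0000-0000-0000-000000000000"; // any syntactically valid id

        // read(): a negative length should fail fast with INVALID_ARGUMENT
        var e1 = assertThrows(StatusRuntimeException.class, () -> fileService.read(uuid, 0, -1));
        assertEquals(Status.Code.INVALID_ARGUMENT, e1.getStatus().getCode());

        // write(): same contract for a negative offset
        var e2 = assertThrows(StatusRuntimeException.class, () -> fileService.write(uuid, -1, new byte[0]));
        assertEquals(Status.Code.INVALID_ARGUMENT, e2.getStatus().getCode());

        // truncate(): same contract for a negative length
        var e3 = assertThrows(StatusRuntimeException.class, () -> fileService.truncate(uuid, -1));
        assertEquals(Status.Code.INVALID_ARGUMENT, e3.getStatus().getCode());
    }
}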
@@ -628,17 +639,17 @@ public class DhfsFileServiceImpl implements DhfsFileService {
         var removedChunks = new LinkedHashSet<String>();
 
         if (curSize < length) {
-            int combinedSize = (int) (length - curSize);
+            long combinedSize = (length - curSize);
 
             long start = curSize;
 
             // Hack
-            HashMap<Integer, ByteString> zeroCache = new HashMap<>();
+            HashMap<Long, ByteString> zeroCache = new HashMap<>();
 
             {
-                int cur = 0;
+                long cur = 0;
                 while (cur < combinedSize) {
-                    int end;
+                    long end;
 
                     if (targetChunkSize <= 0)
                         end = combinedSize;
@@ -651,7 +662,7 @@ public class DhfsFileServiceImpl implements DhfsFileService {
                     }
 
                     if (!zeroCache.containsKey(end - cur))
-                        zeroCache.put(end - cur, UnsafeByteOperations.unsafeWrap(new byte[end - cur]));
+                        zeroCache.put(end - cur, UnsafeByteOperations.unsafeWrap(new byte[Math.toIntExact(end - cur)]));
 
                     ChunkData newChunkData = createChunk(zeroCache.get(end - cur));
                     ChunkInfo newChunkInfo = new ChunkInfo(newChunkData.getHash(), newChunkData.getBytes().size());
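The two hunks above widen the zero-fill bookkeeping in truncate() from int to long: combinedSize, cur, and end can now exceed Integer.MAX_VALUE, the zero-block cache is keyed by Long, and Math.toIntExact guards the byte[] allocation, since a Java array is still int-indexed and each individual chunk must fit in an int. A self-contained sketch of the same idea follows; the clamping to targetChunkSize and all names below are illustrative assumptions, not the project's API.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

// Sketch of the zero-fill chunking idea from the truncate() hunks: grow a file
// from curSize to length by emitting zero chunks of at most targetChunkSize
// bytes, reusing one zero buffer per distinct chunk length.
class ZeroFillSketch {
    static List<byte[]> zeroChunks(long curSize, long length, int targetChunkSize) {
        long combinedSize = length - curSize;          // may exceed Integer.MAX_VALUE
        HashMap<Long, byte[]> zeroCache = new HashMap<>();
        List<byte[]> chunks = new ArrayList<>();

        long cur = 0;
        while (cur < combinedSize) {
            long end = (targetChunkSize <= 0)
                    ? combinedSize
                    : Math.min(cur + targetChunkSize, combinedSize); // clamp is an assumption
            long chunkLen = end - cur;
            // Each individual chunk must still fit a Java array (int-sized);
            // Math.toIntExact throws instead of silently overflowing.
            chunks.add(zeroCache.computeIfAbsent(chunkLen,
                    len -> new byte[Math.toIntExact(len)]));
            cur = end;
        }
        return chunks;
    }
}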
@@ -147,6 +147,8 @@ public class DhfsFuse extends FuseStubFS {
 
     @Override
     public int read(String path, Pointer buf, long size, long offset, FuseFileInfo fi) {
+        if (size < 0) return -ErrorCodes.EINVAL();
+        if (offset < 0) return -ErrorCodes.EINVAL();
         try {
             var fileOpt = fileService.open(path);
             if (fileOpt.isEmpty()) return -ErrorCodes.ENOENT();
@@ -163,6 +165,7 @@ public class DhfsFuse extends FuseStubFS {
 
     @Override
     public int write(String path, Pointer buf, long size, long offset, FuseFileInfo fi) {
+        if (offset < 0) return -ErrorCodes.EINVAL();
         try {
             var fileOpt = fileService.open(path);
             if (fileOpt.isEmpty()) return -ErrorCodes.ENOENT();
@@ -239,6 +242,7 @@ public class DhfsFuse extends FuseStubFS {
 
     @Override
     public int truncate(String path, long size) {
+        if (size < 0) return -ErrorCodes.EINVAL();
         try {
             var fileOpt = fileService.open(path);
             if (fileOpt.isEmpty()) return -ErrorCodes.ENOENT();
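The DhfsFuse hunks apply the same validation at the FUSE boundary, where callbacks report failure as a negative errno value rather than by throwing. A minimal sketch of that guard pattern follows, using only the jnr-fuse types the class already references; the delegation to the real file service is omitted, so the read body here is a stand-in.

import jnr.ffi.Pointer;
import ru.serce.jnrfuse.ErrorCodes;
import ru.serce.jnrfuse.FuseStubFS;
import ru.serce.jnrfuse.struct.FuseFileInfo;

// Sketch of the errno-style guard pattern from the DhfsFuse hunks:
// validate kernel-supplied arguments up front and return -EINVAL on nonsense.
class GuardedFuseSketch extends FuseStubFS {
    @Override
    public int read(String path, Pointer buf, long size, long offset, FuseFileInfo fi) {
        if (size < 0) return -ErrorCodes.EINVAL();   // nonsensical request size
        if (offset < 0) return -ErrorCodes.EINVAL(); // nonsensical file position
        // ... delegate to the real file service here; this sketch reads 0 bytes
        return 0;
    }
}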
@@ -173,7 +173,7 @@ public class JObjectWriteback {
         if (_objects.size() < limit) {
             if (overload) {
                 overload = false;
-                Log.info("Writeback cache enabled");
+                Log.trace("Writeback cache enabled");
             }
             _objects.put(name, Pair.of(System.currentTimeMillis(), object));
             _objects.notifyAll();
@@ -184,7 +184,7 @@ public class JObjectWriteback {
         try {
             if (!overload) {
                 overload = true;
-                Log.trace("Writeback cache disabled");
+                Log.trace("Writeback cache disabled");
             }
             flushOneImmediate(object.getMeta(), object.getData());
         } catch (Exception e) {
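The JObjectWriteback hunks only demote the overload enter/exit messages from info to trace; the surrounding context shows the toggle itself, which queues objects while under the limit and flushes immediately once over it. A rough, self-contained sketch of that pattern follows, with println stand-ins for the logger and a placeholder flush method; it is an illustration, not the class's implementation.

import java.util.HashMap;
import java.util.Map;

// Sketch of the overload toggle visible in the JObjectWriteback context lines:
// queue writes while under the limit, fall back to immediate flushing when over
// it, and log the state transitions at trace level so steady-state churn is quiet.
class WritebackSketch<T> {
    private final Map<String, T> _objects = new HashMap<>();
    private final int limit;
    private boolean overload = false;

    WritebackSketch(int limit) { this.limit = limit; }

    synchronized void submit(String name, T object) {
        if (_objects.size() < limit) {
            if (overload) {
                overload = false;
                System.out.println("TRACE: Writeback cache enabled");
            }
            _objects.put(name, object);   // queued for the background flusher
            notifyAll();                  // wake the flusher thread
        } else {
            if (!overload) {
                overload = true;
                System.out.println("TRACE: Writeback cache disabled");
            }
            flushImmediately(name, object);
        }
    }

    void flushImmediately(String name, T object) {
        // stand-in for flushOneImmediate(meta, data) in the real class
    }
}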