diff options
| author | Dan Engelbrecht <[email protected]> | 2024-04-26 10:12:03 +0200 |
|---|---|---|
| committer | GitHub Enterprise <[email protected]> | 2024-04-26 10:12:03 +0200 |
| commit | ef1b4234c851131cf5a6d249684c14c5c27d2878 (patch) | |
| tree | afd972c077b2585c2dc336b79eb1d31d02372295 /src/zenstore/compactcas.cpp | |
| parent | fix oplog import during gcv2 (#62) (diff) | |
| download | zen-ef1b4234c851131cf5a6d249684c14c5c27d2878.tar.xz zen-ef1b4234c851131cf5a6d249684c14c5c27d2878.zip | |
use direct file access for large file hash (#63)
- Improvement: Refactor `IoHash::HashBuffer` and `BLAKE3::HashBuffer` to not use memory mapped files. Performs better and saves ~10% of oplog export time on CI
Diffstat (limited to 'src/zenstore/compactcas.cpp')
| -rw-r--r-- | src/zenstore/compactcas.cpp | 8 |
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/zenstore/compactcas.cpp b/src/zenstore/compactcas.cpp
index 7b11200a5..d6e5efdaa 100644
--- a/src/zenstore/compactcas.cpp
+++ b/src/zenstore/compactcas.cpp
@@ -1310,7 +1310,7 @@ TEST_CASE("compactcas.compact.gc")
 	CbObject Obj = Cbo.Save();
 	IoBuffer ObjBuffer = Obj.GetBuffer().AsIoBuffer();
-	const IoHash Hash = HashBuffer(ObjBuffer);
+	const IoHash Hash = IoHash::HashBuffer(ObjBuffer);
 	Cas.InsertChunk(ObjBuffer, Hash);
@@ -1370,7 +1370,7 @@ TEST_CASE("compactcas.compact.totalsize")
 	for (int32_t Idx = 0; Idx < kChunkCount; ++Idx)
 	{
 		IoBuffer Chunk = CreateRandomBlob(kChunkSize);
-		const IoHash Hash = HashBuffer(Chunk);
+		const IoHash Hash = IoHash::HashBuffer(Chunk);
 		CasStore::InsertResult InsertResult = Cas.InsertChunk(Chunk, Hash);
 		ZEN_ASSERT(InsertResult.New);
 	}
@@ -1823,7 +1823,7 @@ TEST_CASE_TEMPLATE("compactcas.threadedinsert", GCV2, FalseType, TrueType)
 	while (true)
 	{
 		IoBuffer Chunk = CreateRandomBlob(kChunkSize);
-		IoHash Hash = HashBuffer(Chunk);
+		IoHash Hash = IoHash::HashBuffer(Chunk);
 		if (Chunks.contains(Hash))
 		{
 			continue;
 		}
@@ -1892,7 +1892,7 @@ TEST_CASE_TEMPLATE("compactcas.threadedinsert", GCV2, FalseType, TrueType)
 	for (int32_t Idx = 0; Idx < kChunkCount; ++Idx)
 	{
 		IoBuffer Chunk = CreateRandomBlob(kChunkSize);
-		IoHash Hash = HashBuffer(Chunk);
+		IoHash Hash = IoHash::HashBuffer(Chunk);
 		NewChunks[Hash] = Chunk;
 		GcChunkHashes.insert(Hash);
 	}