Skip to content

Commit

Permalink
tests - timeout fix
Browse files Browse the repository at this point in the history
Signed-off-by: Romy <[email protected]>
  • Loading branch information
romayalon committed Dec 8, 2024
1 parent 5ed32a4 commit 77ac1a6
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 8 deletions.
19 changes: 15 additions & 4 deletions src/test/unit_tests/test_file_writer.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,22 @@
'use strict';

const mocha = require('mocha');
const config = require('../../../config');
const file_writer_hashing = require('../../tools/file_writer_hashing');
const orig_iov_max = config.NSFS_DEFAULT_IOV_MAX;

// on iov_max small tests we need to use smaller amount of parts and chunks to ensure that the test will finish
// in a reasonable period of time because we will flush max 1/2 buffers at a time.
const small_iov_num_parts = 20;


mocha.describe('FileWriter', function() {
const RUN_TIMEOUT = 10 * 60 * 1000;

// Restore the saved IOV_MAX after every test so that tests which lower
// config.NSFS_DEFAULT_IOV_MAX (e.g. the iov_max=1/2 cases) do not leak
// the reduced value into subsequent tests.
mocha.afterEach(function() {
config.NSFS_DEFAULT_IOV_MAX = orig_iov_max;
});

mocha.it('Concurrent FileWriter with hash target', async function() {
const self = this;
self.timeout(RUN_TIMEOUT);
Expand All @@ -23,25 +34,25 @@ mocha.describe('FileWriter', function() {
mocha.it('Concurrent FileWriter with hash target - iov_max=1', async function() {
    const self = this;
    self.timeout(RUN_TIMEOUT);
    // Stale pre-image diff line removed: `hash_target(1)` would bind 1 to the
    // new first parameter (chunk_size), not iov_max, and run the test twice.
    // Use fewer parts (small_iov_num_parts) so the test finishes in reasonable
    // time when at most 1 buffer is flushed per write.
    await file_writer_hashing.hash_target(undefined, small_iov_num_parts, 1);
});

mocha.it('Concurrent FileWriter with file target - iov_max=1', async function() {
    const self = this;
    self.timeout(RUN_TIMEOUT);
    // Stale pre-image diff line removed: the old call passed `undefined` for the
    // parts count, which made the test too slow with iov_max=1. Use
    // small_iov_num_parts so the test finishes in reasonable time.
    await file_writer_hashing.file_target(undefined, small_iov_num_parts, 1);
});

mocha.it('Concurrent FileWriter with hash target - iov_max=2', async function() {
    const self = this;
    self.timeout(RUN_TIMEOUT);
    // Stale pre-image diff line removed: `hash_target(2)` would bind 2 to the
    // new first parameter (chunk_size), not iov_max, and run the test twice.
    // Use fewer parts so the test finishes in reasonable time with iov_max=2.
    await file_writer_hashing.hash_target(undefined, small_iov_num_parts, 2);
});

mocha.it('Concurrent FileWriter with file target - iov_max=2', async function() {
    const self = this;
    self.timeout(RUN_TIMEOUT);
    // Stale pre-image diff line removed: the old call passed `undefined` for the
    // parts count, which made the test too slow with iov_max=2. Use
    // small_iov_num_parts so the test finishes in reasonable time.
    await file_writer_hashing.file_target(undefined, small_iov_num_parts, 2);
});

mocha.it('Concurrent FileWriter with file target - produce num_chunks > 1024 && total_chunks_size < config.NSFS_BUF_SIZE_L', async function() {
Expand Down
8 changes: 4 additions & 4 deletions src/tools/file_writer_hashing.js
Original file line number Diff line number Diff line change
Expand Up @@ -59,15 +59,15 @@ function assign_md5_to_fs_xattr(md5_digest, fs_xattr) {
return fs_xattr;
}

async function hash_target(iov_max = IOV_MAX) {
async function hash_target(chunk_size = CHUNK, parts = PARTS, iov_max = IOV_MAX) {
config.NSFS_DEFAULT_IOV_MAX = iov_max;
await P.map_with_concurrency(CONCURRENCY, Array(PARTS).fill(), async () => {
await P.map_with_concurrency(CONCURRENCY, Array(parts).fill(), async () => {
const data = crypto.randomBytes(PART_SIZE);
const content_md5 = crypto.createHash('md5').update(data).digest('hex');
// Using async generator function in order to push data in small chunks
const source_stream = stream.Readable.from(async function*() {
for (let i = 0; i < data.length; i += CHUNK) {
yield data.slice(i, i + CHUNK);
for (let i = 0; i < data.length; i += chunk_size) {
yield data.slice(i, i + chunk_size);
}
}());
const target = new TargetHash();
Expand Down

0 comments on commit 77ac1a6

Please sign in to comment.