
Commit

[#543] Fix issue with parallel downloads to a directory
Add get/put test for large files
qubixes authored and alanking committed Apr 30, 2024
1 parent: 9206646 · commit: 3312d29
Showing 2 changed files with 12 additions and 4 deletions.
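
For context on the fix: the affected code path is a get() whose local destination is a directory and whose data object is large enough to be transferred with multiple threads. A minimal reproduction sketch follows (not part of the commit; the connection settings, paths, and pre-existing large object are assumptions for illustration):

    import os
    from irods.session import iRODSSession

    # Placeholder connection details; substitute a real environment, or use
    # iRODSSession(irods_env_file=...) as appropriate.
    with iRODSSession(host='localhost', port=1247, user='rods',
                      password='rods', zone='tempZone') as sess:

        # Assume this object already exists in iRODS and is larger than
        # data_object_manager.MAXIMUM_SINGLE_THREADED_TRANSFER_SIZE, so the
        # download takes the parallel code path touched by this commit.
        logical_path = '/tempZone/home/rods/large.bin'

        local_dir = '/tmp/downloads'   # a directory, not a full file path
        os.makedirs(local_dir, exist_ok=True)

        # Before the fix, this call could fail because parallel_get() received
        # the directory path instead of the joined file path.
        sess.data_objects.get(logical_path, local_dir)
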
2 changes: 1 addition & 1 deletion irods/manager/data_object_manager.py
@@ -142,7 +142,7 @@ def _download(self, obj, local_path, num_threads, **options):
         with self.open(obj, 'r', returned_values = data_open_returned_values_, **options) as o:
             if self.should_parallelize_transfer (num_threads, o, open_options = options.items()):
                 f.close()
-                if not self.parallel_get( (obj,o), local_path, num_threads = num_threads,
+                if not self.parallel_get( (obj,o), local_file, num_threads = num_threads,
                                           target_resource_name = options.get(kw.RESC_NAME_KW,''),
                                           data_open_returned_values = data_open_returned_values_):
                     raise RuntimeError("parallel get failed")
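
Why the one-word change matters: _download() accepts either a full file path or a directory as the local destination, and when a directory is given the real target is obtained by joining the data object's name onto it. parallel_get() therefore has to receive that resolved path (local_file), not the caller's original argument (local_path). A simplified, stand-alone sketch of the idea, not the library's exact code, with names chosen for illustration:

    import os

    def resolve_local_file(obj_name, local_path):
        """Return the file path a download should write to.

        If local_path is an existing directory, the object's name is appended,
        roughly what the data object manager does before opening the local file;
        the parallel transfer must then be pointed at this resolved path.
        """
        if os.path.isdir(local_path):
            return os.path.join(local_path, obj_name)
        return local_path

    if __name__ == '__main__':
        os.makedirs('/tmp/downloads', exist_ok=True)
        assert resolve_local_file('large.bin', '/tmp/downloads') == '/tmp/downloads/large.bin'
        assert resolve_local_file('large.bin', '/tmp/downloads/copy.bin') == '/tmp/downloads/copy.bin'
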
14 changes: 11 additions & 3 deletions irods/test/data_obj_test.py
@@ -1304,7 +1304,15 @@ def test_get_replica_size(self):
         resource.remove()


-    def test_obj_put_get(self):
+    def test_obj_put_get_small(self):
+        # Test put/get with 16M binary file that will be transferred with a single thread.
+        self._check_obj_put_get(1024 * 1024 * 16)
+
+    def test_obj_put_get_large(self):
+        # Test put/get with binary file that is large enough to trigger parallel transfers.
+        self._check_obj_put_get(data_object_manager.MAXIMUM_SINGLE_THREADED_TRANSFER_SIZE + 1)
+
+    def _check_obj_put_get(self, file_size):
         # Can't do one step open/create with older servers
         if self.sess.server_version <= (4, 1, 4):
             self.skipTest('For iRODS 4.1.5 and newer')
@@ -1315,9 +1323,9 @@ def test_obj_put_get(self):
         test_file = os.path.join(test_dir, filename)
         collection = self.coll.path

-        # make random 16M binary file
+        # make random binary file
         with open(test_file, 'wb') as f:
-            f.write(os.urandom(1024 * 1024 * 16))
+            f.write(os.urandom(file_size))

         # compute file checksum
         digest = self.sha256_checksum(test_file)
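
The test above verifies round-trip integrity with self.sha256_checksum(); that helper is defined elsewhere in the test suite, but a stand-alone equivalent would presumably look like the following chunked digest (a sketch, not the suite's actual helper):

    import hashlib

    def sha256_checksum(file_path, chunk_size=65536):
        """Hex SHA-256 of a local file, read in chunks so the large test file
        used for the parallel-transfer case need not fit in memory."""
        hasher = hashlib.sha256()
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                hasher.update(chunk)
        return hasher.hexdigest()
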
