diff --git a/src/ark/utils/deepcell_service_utils.py b/src/ark/utils/deepcell_service_utils.py
index 9d81d092a..d5bd8b2b8 100644
--- a/src/ark/utils/deepcell_service_utils.py
+++ b/src/ark/utils/deepcell_service_utils.py
@@ -1,7 +1,6 @@
 import os
 import time
 import warnings
-from concurrent.futures import ThreadPoolExecutor
 from io import BytesIO
 from json import JSONDecodeError
 from pathlib import Path
@@ -11,17 +10,92 @@
 import numpy as np
 import requests
 from alpineer import image_utils, io_utils, load_utils, misc_utils
-from requests.adapters import HTTPAdapter
-from requests.exceptions import RetryError
 from tifffile import imread
 from tqdm.notebook import tqdm
-from urllib3 import Retry
+
+
+def zip_input_files(deepcell_input_dir, fov_group, batch_num):
+    """Helper function that zips the input images for a batch of fovs into a single zip file.
+
+    Args:
+        deepcell_input_dir (str): path to where deepcell input image files are stored
+        fov_group (list): list of fovs to process in this batch
+        batch_num (int): the batch number
+    Returns:
+        str: path to deepcell input zip file
+    """
+
+    # write all files to the zip file
+    zip_path = os.path.join(deepcell_input_dir, f'fovs_batch_{batch_num}.zip')
+
+    # create zip files, skip any existing
+    if not os.path.exists(zip_path):
+        with ZipFile(zip_path, 'w', compression=ZIP_DEFLATED) as zipObj:
+            for fov in fov_group:
+                # file has .tiff extension
+                basename = fov + '.tiff'
+                filename = os.path.join(deepcell_input_dir, basename)
+                zipObj.write(filename, basename)
+
+    return zip_path
+
+
+def extract_deepcell_response(deepcell_output_dir, fov_group, batch_num, wc_suffix, nuc_suffix):
+    """Helper function to extract the segmentation masks from the deepcell output zip file.
+
+    Args:
+        deepcell_output_dir (str):
+            path to where deepcell output zips are stored
+        fov_group (list):
+            list of fovs to process in this batch
+        batch_num (int):
+            the batch number
+        wc_suffix (str):
+            Suffix for whole cell DeepCell output filename. e.g. for fovX, DeepCell output
+            should be `fovX+suffix.tif`.
+            Whole cell DeepCell files by default get suffixed with `'feature_0'`,
+            it will be renamed to this arg.
+        nuc_suffix (str):
+            Suffix for nuclear DeepCell output filename. e.g. for fovX, DeepCell output
+            should be `fovX+suffix.tif`.
+            Nuclear DeepCell files by default get suffixed with `'feature_1'`,
+            it will be renamed to this arg.
+ """ + + # extract the .tif output + batch_zip = os.path.join( + deepcell_output_dir, f"deepcell_response_fovs_batch_{batch_num}.zip") + + with ZipFile(batch_zip, "r") as zipObj: + for name in zipObj.namelist(): + # this files will only ever be suffixed with feature_0.tiff or feature_1.tiff + if '_feature_0.tif' in name: + resuffixed_name = name.replace('_feature_0', wc_suffix) + else: + resuffixed_name = name.replace('_feature_1', nuc_suffix) + + mask_path = os.path.join(deepcell_output_dir, resuffixed_name) + + # DeepCell uses .tif extension, append extra f to account for .tiff standard + mask_path += 'f' + + # read the file from the .zip file and save as segmentation mask + byte_repr = zipObj.read(name) + ranked_segmentation_mask = (_convert_deepcell_seg_masks(byte_repr)).squeeze() + image_utils.save_image(mask_path, ranked_segmentation_mask) + + # verify that all the files were extracted + for fov in fov_group: + if fov + '_feature_0.tif' not in zipObj.namelist(): + warnings.warn(f'Deep Cell whole cell output file was not found for {fov}.') + if fov + '_feature_1.tif' not in zipObj.namelist(): + warnings.warn(f'Deep Cell nuclear output file was not found for {fov}.') def create_deepcell_output(deepcell_input_dir, deepcell_output_dir, fovs=None, wc_suffix='_whole_cell', nuc_suffix='_nuclear', host='https://deepcell.org', job_type='mesmer', - scale=1.0, timeout=3600, zip_size=5, parallel=False): + scale=1.0, timeout=300, zip_size=5): """Handles all of the necessary data manipulation for running deepcell tasks. Creates .zip files (to be used as input for DeepCell), calls run_deepcell_task method, @@ -57,13 +131,10 @@ def create_deepcell_output(deepcell_input_dir, deepcell_output_dir, fovs=None, Default: 1.0 timeout (int): Approximate seconds until timeout. - Default: 1 hour (3600) + Default: 5 minutes (300) zip_size (int): Maximum number of files to include in zip. - Default: 100 - parallel (bool): - Tries to zip, upload, and extract zip files in parallel - Default: False + Default: 5 Raises: ValueError: Raised if there is some fov X (from fovs list) s.t. 
@@ -99,85 +170,49 @@ def create_deepcell_output(deepcell_input_dir, deepcell_output_dir, fovs=None,
     print(f'Processing tiffs in {len(fov_groups)} batches...')
 
-    # yes this is function, don't worry about it
-    # long story short, too many args to pass if function not in local scope
-    # i.e easier to map fov_groups
-    def _zip_run_extract(fov_group, group_index):
-        # define the location of the zip file for our fovs
-        zip_path = os.path.join(deepcell_input_dir, f'fovs_batch_{group_index + 1}.zip')
-
-        if os.path.isfile(zip_path):
-            warnings.warn(f'{zip_path} will be overwritten')
-
-        # write all files to the zip file
-        print('Zipping preprocessed tiff files.')
-
-        def zip_write(zip_path):
-            with ZipFile(zip_path, 'w', compression=ZIP_DEFLATED) as zipObj:
-                for fov in fov_group:
-                    # file has .tiff extension
-                    basename = fov + '.tiff'
-                    filename = os.path.join(deepcell_input_dir, basename)
-                    zipObj.write(filename, basename)
-
-        zip_write(zip_path)
-
-        # pass the zip file to deepcell.org
-        print('Uploading files to DeepCell server.')
-        status = run_deepcell_direct(
-            zip_path, deepcell_output_dir, host, job_type, scale, timeout
-        )
-
-        # ensure execution is halted if run_deepcell_direct returned non-zero exit code
-        if status != 0:
-            print("The following FOVs could not be processed: %s" % ','.join(fov_group))
-            return
-
-        # extract the .tif output
-        print("Extracting tif files from DeepCell response.")
-        zip_names = io_utils.list_files(deepcell_output_dir, substrs=[".zip"])
-
-        zip_files = [os.path.join(deepcell_output_dir, name) for name in zip_names]
-
-        # sort by newest added
-        zip_files.sort(key=os.path.getmtime)
-
-        with ZipFile(zip_files[-1], "r") as zipObj:
-            for name in zipObj.namelist():
-                # this files will only ever be suffixed with feature_0.tiff or feature_1.tiff
-                if '_feature_0.tif' in name:
-                    resuffixed_name = name.replace('_feature_0', wc_suffix)
-                else:
-                    resuffixed_name = name.replace('_feature_1', nuc_suffix)
-
-                mask_path = os.path.join(deepcell_output_dir, resuffixed_name)
+    unprocessed_fovs = {}
+    for batch_num, fov_group in enumerate(fov_groups, start=1):
+        # create zipped input files
+        input_zip_path = zip_input_files(deepcell_input_dir, fov_group, batch_num)
+
+        # skip any batch that already has a DeepCell response zip
+        batch_filename = Path(input_zip_path).name
+        output_zip_path = os.path.join(deepcell_output_dir, "deepcell_response_" + batch_filename)
+        if os.path.exists(output_zip_path):
+            print(f"Skipping previously processed batch_{batch_num}.")
+
+        # upload to deepcell
+        total_time, status = 0, 0
+        start = time.time()
+        while not os.path.exists(output_zip_path) and total_time < timeout:
+            # pass the zip file to deepcell.org
+            status = run_deepcell_direct(
+                input_zip_path, deepcell_output_dir, host, job_type, scale, timeout
+            )
 
-                # DeepCell uses .tif extension, append extra f to account for .tiff standard
-                mask_path += 'f'
+            # successful deepcell response
+            if status == 0:
+                # extract segmentation masks from deepcell output
+                extract_deepcell_response(deepcell_output_dir, fov_group, batch_num, wc_suffix,
+                                          nuc_suffix)
+                break
 
-                # read the file from the .zip file and save as segmentation mask
-                byte_repr = zipObj.read(name)
-                ranked_segmentation_mask = (_convert_deepcell_seg_masks(byte_repr)).squeeze()
-                image_utils.save_image(mask_path, ranked_segmentation_mask)
+            total_time = time.time() - start
 
-            # verify that all the files were extracted
-            for fov in fov_group:
-                if fov + '_feature_0.tif' not in zipObj.namelist():
-                    warnings.warn(f'Deep Cell whole cell output file was not found for {fov}.')
-                if fov + '_feature_1.tif' not in zipObj.namelist():
-                    warnings.warn(f'Deep Cell nuclear output file was not found for {fov}.')
+        if status != 0:
+            unprocessed_fovs[batch_num] = fov_group
+            if total_time >= timeout:
+                print(f"This batch exceeded the allotted processing time of {timeout / 60} minutes "
+                      f"and will be skipped.")
 
-    # make calls in parallel
-    if parallel:
-        with ThreadPoolExecutor() as executor:
-            executor.map(_zip_run_extract, fov_groups, range(len(fov_groups)))
-            executor.shutdown(wait=True)
-    else:
-        list(map(_zip_run_extract, fov_groups, range(len(fov_groups))))
+    if unprocessed_fovs:
+        print("\nThe following batches were not processed:")
+        for batch in unprocessed_fovs.keys():
+            print(f"fovs_batch_{batch} {unprocessed_fovs[batch]}")
 
 
 def run_deepcell_direct(input_dir, output_dir, host='https://deepcell.org',
-                        job_type='mesmer', scale=1.0, timeout=3600, num_retries=5):
+                        job_type='mesmer', scale=1.0, timeout=300):
     """Uses direct calls to DeepCell API and saves output to output_dir.
 
     Args:
@@ -195,9 +230,7 @@ def run_deepcell_direct(input_dir, output_dir, host='https://deepcell.org',
             Default: 1.0
         timeout (int):
             Approximate seconds until timeout.
-            Default: 1 hour (3600)
-        num_retries (int):
-            The maximum number of times to call the Deepcell API in case of failure
+            Default: 5 minutes (300)
     """
 
     # upload zip file
@@ -210,46 +243,20 @@ def run_deepcell_direct(input_dir, output_dir, host='https://deepcell.org',
         }
         f.seek(0)
 
-        # define and mount a retry instance to call the Deepcell API again if needed
-        retry_strategy = Retry(
-            total=num_retries,
-            status_forcelist=[404, 500, 502, 503, 504],
-            allowed_methods=['HEAD', 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']
-        )
-        adapter = HTTPAdapter(max_retries=retry_strategy)
-
-        http = requests.Session()
-        http.mount('https://', adapter)
-        http.mount('http://', adapter)
-
-        total_retries = 0
-        while total_retries < num_retries:
-            # handles the case if the main endpoint can't be reached
-            try:
-                upload_response = http.post(
-                    upload_url,
-                    timeout=timeout,
-                    files=upload_fields
-                )
-            except RetryError as re:
-                print(re)
-                return 1
-
-            # handles the case if the endpoint returns an invalid JSON
-            # indicating an internal API error
-            try:
-                upload_response = upload_response.json()
-            except JSONDecodeError as jde:
-                total_retries += 1
-                continue
-
-            # if we reach the end no errors were encountered on this attempt
-            break
-
-        # if the JSON could not be decoded num_retries number of times
-        if total_retries == num_retries:
-            print("The JSON response from DeepCell could not be decoded after %d attempts" %
-                  num_retries)
+        try:
+            upload_response = requests.post(
+                upload_url,
+                timeout=timeout,
+                files=upload_fields
+            )
+        except (requests.ConnectionError, requests.ReadTimeout) as e:
+            return 1
+
+        # handles the case if the endpoint returns an invalid JSON
+        # indicating an internal API error
+        try:
+            upload_response = upload_response.json()
+        except JSONDecodeError as jde:
             return 1
 
     # call prediction
@@ -271,7 +278,8 @@ def run_deepcell_direct(input_dir, output_dir, host='https://deepcell.org',
     # check redis every 3 seconds
     redis_url = host + '/api/redis'
 
-    print('Segmentation progress:')
+    batch_num = (io_utils.remove_file_extensions([filename])[0]).split("_")[-1]
+    print(f'Segmentation progress for batch_{batch_num}:')
 
     progress_bar = tqdm(total=100,
                         bar_format='{l_bar}{bar}| {n_fmt}/{total_fmt} [{elapsed}<{remaining}]')
@@ -303,14 +311,19 @@ def run_deepcell_direct(input_dir, output_dir, host='https://deepcell.org',
         total_time += 3
 
     progress_bar.close()
+    # indicate failure if the allotted timeout was exceeded
+    if total_time >= timeout:
+        return 1
+
     # when done, download result or examine errors
     if len(redis_response['value'][4]) > 0:
         # error happened
         print(f"Encountered Failure(s): {unquote_plus(redis_response['value'][4])}")
+        return 1
 
     deepcell_output = requests.get(redis_response['value'][2], allow_redirects=True)
 
-    with open(os.path.join(output_dir, "deepcell_response.zip"), mode="wb") as f:
+    with open(os.path.join(output_dir, "deepcell_response_" + filename), mode="wb") as f:
         f.write(deepcell_output.content)
 
     # being kind and sending an expire signal to deepcell
@@ -319,7 +332,7 @@ def run_deepcell_direct(input_dir, output_dir, host='https://deepcell.org',
         expire_url,
         json={
             'hash': predict_hash,
-            'expireIn': 3600,
+            'expireIn': 90,
         }
     )
diff --git a/tests/utils/deepcell_service_utils_test.py b/tests/utils/deepcell_service_utils_test.py
index 612b2e154..e24822494 100644
--- a/tests/utils/deepcell_service_utils_test.py
+++ b/tests/utils/deepcell_service_utils_test.py
@@ -2,43 +2,115 @@
 import os
 import pathlib
 import tempfile
+import time
 from zipfile import ZipFile
+from pathlib import Path
 
 import numpy as np
 import pytest
 import tifffile
-from alpineer import image_utils, test_utils
+from alpineer import image_utils, test_utils, io_utils
 from pytest_mock import MockerFixture
 from skimage import io
+from unittest.mock import call, patch
 
 from ark.utils import deepcell_service_utils
-from ark.utils.deepcell_service_utils import (_convert_deepcell_seg_masks,
-                                              create_deepcell_output)
+from ark.utils.deepcell_service_utils import (_convert_deepcell_seg_masks, create_deepcell_output,
+                                              zip_input_files, extract_deepcell_response)
 
 
-def mocked_run_deepcell(in_zip_path, output_dir, host, job_type, scale, timeout):
+def mocked_zip_input(input_dir, fovs, batch_num):
     fov_data = np.ones(shape=(10, 10), dtype="float32")
-    fov_seg_pairs = list(itertools.product(range(1, 4), ['feature_0', 'feature_1']))
+    for fov in fovs:
+        image_utils.save_image(os.path.join(input_dir, f'{fov}.tiff'), fov_data)
 
-    for i, seg_type in fov_seg_pairs:
-        image_utils.save_image(os.path.join(output_dir, f'fov{i}_{seg_type}.tif'), fov_data)
+    zip_input_files(input_dir, fovs, batch_num)
 
-    batch_num = int(in_zip_path.split('.')[0].split('_')[-1])
-    if batch_num < 2:
-        zip_path = os.path.join(output_dir, 'example_output.zip')
+
+def mocked_bad_run_deepcell(in_zip_path, output_dir, host, job_type, scale, timeout):
+    return mocked_run_deepcell(
+        in_zip_path, output_dir, host, job_type, scale, timeout, missing=True)
+
+
+def mocked_run_deepcell(in_zip_path, output_dir, host, job_type, scale, timeout, missing=False):
+    fov_data = np.ones(shape=(10, 10), dtype="float32")
+    with ZipFile(in_zip_path, 'r') as zipObj:
+        fovs = io_utils.remove_file_extensions(zipObj.namelist())
+
+    if missing:
+        fov_seg_pairs = list(itertools.product(fovs, ['feature_0']))
     else:
-        zip_path = os.path.join(output_dir, f'example_output_{batch_num}.zip')
+        fov_seg_pairs = list(itertools.product(fovs, ['feature_0', 'feature_1']))
+
+    # temp write output files for zip
+    for fov, seg_type in fov_seg_pairs:
+        image_utils.save_image(os.path.join(output_dir, f'{fov}_{seg_type}.tif'), fov_data)
+
+    batch_num = int(in_zip_path.split('.')[0].split('_')[-1])
+    zip_path = os.path.join(output_dir, f'deepcell_response_fovs_batch_{batch_num}.zip')
+    # write deepcell output into zip file
     with ZipFile(zip_path, 'w') as zipObj:
-        if batch_num > 1:
-            return
-        for i, seg_type in fov_seg_pairs:
-            filename = os.path.join(output_dir, f'fov{i}_{seg_type}.tif')
+        for fov, seg_type in fov_seg_pairs:
+            filename = os.path.join(output_dir, f'{fov}_{seg_type}.tif')
             zipObj.write(filename, os.path.basename(filename))
             os.remove(filename)
 
     return 0
 
 
+def test_zip_input_files():
+    with tempfile.TemporaryDirectory() as temp_dir:
+        fov_data = np.ones(shape=(10, 10), dtype="float32")
+        image_utils.save_image(os.path.join(temp_dir, 'fov1.tiff'), fov_data)
+        image_utils.save_image(os.path.join(temp_dir, 'fov2.tiff'), fov_data)
+
+        # test successful zipping
+        zip_path = zip_input_files(temp_dir, fov_group=["fov1", "fov2"], batch_num=1)
+        create_time = Path(zip_path).stat().st_ctime
+
+        # check zip contents
+        with ZipFile(os.path.join(temp_dir, 'fovs_batch_1.zip'), 'r') as zip_batch1:
+            assert zip_batch1.namelist() == ['fov1.tiff', 'fov2.tiff']
+
+        # test previously zipped batches are not re-zipped
+        time.sleep(3)
+        zip_path = zip_input_files(temp_dir, fov_group=["fov1", "fov2"], batch_num=1)
+        modify_time = Path(zip_path).stat().st_mtime
+
+        # check zip file was not overwritten
+        assert np.isclose(modify_time, create_time)
+
+
+def test_extract_deepcell_response():
+    with tempfile.TemporaryDirectory() as temp_dir:
+        mocked_zip_input(temp_dir, ["fov1", "fov2", "fov3"], 1)
+        mocked_run_deepcell(os.path.join(temp_dir, "fovs_batch_1.zip"), temp_dir,
+                            host='https://deepcell.org', job_type='mesmer', scale=1.0,
+                            timeout=3600)
+
+        # test successful extraction
+        extract_deepcell_response(temp_dir, ["fov1", "fov2", "fov3"], 1, wc_suffix="_whole_cell",
+                                  nuc_suffix="_nuclear")
+
+        assert os.path.exists(os.path.join(temp_dir, 'fov1_whole_cell.tiff'))
+        assert os.path.exists(os.path.join(temp_dir, 'fov1_nuclear.tiff'))
+        assert os.path.exists(os.path.join(temp_dir, 'fov2_whole_cell.tiff'))
+        assert os.path.exists(os.path.join(temp_dir, 'fov2_nuclear.tiff'))
+        assert os.path.exists(os.path.join(temp_dir, 'fov3_whole_cell.tiff'))
+        assert os.path.exists(os.path.join(temp_dir, 'fov3_nuclear.tiff'))
+
+    with tempfile.TemporaryDirectory() as temp_dir:
+        mocked_zip_input(temp_dir, ["fov1", "fov2", "fov3"], 1)
+        mocked_bad_run_deepcell(os.path.join(temp_dir, "fovs_batch_1.zip"), temp_dir,
+                                host='https://deepcell.org', job_type='mesmer', scale=1.0,
+                                timeout=3600)
+
+        # DeepCell nuclear output .tif file does not exist
+        with pytest.warns(UserWarning):
+            extract_deepcell_response(temp_dir, ["fov1", "fov2", "fov3"], 1,
+                                      wc_suffix="_whole_cell", nuc_suffix="_nuclear")
+
+
 def test_create_deepcell_output(mocker: MockerFixture):
     with tempfile.TemporaryDirectory() as temp_dir:
         mocker.patch('ark.utils.deepcell_service_utils.run_deepcell_direct', mocked_run_deepcell)
@@ -69,28 +141,23 @@ def test_create_deepcell_output(mocker: MockerFixture):
                                    scale='test')
 
             # make sure DeepCell (.zip) output exists
-            assert os.path.exists(os.path.join(output_dir, 'example_output.zip'))
+            assert os.path.exists(os.path.join(output_dir, 'deepcell_response_fovs_batch_1.zip'))
 
             # DeepCell output .zip file should be extracted
             assert os.path.exists(os.path.join(output_dir, 'fov1_whole_cell.tiff'))
             assert os.path.exists(os.path.join(output_dir, 'fov1_nuclear.tiff'))
             assert os.path.exists(os.path.join(output_dir, 'fov2_whole_cell.tiff'))
             assert os.path.exists(os.path.join(output_dir, 'fov2_nuclear.tiff'))
+            # check fov3 not processed
+            assert not os.path.exists(os.path.join(output_dir, 'fov3_whole_cell.tiff'))
 
             # test for 2d shape
             whole_cell_arr = io.imread(os.path.join(output_dir, 'fov1_whole_cell.tiff'))
             nuclear_arr = io.imread(os.path.join(output_dir, 'fov1_nuclear.tiff'))
             assert len(whole_cell_arr.shape) == len(nuclear_arr.shape) == 2
 
-        with tempfile.TemporaryDirectory() as output_dir:
-
-            # test parallel
-            create_deepcell_output(deepcell_input_dir=input_dir, deepcell_output_dir=output_dir,
-                                   fovs=['fov1', 'fov2'], zip_size=1, parallel=True)
-
-            # make sure DeepCell (.zip's) output exists
-            assert os.path.exists(os.path.join(output_dir, 'example_output.zip'))
-            assert os.path.exists(os.path.join(output_dir, 'example_output_2.zip'))
+        for batch_zip in io_utils.list_files(input_dir, substrs=".zip"):
+            os.remove(os.path.join(input_dir, batch_zip))
 
         with tempfile.TemporaryDirectory() as output_dir:
 
@@ -99,52 +166,30 @@ def test_create_deepcell_output(mocker: MockerFixture):
                                    fovs=['fov1', 'fov2.tiff', 'fov3.tiff'])
 
             # make sure DeepCell (.zip) output exists
-            assert os.path.exists(os.path.join(output_dir, 'example_output.zip'))
-
-            # DeepCell output .zip file should be extracted
+            assert os.path.exists(os.path.join(output_dir, 'deepcell_response_fovs_batch_1.zip'))
             assert os.path.exists(os.path.join(output_dir, 'fov1_whole_cell.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov1_nuclear.tiff'))
             assert os.path.exists(os.path.join(output_dir, 'fov2_whole_cell.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov2_nuclear.tiff'))
             assert os.path.exists(os.path.join(output_dir, 'fov3_whole_cell.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov3_nuclear.tiff'))
 
-        with tempfile.TemporaryDirectory() as output_dir:
+        for batch_zip in io_utils.list_files(input_dir, substrs=".zip"):
+            os.remove(os.path.join(input_dir, batch_zip))
+
+        with tempfile.TemporaryDirectory() as output_dir:
             # if fovs is None, all .tiff files in input dir should be taken
             create_deepcell_output(deepcell_input_dir=input_dir, deepcell_output_dir=output_dir)
 
             # make sure DeepCell (.zip) output exists
-            assert os.path.exists(os.path.join(output_dir, 'example_output.zip'))
-
-            assert os.path.exists(os.path.join(output_dir, 'fov1_whole_cell.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov1_nuclear.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov2_whole_cell.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov2_nuclear.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov3_whole_cell.tiff'))
-            assert os.path.exists(os.path.join(output_dir, 'fov3_nuclear.tiff'))
-
-            pathlib.Path(os.path.join(input_dir, 'fovs.zip')).touch()
-
-            # Warning should be displayed if fovs.zip file exists (will be overwritten)
-            with pytest.warns(UserWarning):
-                create_deepcell_output(deepcell_input_dir=input_dir,
-                                       deepcell_output_dir=output_dir, fovs=['fov1'])
+            assert os.path.exists(os.path.join(output_dir, 'deepcell_response_fovs_batch_1.zip'))
+            with ZipFile(os.path.join(output_dir, 'deepcell_response_fovs_batch_1.zip'), 'r') \
+                    as zip_batch1:
+                assert sorted(zip_batch1.namelist()) == \
+                    ['fov1_feature_0.tif', 'fov1_feature_1.tif', 'fov2_feature_0.tif',
+                     'fov2_feature_1.tif', 'fov3_feature_0.tif', 'fov3_feature_1.tif']
 
-            # DeepCell whole_cell output .tif file does not exist for some fov
-            with pytest.warns(UserWarning):
-                create_deepcell_output(deepcell_input_dir=input_dir,
-                                       deepcell_output_dir=output_dir,
-                                       wc_suffix='_other_suffix',
-                                       fovs=['fov1'])
-
-            # DeepCell nuclear output .tif file does not exist for some fov
-            with pytest.warns(UserWarning):
-                create_deepcell_output(deepcell_input_dir=input_dir,
-                                       deepcell_output_dir=output_dir,
-                                       nuc_suffix='_other_suffix',
-                                       fovs=['fov1'])
+        for batch_zip in io_utils.list_files(input_dir, substrs=".zip"):
+            os.remove(os.path.join(input_dir, batch_zip))
+
         with tempfile.TemporaryDirectory() as output_dir:
             # add additional fov for auto-batch testing
             pathlib.Path(os.path.join(input_dir, 'fov4.tiff')).touch()
@@ -160,8 +205,14 @@ def test_create_deepcell_output(mocker: MockerFixture):
                 assert zip_batch1.namelist() == ['fov1.tiff', 'fov2.tiff', 'fov3.tiff']
             with ZipFile(os.path.join(input_dir, 'fovs_batch_2.zip'), 'r') as zip_batch2:
                 assert zip_batch2.namelist() == ['fov4.tiff']
+            # check output for extra fov batch
+            assert os.path.exists(os.path.join(output_dir, "deepcell_response_fovs_batch_2.zip"))
+
+        for batch_zip in io_utils.list_files(input_dir, substrs=".zip"):
+            os.remove(os.path.join(input_dir, batch_zip))
 
-        # ValueError should be raised if .tiff file does not exists for some fov in fovs
+        with tempfile.TemporaryDirectory() as output_dir:
+            # ValueError should be raised if .tiff file does not exist for some fov in fovs
             with pytest.raises(ValueError):
                 create_deepcell_output(deepcell_input_dir=input_dir, deepcell_output_dir=output_dir,
                                        fovs=['fov1', 'fov5'])
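
Reviewer note: below is a minimal usage sketch of the reworked batching flow introduced in this diff, assuming an already populated DeepCell input directory. The directory layout and fov names are illustrative only and are not part of this change.

    import os
    from ark.utils.deepcell_service_utils import create_deepcell_output

    # hypothetical example paths, substitute your own dataset layout
    base_dir = "data/example_dataset"
    deepcell_input_dir = os.path.join(base_dir, "deepcell_input")
    deepcell_output_dir = os.path.join(base_dir, "deepcell_output")

    # fovs are zipped zip_size at a time into fovs_batch_N.zip, each batch is uploaded
    # and polled until deepcell_response_fovs_batch_N.zip appears or the timeout
    # (default 300 seconds) elapses, and successful batches are extracted into
    # <fov>_whole_cell.tiff and <fov>_nuclear.tiff masks
    create_deepcell_output(
        deepcell_input_dir=deepcell_input_dir,
        deepcell_output_dir=deepcell_output_dir,
        fovs=["fov1", "fov2"],  # illustrative fov names
        zip_size=5,
        timeout=300,
    )

Because batches with an existing deepcell_response_*.zip are skipped, the call can be re-run after a partial failure and only the unprocessed batches will be resubmitted.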