
projection

Task for 3D->2D maximum-intensity projection.

projection(*, zarr_url, init_args)

Perform intensity projection along Z axis with a chosen method.

Note: this task stores the output in a new zarr file.

PARAMETER DESCRIPTION
zarr_url

Path or url to the individual OME-Zarr image to be processed. (standard argument for Fractal tasks, managed by Fractal server).

TYPE: str

init_args

Initialization arguments provided by create_cellvoyager_ome_zarr_init.

TYPE: InitArgsMIP
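
A minimal sketch of a direct invocation, assuming InitArgsMIP can be imported from fractal_tasks_core.tasks.io_models and using hypothetical paths; within a Fractal workflow, both arguments are filled in by the Fractal server from the preceding init task.

from fractal_tasks_core.tasks.io_models import InitArgsMIP
from fractal_tasks_core.tasks.projection import projection

# Hypothetical paths; depending on the package version, InitArgsMIP may
# require fields beyond the ones this task reads (origin_url, method,
# overwrite).
init_args = InitArgsMIP(
    origin_url="/data/plate.zarr/B/03/0",  # existing 3D OME-Zarr image
    method="mip",                          # maximum-intensity projection
    overwrite=False,
)

update = projection(
    zarr_url="/data/plate_mip.zarr/B/03/0",  # new 2D OME-Zarr image to create
    init_args=init_args,
)
print(update["image_list_updates"])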

Source code in fractal_tasks_core/tasks/projection.py
@validate_call
def projection(
    *,
    # Fractal parameters
    zarr_url: str,
    init_args: InitArgsMIP,
) -> dict[str, Any]:
    """
    Perform intensity projection along Z axis with a chosen method.

    Note: this task stores the output in a new zarr file.

    Args:
        zarr_url: Path or url to the individual OME-Zarr image to be processed.
            (standard argument for Fractal tasks, managed by Fractal server).
        init_args: Initialization arguments provided by
            `create_cellvoyager_ome_zarr_init`.
    """
    method = DaskProjectionMethod(init_args.method)
    logger.info(f"{init_args.origin_url=}")
    logger.info(f"{zarr_url=}")
    logger.info(f"{method=}")

    # Read image metadata
    ngff_image = load_NgffImageMeta(init_args.origin_url)
    # Currently not using the validation models due to wavelength_id issue
    # See #681 for discussion
    # new_attrs = ngff_image.model_dump(exclude_none=True)
    # Current way to get the necessary metadata for MIP
    group = zarr.open_group(init_args.origin_url, mode="r")
    new_attrs = group.attrs.asdict()

    # Create the zarr image with the correct attributes
    new_image_group = zarr.group(zarr_url)
    new_image_group.attrs.put(new_attrs)

    # Load 0-th level
    data_czyx = da.from_zarr(init_args.origin_url + "/0")
    num_channels = data_czyx.shape[0]
    chunksize_y = data_czyx.chunksize[-2]
    chunksize_x = data_czyx.chunksize[-1]
    logger.info(f"{num_channels=}")
    logger.info(f"{chunksize_y=}")
    logger.info(f"{chunksize_x=}")

    # Loop over channels
    accumulate_chl = []
    for ind_ch in range(num_channels):
        # Perform MIP for each channel of level 0
        project_yx = da.stack(
            [method.apply(data_czyx[ind_ch], axis=0)], axis=0
        )
        accumulate_chl.append(project_yx)
    accumulated_array = da.stack(accumulate_chl, axis=0)

    # Write to disk (triggering execution)
    try:
        accumulated_array.to_zarr(
            f"{zarr_url}/0",
            overwrite=init_args.overwrite,
            dimension_separator="/",
            write_empty_chunks=False,
        )
    except ContainsArrayError as e:
        error_msg = (
            f"Cannot write array to zarr group at '{zarr_url}/0', "
            f"with {init_args.overwrite=} (original error: {str(e)}).\n"
            "Hint: try setting overwrite=True."
        )
        logger.error(error_msg)
        raise OverwriteNotAllowedError(error_msg)

    # Starting from on-disk highest-resolution data, build and write to disk a
    # pyramid of coarser levels
    build_pyramid(
        zarrurl=zarr_url,
        overwrite=init_args.overwrite,
        num_levels=ngff_image.num_levels,
        coarsening_xy=ngff_image.coarsening_xy,
        chunksize=(1, 1, chunksize_y, chunksize_x),
    )

    # Copy over any tables from the original zarr
    # Generate the list of tables:
    tables = get_tables_list_v1(init_args.origin_url)
    roi_tables = get_tables_list_v1(init_args.origin_url, table_type="ROIs")
    non_roi_tables = [table for table in tables if table not in roi_tables]

    for table in roi_tables:
        logger.info(
            f"Reading {table} from "
            f"{init_args.origin_url=}, converting it to 2D, and "
            "writing it back to the new zarr file."
        )
        new_ROI_table = ad.read_zarr(f"{init_args.origin_url}/tables/{table}")
        old_ROI_table_attrs = zarr.open_group(
            f"{init_args.origin_url}/tables/{table}"
        ).attrs.asdict()

        # Convert 3D ROIs to 2D
        pxl_sizes_zyx = ngff_image.get_pixel_sizes_zyx(level=0)
        new_ROI_table = convert_ROIs_from_3D_to_2D(
            new_ROI_table, pixel_size_z=pxl_sizes_zyx[0]
        )
        # Write new table
        write_table(
            new_image_group,
            table,
            new_ROI_table,
            table_attrs=old_ROI_table_attrs,
            overwrite=init_args.overwrite,
        )

    for table in non_roi_tables:
        logger.info(
            f"Reading {table} from "
            f"{init_args.origin_url=}, and "
            "writing it back to the new zarr file."
        )
        new_non_ROI_table = ad.read_zarr(
            f"{init_args.origin_url}/tables/{table}"
        )
        old_non_ROI_table_attrs = zarr.open_group(
            f"{init_args.origin_url}/tables/{table}"
        ).attrs.asdict()

        # Write new table
        write_table(
            new_image_group,
            table,
            new_non_ROI_table,
            table_attrs=old_non_ROI_table_attrs,
            overwrite=init_args.overwrite,
        )

    # Generate image_list_updates
    image_list_update_dict = dict(
        image_list_updates=[
            dict(
                zarr_url=zarr_url,
                origin=init_args.origin_url,
                types=dict(is_3D=False),
            )
        ]
    )
    return image_list_update_dict
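
Conceptually, the per-channel loop above reduces the Z axis of the level-0 CZYX array to a single plane while keeping a singleton Z dimension, so the output stays CZYX-shaped. A self-contained sketch of that step for the maximum-intensity case, using a random in-memory array instead of an OME-Zarr level:

import dask.array as da
import numpy as np

# Toy CZYX stack: 2 channels, 10 Z planes, 64 x 64 pixels.
data_czyx = da.from_array(
    np.random.randint(0, 2**16, size=(2, 10, 64, 64), dtype=np.uint16),
    chunks=(1, 10, 32, 32),
)

# Project each channel along Z; wrapping the (Y, X) result in da.stack
# restores a singleton Z axis, mirroring the loop in the task.
accumulate_chl = []
for ind_ch in range(data_czyx.shape[0]):
    project_yx = da.stack([data_czyx[ind_ch].max(axis=0)], axis=0)
    accumulate_chl.append(project_yx)
accumulated_array = da.stack(accumulate_chl, axis=0)

print(accumulated_array.shape)  # (2, 1, 64, 64)

In the task itself the reduction is dispatched through DaskProjectionMethod via method.apply, so any other projection method supported by that enum follows the same path.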