API Reference for process_helper

ProcessHelper

Source code in pbp/process_helper.py
class ProcessHelper:
    def __init__(
        self,
        log,  # : loguru.Logger,
        file_helper: FileHelper,
        output_dir: str,
        output_prefix: str,
        gen_netcdf: bool = True,
        compress_netcdf: bool = True,
        add_quality_flag: bool = False,
        global_attrs_uri: Optional[str] = None,
        set_global_attrs: Optional[list[list[str]]] = None,
        variable_attrs_uri: Optional[str] = None,
        voltage_multiplier: Optional[float] = None,
        sensitivity_uri: Optional[str] = None,
        sensitivity_flat_value: Optional[float] = None,
        max_segments: int = 0,
        subset_to: Optional[Tuple[int, int]] = None,
    ):
        """
        Initializes the processor.

        Args:
            file_helper: File loader.
            output_dir: Output directory.
            output_prefix: Output filename prefix.
            gen_netcdf (bool): Whether to generate the NetCDF file.
            compress_netcdf (bool): Whether to compress the generated NetCDF file.
            add_quality_flag (bool): Whether to add a quality flag variable (with value 2 - "Not evaluated") to the NetCDF file.
            global_attrs_uri (str): URI of a JSON file with global attributes to be added to the NetCDF file.
            set_global_attrs (list[list[str]]): List of (key, value) pairs to be considered for the global attributes.
            variable_attrs_uri (str): URI of a JSON file with variable attributes to be added to the NetCDF file.
            voltage_multiplier (float): Factor applied to the loaded signal.
            sensitivity_uri (str, optional): URI of a sensitivity NetCDF file for calibration of results.
                Has precedence over `sensitivity_flat_value`.
            sensitivity_flat_value (float, optional): Flat sensitivity value used for calibration.
            max_segments (int, optional): Maximum number of segments to process for each day. Defaults to 0 (no limit).
            subset_to (tuple[int, int], optional): Frequency limits for the PSD (lower inclusive, upper exclusive).
        """
        self.log = log

        self.log.info(
            "Creating ProcessHelper:"
            + f"\n    output_dir:             {output_dir}"
            + f"\n    output_prefix:          {output_prefix}"
            + f"\n    gen_netcdf:             {gen_netcdf}"
            + f"\n    compress_netcdf:        {compress_netcdf}"
            + f"\n    add_quality_flag:       {add_quality_flag}"
            + f"\n    global_attrs_uri:       {global_attrs_uri}"
            + f"\n    set_global_attrs:       {set_global_attrs}"
            + f"\n    variable_attrs_uri:     {variable_attrs_uri}"
            + f"\n    voltage_multiplier:     {voltage_multiplier}"
            + f"\n    sensitivity_uri:        {sensitivity_uri}"
            + f"\n    sensitivity_flat_value: {sensitivity_flat_value}"
            + (
                f"\n    max_segments:           {max_segments}"
                if max_segments > 0
                else ""
            )
            + f"\n    subset_to:              {subset_to}"
            + "\n"
        )
        self.file_helper = file_helper
        self.output_dir = output_dir
        self.output_prefix = output_prefix
        self.gen_netcdf = gen_netcdf
        self.compress_netcdf = compress_netcdf
        self.add_quality_flag = add_quality_flag

        self.metadata_helper = MetadataHelper(
            self.log,
            self._load_attributes("global", global_attrs_uri, set_global_attrs),
            self._load_attributes("variable", variable_attrs_uri),
        )

        self.max_segments = max_segments
        self.subset_to = subset_to

        self.voltage_multiplier: Optional[float] = voltage_multiplier

        self.sensitivity_da: Optional[xr.DataArray] = None
        self.sensitivity_flat_value: Optional[float] = sensitivity_flat_value

        if sensitivity_uri is not None:
            s_local_filename = file_helper.get_local_filename(sensitivity_uri)
            if s_local_filename is not None:
                sensitivity_ds = xr.open_dataset(s_local_filename)
                self.log.info(f"Will use loaded sensitivity from {s_local_filename=}")
                self.sensitivity_da = sensitivity_ds.sensitivity
                self.log.debug(f"{self.sensitivity_da=}")
            else:
                self.log.error(
                    f"Unable to resolve sensitivity_uri: '{sensitivity_uri}'. Ignoring it."
                )

        if self.sensitivity_da is None and self.sensitivity_flat_value is not None:
            self.log.info(
                f"Will use given flat sensitivity value: {sensitivity_flat_value}"
            )

        self.pypam_support = PypamSupport(self.log)

        pathlib.Path(output_dir).mkdir(exist_ok=True)

    def _load_attributes(
        self,
        what: str,
        attrs_uri: Optional[str],
        set_attrs: Optional[list[list[str]]] = None,
    ) -> Optional[OrderedDict[str, Any]]:
        if attrs_uri:
            self.log.info(f"Loading {what} attributes from {attrs_uri=}")
            filename = self.file_helper.get_local_filename(attrs_uri)
            if os.name == "nt" and filename is not None:
                filename = filename[3:]
            if filename is not None:
                with open(filename, "r", encoding="UTF-8") as f:
                    res = parse_attributes(f.read(), pathlib.Path(filename).suffix)
                    for k, v in set_attrs or []:
                        res[k] = v
                    return res
            else:
                self.log.error(f"Unable to resolve '{attrs_uri=}'. Ignoring it.")
        else:
            self.log.info(f"No '{what}' attributes URI given.")
        return None

    def process_day(self, date: str) -> Optional[ProcessDayResult]:
        """
        Generates a NetCDF file with the result of processing all segments of the given day.

        Args:
            date (str): Date to process in YYYYMMDD format.

        Returns:
            The result, or None if no segments were processed for the day.
        """
        year, month, day = parse_date(date)
        if not self.file_helper.select_day(year, month, day):
            return None

        at_hour_and_minutes: List[Tuple[int, int]] = list(
            gen_hour_minute_times(self.file_helper.segment_size_in_mins)
        )

        if self.max_segments > 0:
            at_hour_and_minutes = at_hour_and_minutes[: self.max_segments]
            self.log.info(f"NOTE: Limiting to {len(at_hour_and_minutes)} segments ...")

        self.process_hours_minutes(at_hour_and_minutes)

        result: Optional[ProcessResult] = self.pypam_support.process_captured_segments(
            sensitivity_da=self.sensitivity_da,
        )

        if result is None:
            self.log.warning(
                f"No segments processed, nothing to aggregate for day {date}."
            )
            return None

        psd_da = result.psd_da

        # rename 'frequency_bins' dimension to 'frequency':
        psd_da = psd_da.swap_dims(frequency_bins="frequency")

        data_vars = {
            "psd": psd_da,
            "effort": result.effort_da,
        }

        if self.sensitivity_da is not None:
            freq_subset = self.sensitivity_da.interp(frequency=psd_da.frequency)
            data_vars["sensitivity"] = freq_subset

        elif self.sensitivity_flat_value is not None:
            # better way to capture a scalar?
            data_vars["sensitivity"] = xr.DataArray(
                data=[self.sensitivity_flat_value],
                dims=["1"],
            ).astype(np.float32)

        if self.add_quality_flag:
            data_vars["quality_flag"] = xr.DataArray(
                data=np.full(psd_da.shape, DEFAULT_QUALITY_FLAG_VALUE, dtype=np.int8),
                dims=psd_da.dims,
                coords=psd_da.coords,
                # attrs are assigned below.
            )

        md_helper = self.metadata_helper

        md_helper.add_variable_attributes(psd_da["time"], "time")
        md_helper.add_variable_attributes(data_vars["effort"], "effort")
        md_helper.add_variable_attributes(psd_da["frequency"], "frequency")
        if "sensitivity" in data_vars:
            md_helper.add_variable_attributes(data_vars["sensitivity"], "sensitivity")
        if "quality_flag" in data_vars:
            md_helper.add_variable_attributes(data_vars["quality_flag"], "quality_flag")
        md_helper.add_variable_attributes(data_vars["psd"], "psd")

        ds_result = xr.Dataset(
            data_vars=data_vars,
            attrs=self._get_global_attributes(year, month, day),
        )

        generated_filenames = []
        basename = f"{self.output_dir}/{self.output_prefix}{year:04}{month:02}{day:02}"
        if os.name == "nt":
            basename = (
                f"{self.output_dir}\\{self.output_prefix}{year:04}{month:02}{day:02}"
            )

        if self.gen_netcdf:
            nc_filename = f"{basename}.nc"
            save_dataset_to_netcdf(self.log, ds_result, nc_filename)
            generated_filenames.append(nc_filename)

        self.file_helper.day_completed()

        return ProcessDayResult(generated_filenames, ds_result)

    def process_hours_minutes(self, hour_and_minutes: List[Tuple[int, int]]):
        self.log.info(f"Processing {len(hour_and_minutes)} segments ...")
        for at_hour, at_minute in hour_and_minutes:
            self.process_segment_at_hour_minute(at_hour, at_minute)

    def process_segment_at_hour_minute(self, at_hour: int, at_minute: int):
        file_helper = self.file_helper
        year, month, day = file_helper.year, file_helper.month, file_helper.day
        assert year is not None and month is not None and day is not None

        dt = datetime(year, month, day, at_hour, at_minute, tzinfo=timezone.utc)

        self.log.debug(
            f"Segment at {at_hour:02}h:{at_minute:02}m ...\n"
            + f"  - extracting {file_helper.segment_size_in_mins * 60}-sec segment:"
        )
        extraction = file_helper.extract_audio_segment(at_hour, at_minute)
        if extraction is None:
            self.log.warning(f"cannot get audio segment at {at_hour:02}:{at_minute:02}")
            self.pypam_support.add_missing_segment(dt)
            return

        audio_info, audio_segment = extraction

        if self.pypam_support.parameters_set:
            if self.pypam_support.fs != audio_info.samplerate:
                self.log.error(
                    f"Samplerate changed from {self.pypam_support.fs} to {audio_info.samplerate}"
                )
                return
        else:
            self.log.info("Got audio parameters")
            self.pypam_support.set_parameters(
                audio_info.samplerate,
                subset_to=self.subset_to,
            )

        if self.voltage_multiplier is not None:
            audio_segment *= self.voltage_multiplier

        if self.sensitivity_flat_value is not None:
            # convert signal to uPa
            audio_segment = audio_segment * 10 ** (self.sensitivity_flat_value / 20)

        self.pypam_support.add_segment(dt, audio_segment)

    def _get_global_attributes(self, year: int, month: int, day: int):
        coverage_date = f"{year:04}-{month:02}-{day:02}"
        global_attrs = {
            "time_coverage_start": f"{coverage_date} 00:00:00Z",
            "time_coverage_end": f"{coverage_date} 23:59:00Z",
            "date_created": datetime.utcnow().strftime("%Y-%m-%d"),
        }
        md_helper = self.metadata_helper
        md_helper.set_some_global_attributes(global_attrs)
        snippets = {
            "{{PBP_version}}": get_pbp_version(),
            "{{PyPAM_version}}": get_pypam_version(),
        }
        global_attrs = md_helper.get_global_attributes()
        # for each, key, have the {{key}} snippet for replacement
        # in case it is used in any values:
        for k, v in global_attrs.items():
            snippets["{{" + k + "}}"] = v
        return replace_snippets(global_attrs, snippets)
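A side note on the calibration steps in process_segment_at_hour_minute: a flat sensitivity value is applied as a decibel factor, scaling the signal by 10 ** (value / 20). Below is a minimal standalone sketch of that arithmetic; the sample values and the sensitivity figure are made up for illustration and are not from the source.

import numpy as np

# Hypothetical flat sensitivity in dB; a value S scales the signal
# by 10 ** (S / 20), mirroring the line
#   audio_segment = audio_segment * 10 ** (self.sensitivity_flat_value / 20)
S = 6.0
factor = 10 ** (S / 20)  # ~1.995 for S = 6 dB
audio_segment = np.array([0.05, -0.12, 0.30], dtype=np.float32)  # placeholder samples
print(audio_segment * factor)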

__init__(log, file_helper, output_dir, output_prefix, gen_netcdf=True, compress_netcdf=True, add_quality_flag=False, global_attrs_uri=None, set_global_attrs=None, variable_attrs_uri=None, voltage_multiplier=None, sensitivity_uri=None, sensitivity_flat_value=None, max_segments=0, subset_to=None)

Initializes the processor.

Parameters:

    file_helper (FileHelper, required): File loader.
    output_dir (str, required): Output directory.
    output_prefix (str, required): Output filename prefix.
    gen_netcdf (bool, default True): Whether to generate the NetCDF file.
    compress_netcdf (bool, default True): Whether to compress the generated NetCDF file.
    add_quality_flag (bool, default False): Whether to add a quality flag variable (with value 2 - "Not evaluated") to the NetCDF file.
    global_attrs_uri (str, default None): URI of a JSON file with global attributes to be added to the NetCDF file.
    set_global_attrs (list[list[str]], default None): List of (key, value) pairs to be considered for the global attributes.
    variable_attrs_uri (str, default None): URI of a JSON file with variable attributes to be added to the NetCDF file.
    voltage_multiplier (float, default None): Factor applied to the loaded signal.
    sensitivity_uri (str, default None): URI of a sensitivity NetCDF file for calibration of results. Has precedence over sensitivity_flat_value.
    sensitivity_flat_value (float, default None): Flat sensitivity value used for calibration.
    max_segments (int, default 0): Maximum number of segments to process for each day; 0 means no limit.
    subset_to (tuple[int, int], default None): Frequency limits for the PSD (lower inclusive, upper exclusive).
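A minimal construction sketch, not taken from the source: it assumes FileHelper is importable from pbp.file_helper (only pbp/process_helper.py is shown on this page), uses loguru as the logger, and all argument values below are hypothetical placeholders. The FileHelper constructor arguments are omitted because they are not documented here.

from loguru import logger
from pbp.file_helper import FileHelper  # assumed import path
from pbp.process_helper import ProcessHelper

# Build a FileHelper for your audio inputs first; its constructor
# arguments depend on your data layout and are not shown on this page.
file_helper = FileHelper(...)  # placeholder; see the FileHelper docs

helper = ProcessHelper(
    log=logger,
    file_helper=file_helper,
    output_dir="output",            # hypothetical
    output_prefix="example_",       # hypothetical
    sensitivity_flat_value=-176.0,  # hypothetical calibration value
    subset_to=(10, 24000),          # hypothetical frequency limits
)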
process_day(date)

Generates a NetCDF file with the result of processing all segments of the given day.

Parameters:

    date (str, required): Date to process in YYYYMMDD format.

Returns:

    Optional[ProcessDayResult]: The result, or None if no segments were processed for the day.
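Continuing the construction sketch above, a short example of handling the return value; the date is hypothetical.

result = helper.process_day("20230101")  # YYYYMMDD
if result is None:
    logger.warning("no segments were processed for this day")
else:
    logger.info(f"generated: {result.generated_filenames}")
    ds = result.dataset  # xarray.Dataset with psd, effort, and related variables
    logger.info(f"psd dims: {ds['psd'].dims}")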


ProcessDayResult

The result returned from process_day. Contains the list of paths to generated files (NetCDF and others, depending on the given parameters) as well as the generated dataset.

Attributes:

    generated_filenames (list[str]): List of paths to generated files.
    dataset (Dataset): The generated dataset.

Source code in pbp/process_helper.py
@dataclass
class ProcessDayResult:
    """
    The result returned from `process_day`.
    Contains the list of paths to generated files
    (NetCDF and others depending on given parameters)
    as well as the generated dataset.

    Attributes:
        generated_filenames: List of paths to generated files
        dataset: The generated dataset
    """

    generated_filenames: list[str]
    dataset: xr.Dataset
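A hypothetical multi-day loop exercising both attributes, again reusing the helper from the construction sketch above; the dates are placeholders.

all_files: list[str] = []
for date in ("20230101", "20230102"):  # hypothetical dates
    day_result = helper.process_day(date)
    if day_result is not None:
        all_files.extend(day_result.generated_filenames)
        print(day_result.dataset)  # summary of the generated dataset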

save_dataset_to_netcdf

Saves the given dataset to a NetCDF file.

Parameters:

    log (Logger, required): Logger.
    ds (Dataset, required): Dataset to save.
    filename (str, required): Output filename.
    compress_netcdf (bool, default True): Whether to compress the NetCDF file.

Returns:

    bool: True if the dataset was saved successfully, False otherwise.

Source code in pbp/process_helper.py
def save_dataset_to_netcdf(
    log,  #: loguru.Logger,
    ds: xr.Dataset,
    filename: str,
    compress_netcdf: bool = True,
) -> bool:
    """
    Saves the given dataset to a NetCDF file.

    Args:
        log (loguru.Logger): Logger.
        ds (xr.Dataset): Dataset to save.
        filename (str): Output filename.
        compress_netcdf (bool): Whether to compress the NetCDF file.

    Returns:
        True if the dataset was saved successfully, False otherwise.
    """
    log.info(f"  - saving dataset to: {filename}  (compressed: {compress_netcdf})")
    encoding: dict[Any, dict[str, Any]] = {
        "effort": {"_FillValue": None},
        "frequency": {"_FillValue": None},
        "sensitivity": {"_FillValue": None},
    }
    if compress_netcdf:
        for k in ds.data_vars:
            if ds[k].ndim < 2:
                continue
            encoding[k] = {
                "zlib": True,
                "complevel": 3,
                "fletcher32": True,
                "chunksizes": tuple(map(lambda x: x // 2, ds[k].shape)),
            }
    try:
        ds.to_netcdf(filename, format="NETCDF4", engine="h5netcdf", encoding=encoding)
        return True
    except Exception as e:  # pylint: disable=broad-exception-caught
        error = f"Unable to save {filename}: {e}"
        log.error(error)
        print(error)
        return False
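A minimal sketch of calling the function directly with a toy dataset. The toy dataset deliberately includes effort, frequency, and sensitivity to match the fixed encoding entries in the function, and a 2-D psd so the compression branch (ndim >= 2) is exercised; the h5netcdf engine used by the function must be installed. All values below are placeholders.

import numpy as np
import xarray as xr
from loguru import logger
from pbp.process_helper import save_dataset_to_netcdf

# Toy dataset with the variables named in the function's fixed encoding.
ds = xr.Dataset(
    {
        "psd": (("time", "frequency"), np.zeros((4, 8), dtype=np.float32)),
        "effort": ("time", np.ones(4, dtype=np.float32)),
        "sensitivity": ("frequency", np.full(8, -176.0, dtype=np.float32)),
    },
    coords={"time": np.arange(4), "frequency": np.arange(8, dtype=np.float64)},
)

ok = save_dataset_to_netcdf(logger, ds, "toy.nc")  # requires h5netcdf
print("saved:", ok)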