Coverage for src/CSET/operators/read.py: 89% (344 statements)
# © Crown copyright, Met Office (2022-2025) and CSET contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
15"""Operators for reading various types of files from disk."""
17import ast
18import datetime
19import functools
20import glob
21import itertools
22import logging
23from pathlib import Path
24from typing import Literal
26import iris
27import iris.coord_systems
28import iris.coords
29import iris.cube
30import iris.exceptions
31import iris.util
32import numpy as np
33from iris.analysis.cartography import rotate_pole, rotate_winds
35from CSET._common import iter_maybe
36from CSET.operators._stash_to_lfric import STASH_TO_LFRIC
37from CSET.operators._utils import (
38 get_cube_coordindex,
39 get_cube_yxcoordname,
40 is_spatialdim,
41)


class NoDataError(FileNotFoundError):
    """Error that no data has been loaded."""


def read_cube(
    file_paths: list[str] | str,
    constraint: iris.Constraint | None = None,
    model_names: list[str] | str | None = None,
    subarea_type: str | None = None,
    subarea_extent: list[float] | None = None,
    **kwargs,
) -> iris.cube.Cube:
    """Read a single cube from files.

    Read operator that takes a path string (can include shell-style glob
    patterns) and loads the cube matching the constraint. If any path points
    to a directory, all the files contained within are loaded.

    Ensemble data can also be loaded. If it has a realization coordinate
    already, it will be used directly. If not, its member number will be
    guessed from the filename, based on one of several common patterns. For
    example the pattern *emXX*, where XX is the realization.

    Deterministic data will be loaded with a realization of 0, allowing it to
    be processed in the same way as ensemble data.

    Arguments
    ---------
    file_paths: str | list[str]
        Path or paths to where .pp/.nc files are located.
    constraint: iris.Constraint | iris.ConstraintCombination, optional
        Constraints to filter the data by. Defaults to unconstrained.
    model_names: str | list[str], optional
        Names of the models that correspond to the respective paths in
        file_paths.
    subarea_type: "gridcells" | "modelrelative" | "realworld", optional
        Whether to constrain the data by model-relative coordinates or
        real-world coordinates.
    subarea_extent: list[float], optional
        List of coordinates to constrain the data by, in the order: lower
        latitude, upper latitude, lower longitude, upper longitude.

    Returns
    -------
    cube: iris.cube.Cube
        The loaded cube.

    Raises
    ------
    FileNotFoundError
        If the provided path does not exist.
    ValueError
        If the constraint doesn't produce a single cube.
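
    Examples
    --------
    A minimal usage sketch; the path and variable name are illustrative and
    assume matching files exist on disk:

    >>> cube = read_cube(
    ...     "/data/case/*.nc",
    ...     constraint=iris.Constraint("air_temperature"),
    ... )  # doctest: +SKIP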
96 """
    cubes = read_cubes(
        file_paths=file_paths,
        constraint=constraint,
        model_names=model_names,
        subarea_type=subarea_type,
        subarea_extent=subarea_extent,
    )
    # Check filtered cubes is a CubeList containing one cube.
    if len(cubes) == 1:
        return cubes[0]
    else:
        raise ValueError(
            f"Constraint doesn't produce single cube: {constraint}\n{cubes}"
        )


def read_cubes(
    file_paths: list[str] | str,
    constraint: iris.Constraint | None = None,
    model_names: str | list[str] | None = None,
    subarea_type: str | None = None,
    subarea_extent: list[float] | None = None,
    **kwargs,
) -> iris.cube.CubeList:
    """Read cubes from files.

    Read operator that takes a path string (can include shell-style glob
    patterns) and loads the cubes matching the constraint. If any path points
    to a directory, all the files contained within are loaded.

    Ensemble data can also be loaded. If it has a realization coordinate
    already, it will be used directly. If not, its member number will be
    guessed from the filename, based on one of several common patterns. For
    example the pattern *emXX*, where XX is the realization.

    Deterministic data will be loaded with a realization of 0, allowing it to
    be processed in the same way as ensemble data.

    Data output by XIOS (such as LFRic) has its per-file metadata removed so
    that the cubes merge across files.

    Arguments
    ---------
    file_paths: str | list[str]
        Path or paths to where .pp/.nc files are located. Can include globs.
    constraint: iris.Constraint | iris.ConstraintCombination, optional
        Constraints to filter the data by. Defaults to unconstrained.
    model_names: str | list[str], optional
        Names of the models that correspond to the respective paths in
        file_paths.
    subarea_type: "gridcells" | "modelrelative" | "realworld", optional
        Whether to constrain the data by model-relative coordinates or
        real-world coordinates.
    subarea_extent: list[float], optional
        List of coordinates to constrain the data by, in the order: lower
        latitude, upper latitude, lower longitude, upper longitude.

    Returns
    -------
    cubes: iris.cube.CubeList
        Cubes loaded after being merged and concatenated.

    Raises
    ------
    FileNotFoundError
        If the provided path does not exist.
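
    Examples
    --------
    An illustrative sketch of loading two models for comparison; the paths
    and model names are assumptions for the example:

    >>> cubes = read_cubes(
    ...     ["/data/model_a/", "/data/model_b/"],
    ...     model_names=["Model A", "Model B"],
    ... )  # doctest: +SKIP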
162 """
    # Get an iterable of paths. Each path corresponds to one model.
    paths = iter_maybe(file_paths)
    model_names = iter_maybe(model_names)

    # Check we have an appropriate number of model names.
    if model_names != (None,) and len(model_names) != len(paths):
        raise ValueError(
            f"The number of model names ({len(model_names)}) should equal "
            f"the number of paths given ({len(paths)})."
        )

    # Load the data for each model into a CubeList per model.
    model_cubes = (
        _load_model(path, name, constraint)
        for path, name in itertools.zip_longest(paths, model_names, fillvalue=None)
    )

    # Split out the first model's cubes and mark them as the base for comparisons.
    cubes = next(model_cubes)
    for cube in cubes:
        # Use 1 to indicate True, as booleans can't be saved in NetCDF attributes.
        cube.attributes["cset_comparison_base"] = 1

    # Load the rest of the models.
    cubes.extend(itertools.chain.from_iterable(model_cubes))

    # Unify time units so different case studies can merge.
    iris.util.unify_time_units(cubes)

    # Select the sub region.
    cubes = _cutout_cubes(cubes, subarea_type, subarea_extent)

    # Merge and concatenate cubes now metadata has been fixed.
    cubes = cubes.merge()
    cubes = cubes.concatenate()

    # Squeeze single valued coordinates into scalar coordinates.
    cubes = iris.cube.CubeList(iris.util.squeeze(cube) for cube in cubes)

    # Ensure dimension coordinates are bounded.
    for cube in cubes:
        for dim_coord in cube.coords(dim_coords=True):
            # Iris can't guess the bounds of a scalar coordinate.
            if not dim_coord.has_bounds() and dim_coord.shape[0] > 1:
                dim_coord.guess_bounds()

    logging.info("Loaded cubes: %s", cubes)
    if len(cubes) == 0:
        raise NoDataError("No cubes loaded, check your constraints!")
    return cubes


def _load_model(
    paths: str | list[str],
    model_name: str | None,
    constraint: iris.Constraint | None,
) -> iris.cube.CubeList:
    """Load a single model's data into a CubeList."""
    input_files = _check_input_files(paths)
    # If unset, a constraint of None lets everything be loaded.
    logging.debug("Constraint: %s", constraint)
    cubes = iris.load(input_files, constraint, callback=_loading_callback)
    # Make the UM's winds consistent with LFRic.
    _fix_um_winds(cubes)

    # Add a model_name attribute to each cube to make it available at any
    # further step without needing to pass it as a function parameter.
    if model_name is not None:
        for cube in cubes:
            cube.attributes["model_name"] = model_name
    return cubes


def _check_input_files(input_paths: str | list[str]) -> list[Path]:
    """Get an iterable of files to load, and check that they all exist.

    Arguments
    ---------
    input_paths: str | list[str]
        List of paths to input files or directories. The paths may themselves
        contain glob patterns, but unlike in shells a literal match is tried
        first.

    Returns
    -------
    list[Path]
        A list of files to load.

    Raises
    ------
    FileNotFoundError:
        If the provided arguments don't resolve to at least one existing file.
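
    Examples
    --------
    An illustrative call; the paths shown are assumptions for the example:

    >>> _check_input_files("/data/case/*.nc")  # doctest: +SKIP
    [PosixPath('/data/case/air.nc'), PosixPath('/data/case/rain.nc')]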
254 """
    files = []
    for raw_filename in iter_maybe(input_paths):
        # Match literal paths first, before falling back to globbing.
        raw_path = Path(raw_filename)
        if raw_path.is_file():
            files.append(raw_path)
        else:
            for input_path in glob.glob(raw_filename):
                # Convert string paths into Path objects.
                input_path = Path(input_path)
                # Get the list of files in the directory, or use it directly.
                if input_path.is_dir():
                    logging.debug("Checking directory '%s' for files", input_path)
                    files.extend(p for p in input_path.iterdir() if p.is_file())
                else:
                    files.append(input_path)

    files.sort()
    logging.info("Loading files:\n%s", "\n".join(str(path) for path in files))
    if len(files) == 0:
        raise FileNotFoundError(f"No files found for {input_paths}")
    return files


def _cutout_cubes(
    cubes: iris.cube.CubeList,
    subarea_type: Literal["gridcells", "realworld", "modelrelative"] | None,
    subarea_extent: list[float] | None,
) -> iris.cube.CubeList:
    """Cut out a subarea from a CubeList."""
    if subarea_type is None:
        logging.debug("Subarea selection is disabled.")
        return cubes

    # If selected, cut out according to the requested subarea definition.
    cutout_cubes = iris.cube.CubeList()
    for cube in cubes:
        # Find the spatial dimension coordinates.
        lat_name, lon_name = get_cube_yxcoordname(cube)

        # Compute cutout based on number of cells to trim from edges.
        if subarea_type == "gridcells":
            logging.debug(
                "User requested LowerTrim: %s LeftTrim: %s UpperTrim: %s RightTrim: %s",
                subarea_extent[0],
                subarea_extent[1],
                subarea_extent[2],
                subarea_extent[3],
            )
            lat_points = np.sort(cube.coord(lat_name).points)
            lon_points = np.sort(cube.coord(lon_name).points)
            # Define cutout region using user provided cell points.
            lats = [lat_points[subarea_extent[0]], lat_points[-subarea_extent[2] - 1]]
            lons = [lon_points[subarea_extent[1]], lon_points[-subarea_extent[3] - 1]]
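            # Illustrative example (shape assumed): on a 100-point latitude
            # axis with subarea_extent = [5, 5, 5, 5], lats spans
            # lat_points[5] to lat_points[94], trimming 5 cells from each edge.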

        # Compute cutout based on specified coordinate values.
        elif subarea_type == "realworld" or subarea_type == "modelrelative":
            # If not gridcells, cut out by the requested geographic area.
            logging.debug(
                "User requested LLat: %s ULat: %s LLon: %s ULon: %s",
                subarea_extent[0],
                subarea_extent[1],
                subarea_extent[2],
                subarea_extent[3],
            )
            # Define cutout region using user provided coordinates.
            lats = np.array(subarea_extent[0:2])
            lons = np.array(subarea_extent[2:4])
            # Ensure cutout longitudes are within +/- 180.0 bounds.
            while lons[0] < -180.0:
                lons += 360.0
            while lons[1] > 180.0:
                lons -= 360.0
            # If the coordinate system is rotated we convert coordinates into
            # model-relative coordinates to extract the appropriate cutout.
            coord_system = cube.coord(lat_name).coord_system
            if subarea_type == "realworld" and isinstance(
                coord_system, iris.coord_systems.RotatedGeogCS
            ):
                lons, lats = rotate_pole(
                    lons,
                    lats,
                    pole_lon=coord_system.grid_north_pole_longitude,
                    pole_lat=coord_system.grid_north_pole_latitude,
                )
        else:
            raise ValueError(f"Unknown subarea_type: {subarea_type}")

        # Do cutout and add to cutout_cubes.
        intersection_args = {lat_name: lats, lon_name: lons}
        logging.debug("Cutting out coords: %s", intersection_args)
        try:
            cutout_cubes.append(cube.intersection(**intersection_args))
        except IndexError as err:
            raise ValueError(
                "Region cutout error. Check and update SUBAREA_EXTENT. "
                "The cutout region requested should be contained within the data "
                "area. Also check if the cutout region requested is smaller than "
                "the input grid spacing."
            ) from err

    return cutout_cubes


def _loading_callback(cube: iris.cube.Cube, field, filename: str) -> iris.cube.Cube:
    """Compose together the needed callbacks into a single function."""
    # Most callbacks operate in-place, but save the cube when one returns it!
    _realization_callback(cube, field, filename)
    _um_normalise_callback(cube, field, filename)
    _lfric_normalise_callback(cube, field, filename)
    cube = _lfric_time_coord_fix_callback(cube, field, filename)
    _normalise_var0_varname(cube)
    _fix_spatial_coords_callback(cube)
    _fix_pressure_coord_callback(cube)
    _fix_um_radtime(cube)
    _fix_cell_methods(cube)
    cube = _convert_cube_units_callback(cube)
    cube = _grid_longitude_fix_callback(cube)
    _fix_lfric_cloud_base_altitude(cube)
    _proleptic_gregorian_fix(cube)
    _lfric_time_callback(cube)
    _lfric_forecast_period_standard_name_callback(cube)
    return cube


def _realization_callback(cube, field, filename):
    """Give deterministic cubes a realization of 0.

    This means they can be handled in the same way as ensembles through the rest
    of the code.
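
    Examples
    --------
    The added coordinate is a scalar zero realization:

    >>> import iris.coords
    >>> iris.coords.DimCoord(0, standard_name="realization", units="1").points
    array([0])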
385 """
386 # Only add if realization coordinate does not exist.
387 if not cube.coords("realization"):
388 cube.add_aux_coord(
389 iris.coords.DimCoord(0, standard_name="realization", units="1")
390 )


@functools.lru_cache(None)
def _warn_once(msg):
    """Print a warning message, skipping duplicates."""
    logging.warning(msg)


def _um_normalise_callback(cube: iris.cube.Cube, field, filename):
    """Normalise UM STASH variable long names to LFRic variable names.

    Note standard names will remain associated with cubes where different.
    The long name will be used consistently in output filenames and titles.
    """
    # Convert STASH to LFRic variable name.
    if "STASH" in cube.attributes:
        stash = cube.attributes["STASH"]
        try:
            (name, grid) = STASH_TO_LFRIC[str(stash)]
            cube.long_name = name
        except KeyError:
            # Don't change cubes with unknown STASH codes.
            _warn_once(
                f"Unknown STASH code: {stash}. Please check _stash_to_lfric.py to update it."
            )


def _lfric_normalise_callback(cube: iris.cube.Cube, field, filename):
    """Normalise attributes that prevent LFRic cubes from merging.

    The uuid and timeStamp attributes relate to the output file, as saved by
    XIOS, and have no relation to the data contained. These attributes are
    removed.

    The um_stash_source attribute is a list of STASH codes for when an LFRic
    field maps to multiple UM fields, however it can be encoded in any order.
    This attribute is sorted to prevent this. This attribute is only present
    in LFRic data that has been converted to look like UM data.
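
    Examples
    --------
    An illustrative re-encoding of um_stash_source (the STASH values are
    arbitrary):

    >>> import ast
    >>> str(sorted(ast.literal_eval("['m01s04i204', 'm01s04i203']")))
    "['m01s04i203', 'm01s04i204']"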
428 """
429 # Remove unwanted attributes.
430 cube.attributes.pop("timeStamp", None)
431 cube.attributes.pop("uuid", None)
432 cube.attributes.pop("name", None)
434 # Sort STASH code list.
435 stash_list = cube.attributes.get("um_stash_source")
436 if stash_list:
437 # Parse the string as a list, sort, then re-encode as a string.
438 cube.attributes["um_stash_source"] = str(sorted(ast.literal_eval(stash_list)))


def _lfric_time_coord_fix_callback(
    cube: iris.cube.Cube, field, filename
) -> iris.cube.Cube:
    """Ensure the time coordinate is a DimCoord rather than an AuxCoord.

    The coordinate is converted and replaced if not. SLAMed LFRic data has this
    issue, though the coordinate satisfies all the properties for a DimCoord.
    Scalar time values are left as AuxCoords.
    """
    # This issue seems to come from iris's handling of NetCDF files where time
    # always ends up as an AuxCoord.
    if cube.coords("time"):
        time_coord = cube.coord("time")
        if (
            not isinstance(time_coord, iris.coords.DimCoord)
            and len(cube.coord_dims(time_coord)) == 1
        ):
            # Fudge the bounds to foil checking for strict monotonicity.
            if time_coord.has_bounds():
                if (time_coord.bounds[-1][0] - time_coord.bounds[0][0]) < 1.0e-8:
                    time_coord.bounds = [
                        [
                            time_coord.bounds[i][0] + 1.0e-8 * float(i),
                            time_coord.bounds[i][1],
                        ]
                        for i in range(len(time_coord.bounds))
                    ]
            iris.util.promote_aux_coord_to_dim_coord(cube, time_coord)
    return cube


def _grid_longitude_fix_callback(cube: iris.cube.Cube) -> iris.cube.Cube:
    """Check grid_longitude coordinates are in the range -180 deg to 180 deg.

    This is necessary if comparing two models with different conventions --
    for example, models where the prime meridian is defined as 0 deg or
    360 deg. If not in the range -180 deg to 180 deg, we wrap the grid_longitude
    so that it falls in this range. The checks are for near-180 bounds, given
    model data bounds may not extend exactly to 0 or 360. Input cubes on
    non-rotated grid coordinates are not impacted.
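
    Examples
    --------
    A sketch of the wrapping arithmetic on bare numpy points (no cube needed):

    >>> import numpy as np
    >>> points = np.array([350.0, 355.0, 360.0])
    >>> while np.median(points) >= 175.0:
    ...     points -= 360.0
    >>> points
    array([-10.,  -5.,   0.])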
481 """
482 try:
483 y, x = get_cube_yxcoordname(cube)
484 except ValueError:
485 # Don't modify non-spatial cubes.
486 return cube
488 long_coord = cube.coord(x)
489 # Wrap longitudes if rotated pole coordinates
490 coord_system = long_coord.coord_system
491 if x == "grid_longitude" and isinstance(
492 coord_system, iris.coord_systems.RotatedGeogCS
493 ):
494 long_points = long_coord.points.copy()
495 long_centre = np.median(long_points)
496 while long_centre < -175.0:
497 long_centre += 360.0
498 long_points += 360.0
499 while long_centre >= 175.0:
500 long_centre -= 360.0
501 long_points -= 360.0
502 long_coord.points = long_points
504 # Update coord bounds to be consistent with wrapping.
505 if long_coord.has_bounds() and np.size(long_coord) > 1: 505 ↛ 506line 505 didn't jump to line 506 because the condition on line 505 was never true
506 long_coord.bounds = None
507 long_coord.guess_bounds()
509 return cube


def _fix_spatial_coords_callback(cube: iris.cube.Cube):
    """Check the latitude and longitude coordinate names.

    This is necessary as some models define their grid on rotated
    'grid_latitude' and 'grid_longitude' coordinates while others define
    the grid on non-rotated 'latitude' and 'longitude'.
    Cube dimensions need to be made consistent to avoid recipe failures,
    particularly where comparing multiple input models with differing spatial
    coordinates.
    """
    # Check if cube is spatial.
    if not is_spatialdim(cube):
        # Don't modify non-spatial cubes.
        return

    # Get spatial coords and dimension index.
    y_name, x_name = get_cube_yxcoordname(cube)
    ny = get_cube_coordindex(cube, y_name)
    nx = get_cube_coordindex(cube, x_name)

    # Translate [grid_latitude, grid_longitude] to an unrotated 1-d DimCoord
    # [latitude, longitude] for instances where rotated_pole=90.0.
    if "grid_latitude" in [coord.name() for coord in cube.coords(dim_coords=True)]:
        coord_system = cube.coord("grid_latitude").coord_system
        pole_lat = getattr(coord_system, "grid_north_pole_latitude", None)
        if pole_lat == 90.0:
            lats = cube.coord("grid_latitude").points
            lons = cube.coord("grid_longitude").points

            cube.remove_coord("grid_latitude")
            cube.add_dim_coord(
                iris.coords.DimCoord(
                    lats,
                    standard_name="latitude",
                    var_name="latitude",
                    units="degrees",
                    coord_system=iris.coord_systems.GeogCS(6371229.0),
                    circular=True,
                ),
                ny,
            )
            y_name = "latitude"
            cube.remove_coord("grid_longitude")
            cube.add_dim_coord(
                iris.coords.DimCoord(
                    lons,
                    standard_name="longitude",
                    var_name="longitude",
                    units="degrees",
                    coord_system=iris.coord_systems.GeogCS(6371229.0),
                    circular=True,
                ),
                nx,
            )
            x_name = "longitude"

    # Create an additional AuxCoord [grid_latitude, grid_longitude] with
    # rotated pole attributes for cases with [lat, lon] inputs.
    if y_name in ["latitude"] and cube.coord(y_name).units in [
        "degrees",
        "degrees_north",
        "degrees_south",
    ]:
        # Add grid_latitude AuxCoord.
        if "grid_latitude" not in [
            coord.name() for coord in cube.coords(dim_coords=False)
        ]:
            cube.add_aux_coord(
                iris.coords.AuxCoord(
                    cube.coord(y_name).points,
                    var_name="grid_latitude",
                    units="degrees",
                ),
                ny,
            )
        # Ensure the input latitude DimCoord has a CoordSystem.
        # This attribute is sometimes lost on iris.save.
        if not cube.coord(y_name).coord_system:
            cube.coord(y_name).coord_system = iris.coord_systems.GeogCS(6371229.0)

    if x_name in ["longitude"] and cube.coord(x_name).units in [
        "degrees",
        "degrees_west",
        "degrees_east",
    ]:
        # Add grid_longitude AuxCoord.
        if "grid_longitude" not in [
            coord.name() for coord in cube.coords(dim_coords=False)
        ]:
            cube.add_aux_coord(
                iris.coords.AuxCoord(
                    cube.coord(x_name).points,
                    var_name="grid_longitude",
                    units="degrees",
                ),
                nx,
            )

        # Ensure the input longitude DimCoord has a CoordSystem.
        # This attribute is sometimes lost on iris.save.
        if not cube.coord(x_name).coord_system:
            cube.coord(x_name).coord_system = iris.coord_systems.GeogCS(6371229.0)


def _fix_pressure_coord_callback(cube: iris.cube.Cube):
    """Rename the pressure coordinate to "pressure" if it exists and ensure hPa units.

    This problem was raised because the AIFS model data from ECMWF
    defines the pressure coordinate with the name "pressure_level" rather
    than compliant CF coordinate names.

    Additionally, set the units of pressure to hPa to be consistent with the
    UM, and approach the coordinates in a unified way.
    """
    for coord in cube.dim_coords:
        if coord.name() in ["pressure_level", "pressure_levels"]:
            coord.rename("pressure")

        if coord.name() == "pressure":
            if str(cube.coord("pressure").units) != "hPa":
                cube.coord("pressure").convert_units("hPa")


def _fix_um_radtime(cube: iris.cube.Cube):
    """Shift radiation diagnostics output N minutes or seconds past the hour onto the hour.

    This callback does not have any effect for output diagnostics with
    timestamps exactly 00 or 30 minutes past the hour. Only radiation
    diagnostics are checked.
    Note this callback does not interpolate the data in time; it only adjusts
    timestamps to sit on the hour to enable time-to-time difference plotting
    with models which may output radiation data on the hour.
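
    Examples
    --------
    A sketch of the rounding rule with plain datetimes (values illustrative):

    >>> import datetime
    >>> t = datetime.datetime(2024, 1, 1, 12, 58)
    >>> n_minute = t.minute - 60 if t.minute > 30 else t.minute
    >>> t - datetime.timedelta(minutes=n_minute)
    datetime.datetime(2024, 1, 1, 13, 0)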
644 """
    try:
        if cube.attributes["STASH"] in [
            "m01s01i207",
            "m01s01i208",
            "m01s02i205",
            "m01s02i201",
            "m01s02i207",
            "m01s01i235",
        ]:
            time_coord = cube.coord("time")

            # Convert time points to datetime objects.
            time_unit = time_coord.units
            time_points = time_unit.num2date(time_coord.points)
            # Skip if times don't need fixing.
            if time_points[0].minute == 0 and time_points[0].second == 0:
                return
            if time_points[0].minute == 30 and time_points[0].second == 0:
                return

            # Subtract the time difference from the hour from each time point.
            n_minute = time_points[0].minute
            n_second = time_points[0].second
            # If times are closer to the next hour, compute the difference to
            # add on to the following hour.
            if n_minute > 30:
                n_minute = n_minute - 60
            # Compute the new diagnostic time stamps.
            new_time_points = (
                time_points
                - datetime.timedelta(minutes=n_minute)
                - datetime.timedelta(seconds=n_second)
            )

            # Convert back to numeric values using the original time unit.
            new_time_values = time_unit.date2num(new_time_points)

            # Replace the time coordinate with updated values.
            time_coord.points = new_time_values

            # Recompute forecast_period with corrected values.
            if cube.coord("forecast_period"):
                fcst_prd_points = cube.coord("forecast_period").points
                new_fcst_points = (
                    time_unit.num2date(fcst_prd_points)
                    - datetime.timedelta(minutes=n_minute)
                    - datetime.timedelta(seconds=n_second)
                )
                cube.coord("forecast_period").points = time_unit.date2num(
                    new_fcst_points
                )
    except KeyError:
        pass


def _fix_cell_methods(cube: iris.cube.Cube):
    """Fix the assumed cell_methods in accumulation STASH from the UM.

    Lightning (m01s21i104), rainfall amount (m01s04i201, m01s05i201) and
    snowfall amount (m01s04i202, m01s05i202) are output from the UM as a time
    accumulation over each hour (TAcc1hr), but the input cubes show
    cell_methods as "mean". For UM and LFRic inputs to be compatible, we
    assume accumulated cell_methods are "sum". This callback changes the
    "mean" cube cell_method to "sum", enabling the cell_method constraint on
    reading to select the correct input.
    """
    # Shift "mean" cell_method to "sum" for selected UM inputs.
    if cube.attributes.get("STASH") in [
        "m01s21i104",
        "m01s04i201",
        "m01s04i202",
        "m01s05i201",
        "m01s05i202",
    ]:
        # Check if the input cell_method contains "mean" time-processing.
        if set(cm.method for cm in cube.cell_methods) == {"mean"}:
            # Retrieve interval and any comment information.
            for cell_method in cube.cell_methods:
                interval_str = cell_method.intervals
                comment_str = cell_method.comments

            # Remove the input aggregation method.
            cube.cell_methods = ()

            # Replace the "mean" cell_method with "sum" to indicate aggregation.
            cube.add_cell_method(
                iris.coords.CellMethod(
                    method="sum",
                    coords="time",
                    intervals=interval_str,
                    comments=comment_str,
                )
            )


def _convert_cube_units_callback(cube: iris.cube.Cube):
    """Adjust diagnostic units for specific variables.

    Some precipitation diagnostics are output with unit kg m-2 s-1 and are
    converted here to mm hr-1.

    Visibility diagnostics are converted here from m to km to improve output
    formatting.
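
    Examples
    --------
    A sketch of the rate conversion with cf-units (an iris dependency),
    showing the 1:1 kg m-2 s-1 to mm s-1 assumption scaled to hours:

    >>> from cf_units import Unit
    >>> float(Unit("mm s-1").convert(1.0, "mm hr-1"))
    3600.0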
747 """
748 # Convert precipitation diagnostic units if required.
749 varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name])
750 if any("surface_microphysical" in name for name in varnames):
751 if cube.units == "kg m-2 s-1":
752 logging.debug(
753 "Converting precipitation rate units from kg m-2 s-1 to mm hr-1"
754 )
755 # Convert from kg m-2 s-1 to mm s-1 assuming 1kg water = 1l water = 1dm^3 water.
756 # This is a 1:1 conversion, so we just change the units.
757 cube.units = "mm s-1"
758 # Convert the units to per hour.
759 cube.convert_units("mm hr-1")
760 elif cube.units == "kg m-2": 760 ↛ 767line 760 didn't jump to line 767 because the condition on line 760 was always true
761 logging.debug("Converting precipitation amount units from kg m-2 to mm")
762 # Convert from kg m-2 to mm assuming 1kg water = 1l water = 1dm^3 water.
763 # This is a 1:1 conversion, so we just change the units.
764 cube.units = "mm"
766 # Convert visibility diagnostic units if required.
767 varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name])
768 if any("visibility" in name for name in varnames):
769 if cube.units == "m": 769 ↛ 774line 769 didn't jump to line 774 because the condition on line 769 was always true
770 logging.debug("Converting visibility units m to km.")
771 # Convert the units to km.
772 cube.convert_units("km")
774 return cube


def _fix_lfric_cloud_base_altitude(cube: iris.cube.Cube):
    """Mask the cloud_base_altitude diagnostic in regions with no cloud."""
    varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name])
    if any("cloud_base_altitude" in name for name in varnames):
        # Mask the cube where values exceed 144 kft to catch the default
        # no-cloud value of 144.35695538058164.
        cube.data = np.ma.masked_array(cube.data)
        cube.data[cube.data > 144.0] = np.ma.masked


def _fix_um_winds(cubes: iris.cube.CubeList):
    """Make winds from the UM consistent with those from LFRic.

    Diagnostics of wind are not always consistent between the UM
    and LFRic. Here, winds from the UM are adjusted to make them
    consistent with LFRic.
    """
    # Check whether we have components of the wind identified by STASH,
    # (so this will apply only to cubes from the UM), but not the
    # wind speed, and calculate it if it is missing. Note that
    # this will be biased low in general because the components will mostly
    # be time averages. For simplicity, we do this only if there is just one
    # cube of a component. A more complicated approach would be to consider
    # the cell methods, but it may not be warranted.
    u_constr = iris.AttributeConstraint(STASH="m01s03i225")
    v_constr = iris.AttributeConstraint(STASH="m01s03i226")
    speed_constr = iris.AttributeConstraint(STASH="m01s03i227")
    try:
        if cubes.extract(u_constr) and cubes.extract(v_constr):
            if len(cubes.extract(u_constr)) == 1 and not cubes.extract(speed_constr):
                _add_wind_speed_um(cubes)
            # Convert winds in the UM to be relative to true east and true north.
            _convert_wind_true_dirn_um(cubes)
    except (KeyError, AttributeError):
        pass


def _add_wind_speed_um(cubes: iris.cube.CubeList):
    """Add wind speeds to cubes from the UM."""
    wspd10 = (
        cubes.extract_cube(iris.AttributeConstraint(STASH="m01s03i225"))[0] ** 2
        + cubes.extract_cube(iris.AttributeConstraint(STASH="m01s03i226"))[0] ** 2
    ) ** 0.5
    wspd10.attributes["STASH"] = "m01s03i227"
    wspd10.standard_name = "wind_speed"
    wspd10.long_name = "wind_speed_at_10m"
    cubes.append(wspd10)


def _convert_wind_true_dirn_um(cubes: iris.cube.CubeList):
    """Convert winds to true directions.

    Convert from the components relative to the grid to true directions.
    This functionality only handles the simplest case.
    """
    u_grid = cubes.extract_cube(iris.AttributeConstraint(STASH="m01s03i225"))
    v_grid = cubes.extract_cube(iris.AttributeConstraint(STASH="m01s03i226"))
    true_u, true_v = rotate_winds(u_grid, v_grid, iris.coord_systems.GeogCS(6371229.0))
    u_grid.data = true_u.data
    v_grid.data = true_v.data


def _normalise_var0_varname(cube: iris.cube.Cube):
    """Fix var_names for consistency to allow merging.

    Some netCDF model data sometimes has coordinate names ending in "_0"
    etc., where duplicate coordinates of the same name are defined but with
    different attributes. This can be inconsistently managed in different
    model inputs and can cause cubes to fail to merge.
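
    Examples
    --------
    Only a single trailing suffix is stripped, for example:

    >>> "air_pressure_0".removesuffix("_0")
    'air_pressure'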
845 """
846 for coord in cube.coords():
847 if coord.var_name and coord.var_name.endswith("_0"):
848 coord.var_name = coord.var_name.removesuffix("_0")
849 if coord.var_name and coord.var_name.endswith("_1"):
850 coord.var_name = coord.var_name.removesuffix("_1")
851 if coord.var_name and coord.var_name.endswith("_2"): 851 ↛ 852line 851 didn't jump to line 852 because the condition on line 851 was never true
852 coord.var_name = coord.var_name.removesuffix("_2")
853 if coord.var_name and coord.var_name.endswith("_3"): 853 ↛ 854line 853 didn't jump to line 854 because the condition on line 853 was never true
854 coord.var_name = coord.var_name.removesuffix("_3")
856 if cube.var_name and cube.var_name.endswith("_0"):
857 cube.var_name = cube.var_name.removesuffix("_0")


def _proleptic_gregorian_fix(cube: iris.cube.Cube):
    """Convert the calendars of time units to use a standard calendar."""
    try:
        time_coord = cube.coord("time")
        if time_coord.units.calendar == "proleptic_gregorian":
            logging.debug(
                "Changing proleptic Gregorian calendar to standard calendar for %s",
                repr(time_coord.units),
            )
            time_coord.units = time_coord.units.change_calendar("standard")
    except iris.exceptions.CoordinateNotFoundError:
        pass


def _lfric_time_callback(cube: iris.cube.Cube):
    """Fix time coordinate metadata if coordinates are missing.

    Some model data does not contain forecast_reference_time or forecast_period
    as expected coordinates, and so we cannot aggregate over case studies
    without this metadata. This callback fixes these issues.

    This callback also ensures all time coordinates are referenced as hours
    since 1970-01-01 00:00:00 for consistency across different model inputs.

    Notes
    -----
    Some parts of the code have been adapted from Paul Earnshaw's scripts.
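
    Examples
    --------
    A sketch of the lead time arithmetic in the common base unit (the date is
    illustrative):

    >>> import datetime
    >>> from cf_units import Unit
    >>> tunit = Unit("hours since 1970-01-01 00:00:00", calendar="standard")
    >>> float(tunit.date2num(datetime.datetime(1970, 1, 2)))
    24.0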
887 """
    # Construct forecast_reference_time if it doesn't exist.
    try:
        tcoord = cube.coord("time")
        # Set the time coordinate to the common basis "hours since 1970".
        try:
            tcoord.convert_units("hours since 1970-01-01 00:00:00")
        except ValueError:
            logging.warning("Unrecognised base time unit: %s", tcoord.units)

        if not cube.coords("forecast_reference_time"):
            try:
                init_time = datetime.datetime.fromisoformat(
                    tcoord.attributes["time_origin"]
                )
                frt_point = tcoord.units.date2num(init_time)
                frt_coord = iris.coords.AuxCoord(
                    frt_point,
                    units=tcoord.units,
                    standard_name="forecast_reference_time",
                    long_name="forecast_reference_time",
                )
                cube.add_aux_coord(frt_coord)
            except KeyError:
                logging.warning(
                    "Cannot find forecast_reference_time, but no `time_origin` "
                    "attribute to construct it from."
                )

        # Remove time_origin to allow multiple case studies to merge.
        tcoord.attributes.pop("time_origin", None)

        # Construct the forecast_period axis (forecast lead time) if it doesn't exist.
        if not cube.coords("forecast_period"):
            try:
                # Create an array of forecast lead times.
                init_coord = cube.coord("forecast_reference_time")
                init_time_points_in_tcoord_units = tcoord.units.date2num(
                    init_coord.units.num2date(init_coord.points)
                )
                lead_times = tcoord.points - init_time_points_in_tcoord_units

                # Get the unit for the lead time from the time coordinate's unit.
                # Convert all lead times to hours for consistency between models.
                if "seconds" in str(tcoord.units):
                    lead_times = lead_times / 3600.0
                    units = "hours"
                elif "hours" in str(tcoord.units):
                    units = "hours"
                else:
                    raise ValueError(f"Unrecognised base time unit: {tcoord.units}")

                # Create the lead time coordinate.
                lead_time_coord = iris.coords.AuxCoord(
                    lead_times,
                    standard_name="forecast_period",
                    long_name="forecast_period",
                    units=units,
                )

                # Associate the lead time coordinate with the time dimension.
                cube.add_aux_coord(lead_time_coord, cube.coord_dims("time"))
            except iris.exceptions.CoordinateNotFoundError:
                logging.warning(
                    "Cube does not have both time and forecast_reference_time "
                    "coordinates, so cannot construct forecast_period."
                )
    except iris.exceptions.CoordinateNotFoundError:
        logging.warning("No time coordinate on cube.")


def _lfric_forecast_period_standard_name_callback(cube: iris.cube.Cube):
    """Add the forecast_period standard name if missing."""
    try:
        coord = cube.coord("forecast_period")
        if not coord.standard_name:
            coord.standard_name = "forecast_period"
    except iris.exceptions.CoordinateNotFoundError:
        pass