Coverage for src / CSET / operators / imageprocessing.py: 100%
56 statements
« prev ^ index » next coverage.py v7.12.0, created at 2025-11-27 11:58 +0000
1# © Crown copyright, Met Office (2022-2025) and CSET contributors.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
15"""Operators to perform various kinds of image processing."""
17import logging
19import iris
20import iris.cube
21import numpy as np
22from skimage.metrics import structural_similarity
24from CSET._common import is_increasing
25from CSET.operators._utils import fully_equalise_attributes, get_cube_yxcoordname
26from CSET.operators.misc import _extract_common_time_points
27from CSET.operators.regrid import regrid_onto_cube
def _SSIM_cube_preparation(
    cubes: iris.cube.CubeList,
) -> (iris.cube.Cube, iris.cube.Cube, str):
    """Prepare the cubes for the SSIM calculation.

    Parameters
    ----------
    cubes: iris.cube.CubeList
        A list of exactly two cubes. One must have the cset_comparison_base
        attribute set to 1, and will be used as the base of the comparison.
        The cubes must contain a time coordinate.

    Returns
    -------
    iris.cube.Cube, iris.cube.Cube, str
        The base cube and the other cube, on a common grid, latitude
        orientation, and set of time points, plus the name of the first
        non-scalar time coordinate.

    Raises
    ------
    ValueError
        When the cubes are not compatible, or no time coordinate.
    """
    if len(cubes) != 2:
        raise ValueError("cubes should contain exactly 2 cubes.")
    base: iris.cube.Cube = cubes.extract_cube(
        iris.AttributeConstraint(cset_comparison_base=1)
    )
    other: iris.cube.Cube = cubes.extract_cube(
        iris.Constraint(
            cube_func=lambda cube: "cset_comparison_base" not in cube.attributes
        )
    )

    # Get spatial coord names.
    base_lat_name, base_lon_name = get_cube_yxcoordname(base)
    other_lat_name, other_lon_name = get_cube_yxcoordname(other)

    # Ensure cubes to compare are on common differencing grid.
    # This is triggered if either
    # i) latitude and longitude shapes are not the same. Note grid points
    #    are not compared directly as these can differ through rounding
    #    errors.
    # ii) or variables are known to often sit on different grid staggering
    #    in different models (e.g. cell center vs cell edge), as is the case
    #    for UM and LFRic comparisons.
    # In future greater choice of regridding method might be applied depending
    # on variable type. Linear regridding can in general be appropriate for smooth
    # variables. Care should be taken with interpretation of differences
    # given this dependency on regridding.
    if (
        base.coord(base_lat_name).shape != other.coord(other_lat_name).shape
        or base.coord(base_lon_name).shape != other.coord(other_lon_name).shape
    ) or (
        base.long_name
        in [
            "eastward_wind_at_10m",
            "northward_wind_at_10m",
            "northward_wind_at_cell_centres",
            "eastward_wind_at_cell_centres",
            "zonal_wind_at_pressure_levels",
            "meridional_wind_at_pressure_levels",
            "potential_vorticity_at_pressure_levels",
            "vapour_specific_humidity_at_pressure_levels_for_climate_averaging",
        ]
    ):
        logging.debug(
            "Linear regridding base cube to other grid to compute differences"
        )
        base = regrid_onto_cube(base, other, method="Linear")

    # Figure out if we are comparing between UM and LFRic; flip array if so.
    base_lat_direction = is_increasing(base.coord(base_lat_name).points)
    other_lat_direction = is_increasing(other.coord(other_lat_name).points)
    if base_lat_direction != other_lat_direction:
        other.data = np.flip(other.data, other.coord(other_lat_name).cube_dims(other))

    # Extract just common time points.
    base, other = _extract_common_time_points(base, other)

    # Equalise attributes so we can merge.
    fully_equalise_attributes([base, other])
    logging.debug("Base: %s\nOther: %s", base, other)

    # Get the name of the first non-scalar time coordinate.
    time_coord = next(
        (
            coord.name()
            for coord in base.coords()
            if coord.shape > (1,) and coord.name() in ["time", "hour"]
        ),
        None,
    )
    if time_coord is None:
        raise ValueError("Cubes should contain a time coordinate.")
    return base, other, time_coord
def spatial_structural_similarity_model_comparisons(
    cubes: iris.cube.CubeList, sigma: float = 1.5
) -> iris.cube.Cube:
    r"""Calculate the structural similarity and produces a spatial plot.

    Parameters
    ----------
    cubes: iris.cube.CubeList
        A list of exactly two cubes. One must have the cset_comparison_base
        attribute set to 1, and will be used as the base of the comparison.
        The cubes must contain a time coordinate.
    sigma: float, optional
        The standard deviation of the Gaussian kernel to be used. The default
        is set to 1.5 to mimic the human eye following [Wangetal2004]_.

    Returns
    -------
    iris.cube.Cube
        A cube of the structural similarity at every grid point, for each
        realization and time.

    Raises
    ------
    ValueError
        When the cubes are not compatible, or no time coordinate.

    Notes
    -----
    This diagnostic was introduced by Wang et al. (2004) [Wangetal2004]_. It is
    an image processing diagnostic that takes into account three factors associated
    with an image: i) luminance, ii) contrast, iii) structure. In calculation terms
    it is a combination of the intensity, variance, and co-variance of an image. It is
    calculated as follows:

    .. math:: SSIM(x,y) = \frac{(2\mu_{x}\mu_{y} + C_{1})(2\sigma_{xy} + C_{2})}{(\mu^{2}_{x} + \mu^{2}_{y} + C_{1})(\sigma^{2}_{x} + \sigma^{2}_{y} + C_{2})}

    for images, x and y, and small constants C1 and C2, with the other symbols having
    their usual statistical meaning.

    The diagnostic varies between positive and negative one, on the most part.
    However, should the data being compared lie outside of the specified data
    range values larger or smaller can occur. Values close to or exactly 1 imply
    a perceptibly similar image; values close to or exactly -1 imply an anticorrelated
    image; small values imply the fields are perceptibly different.

    The diagnostic has been setup with default values that are designed to mimic the
    human eye and so are universally applicable irrespective of model resolution.
    However, it should be noted that the mean structural similarity is not
    identical to the domain mean of the structural similarity. This difference
    occurs because the former is calculated over the mean of all the windows
    (Gaussian kernels) rather than the mean of the grid boxes.

    Further details, including caveats, can be found in Wang et al. (2004)
    [Wangetal2004]_.

    References
    ----------
    .. [Wangetal2004] Wang, Z., Bovik, A.C., Sheikh, H.R., Simoncelli, E.P. (2004)
        "Image Quality Assessment: From Error Visibility to Structural Similarity."
        IEEE Transactions on Image Processing, vol. 13, 600-612,
        doi: 10.1109/TIP.2003.819861

    Examples
    --------
    >>> SSIM = imageprocessing.spatial_structural_similarity_model_comparisons(
            cubes, sigma=1.5)
    >>> iplt.pcolormesh(SSIM[0,:], cmap=mpl.cm.bwr)
    >>> plt.gca().coastlines('10m')
    >>> plt.clim(-1, 1)
    >>> plt.colorbar()
    >>> plt.show()
    """
    base, other, time_coord = _SSIM_cube_preparation(cubes)
    ssim = iris.cube.CubeList()

    # Loop over realization and time coordinates.
    for base_r, other_r in zip(
        base.slices_over("realization"),
        other.slices_over("realization"),
        strict=True,
    ):
        for base_t, other_t in zip(
            base_r.slices_over(time_coord), other_r.slices_over(time_coord), strict=True
        ):
            # Use the full array as output will be as a 2D map. The first
            # return value (the mean SSIM) is discarded here; see
            # mean_structural_similarity_model_comparisons for that.
            ssim_map = base_t.copy()
            _, ssim_map.data = structural_similarity(
                base_t.data,
                other_t.data,
                data_range=other_t.data.max() - other_t.data.min(),
                gaussian_weights=True,
                sigma=sigma,
                full=True,
            )
            ssim.append(ssim_map)
    # Merge the cube slices into one cube, rename, and change units.
    ssim = ssim.merge_cube()
    ssim.standard_name = None
    ssim.long_name = "structural_similarity"
    ssim.units = "1"
    return ssim
def mean_structural_similarity_model_comparisons(
    cubes: iris.cube.CubeList, sigma: float = 1.5
) -> iris.cube.Cube:
    r"""Calculate the mean structural similarity and produces a timeseries.

    Parameters
    ----------
    cubes: iris.cube.CubeList
        A list of exactly two cubes. One must have the cset_comparison_base
        attribute set to 1, and will be used as the base of the comparison.
        The cubes must contain a time coordinate.
    sigma: float, optional
        The standard deviation of the Gaussian kernel to be used. The default
        is set to 1.5 to mimic the human eye following [Wangetal2004a]_.

    Returns
    -------
    iris.cube.Cube
        A cube of the mean structural similarity, one value per realization
        and time.

    Raises
    ------
    ValueError
        When the cubes are not compatible, or no time coordinate.

    Notes
    -----
    This diagnostic was introduced by Wang et al. (2004) [Wangetal2004a]_. It is
    an image processing diagnostic that takes into account three factors associated
    with an image: i) luminance, ii) contrast, iii) structure. In calculation terms
    it is a combination of the intensity, variance, and co-variance of an image. It is
    calculated as follows:

    .. math:: SSIM(x,y) = \frac{(2\mu_{x}\mu_{y} + C_{1})(2\sigma_{xy} + C_{2})}{(\mu^{2}_{x} + \mu^{2}_{y} + C_{1})(\sigma^{2}_{x} + \sigma^{2}_{y} + C_{2})}

    for images, x and y, and small constants C1 and C2, with the other symbols having
    their usual statistical meaning.

    The diagnostic varies between positive and negative one, on the most part.
    However, should the data being compared lie outside of the specified data
    range values larger or smaller can occur. Values close to or exactly 1 imply
    a perceptibly similar image; values close to or exactly -1 imply an anticorrelated
    image; small values imply the fields are perceptibly different.

    The diagnostic has been setup with default values that are designed to mimic the
    human eye and so are universally applicable irrespective of model resolution.
    However, it should be noted that the mean structural similarity is not
    identical to the domain mean of the structural similarity. This difference
    occurs because the former is calculated over the mean of all the windows
    (Gaussian kernels) rather than the mean of the grid boxes.

    Further details, including caveats, can be found in Wang et al. (2004)
    [Wangetal2004a]_.

    References
    ----------
    .. [Wangetal2004a] Wang, Z., Bovik, A.C., Sheikh, H.R., Simoncelli, E.P. (2004)
        "Image Quality Assessment: From Error Visibility to Structural Similarity."
        IEEE Transactions on Image Processing, vol. 13, 600-612,
        doi: 10.1109/TIP.2003.819861

    Examples
    --------
    >>> MSSIM = imageprocessing.mean_structural_similarity_model_comparisons(
            cubes, sigma=1.5)
    >>> iplt.plot(MSSIM)
    """
    base, other, time_coord = _SSIM_cube_preparation(cubes)
    ssim = iris.cube.CubeList()

    # Loop over realization and time coordinates.
    for base_r, other_r in zip(
        base.slices_over("realization"),
        other.slices_over("realization"),
        strict=True,
    ):
        for base_t, other_t in zip(
            base_r.slices_over(time_coord), other_r.slices_over(time_coord), strict=True
        ):
            # The MSSIM (Mean structural similarity) is compression to
            # a single point. Therefore, copying cube data for one
            # point in the domain to keep cube consistency.
            mssim = base_t[0, 0].copy()
            mssim.data = structural_similarity(
                base_t.data,
                other_t.data,
                data_range=other_t.data.max() - other_t.data.min(),
                gaussian_weights=True,
                sigma=sigma,
            )
            ssim.append(mssim)
    # Merge the cube slices into one cube, rename, and change units.
    ssim = ssim.merge_cube()
    ssim.standard_name = None
    ssim.long_name = "structural_similarity"
    ssim.units = "1"
    return ssim