diff --git a/src/CSET/operators/_colorbar_definition.json b/src/CSET/operators/_colorbar_definition.json index f923f01eb..9bba85086 100644 --- a/src/CSET/operators/_colorbar_definition.json +++ b/src/CSET/operators/_colorbar_definition.json @@ -492,6 +492,18 @@ "max": 1400, "min": -1400 }, + "surface_microphysical_rainfall_amount": { + "cmap": "cividis", + "max": 256, + "min": 0, + "ymax": 1.0, + "ymin": 0.0 + }, + "surface_microphysical_rainfall_amount_difference": { + "cmap": "BrBG", + "max": 16.0, + "min": -16.0 + }, "surface_microphysical_rainfall_rate": { "cmap": "cividis", "levels": [ @@ -509,7 +521,7 @@ 128, 256 ], - "ymax": 0.01, + "ymax": 1.0, "ymin": 0.0 }, "surface_microphysical_rainfall_rate_difference": { @@ -517,6 +529,18 @@ "max": 32.0, "min": -32.0 }, + "surface_microphysical_snowfall_amount": { + "cmap": "cividis", + "max": 256, + "min": 0, + "ymax": 1.0, + "ymin": 0.0 + }, + "surface_microphysical_snowfall_amount_difference": { + "cmap": "BrBG", + "max": 4.0, + "min": -4.0 + }, "surface_microphysical_snowfall_rate": { "cmap": "cool", "levels": [ @@ -534,7 +558,7 @@ 128, 256 ], - "ymax": 0.01, + "ymax": 1.0, "ymin": 0.0 }, "surface_microphysical_snowfall_rate_difference": { diff --git a/src/CSET/operators/plot.py b/src/CSET/operators/plot.py index 148fac030..3591cde07 100644 --- a/src/CSET/operators/plot.py +++ b/src/CSET/operators/plot.py @@ -843,21 +843,29 @@ def _plot_and_save_histogram_series( model_colors_map = _get_model_colors_map(cubes) + # Set default that histograms will produce probability density function + # at each bin (integral over range sums to 1). + density = True + for cube in iter_maybe(cubes): # Easier to check title (where var name originates) # than seeing if long names exist etc. # Exception case, where distribution better fits log scales/bins. if "surface_microphysical" in title: - # Usually in seconds but mm/hr more intuitive. 
- cube.convert_units("kg m-2 h-1") - bins = 10.0 ** ( - np.arange(-10, 27, 1) / 10.0 - ) # Suggestion from RMED toolbox. - bins = np.insert(bins, 0, 0) + if "amount" in title: + # Compute histogram following Klingaman et al. (2017): ASoP + bin2 = np.exp(np.log(0.02) + 0.1 * np.linspace(0, 99, 100)) + bins = np.pad(bin2, (1, 0), "constant", constant_values=0) + density = False + else: + bins = 10.0 ** ( + np.arange(-10, 27, 1) / 10.0 + ) # Suggestion from RMED toolbox. + bins = np.insert(bins, 0, 0) + ax.set_yscale("log") + vmin = bins[1] + vmax = bins[-1] # Manually set vmin/vmax to override json derived value. ax.set_xscale("log") - ax.set_yscale("log") - vmin = 0 - vmax = 400 # Manually set vmin/vmax to override json derived value. elif "lightning" in title: bins = [0, 1, 2, 3, 4, 5] else: @@ -878,7 +886,15 @@ def _plot_and_save_histogram_series( if model_colors_map: label = cube.attributes.get("model_name") color = model_colors_map[label] - x, y = np.histogram(cube_data_1d, bins=bins, density=True) + x, y = np.histogram(cube_data_1d, bins=bins, density=density) + + # Compute area under curve. + if "surface_microphysical" in title and "amount" in title: + bin_mean = (bins[:-1] + bins[1:]) / 2.0 + x = x * bin_mean / x.sum() + x = x[1:] + y = y[1:] + ax.plot( y[:-1], x, color=color, linewidth=3, marker="o", markersize=6, label=label ) @@ -889,6 +905,10 @@ def _plot_and_save_histogram_series( f"{iter_maybe(cubes)[0].name()} / {iter_maybe(cubes)[0].units}", fontsize=14 ) ax.set_ylabel("Normalised probability density", fontsize=14) + if "surface_microphysical" in title and "amount" in title: + ax.set_ylabel( + f"Contribution to mean ({iter_maybe(cubes)[0].units})", fontsize=14 + ) ax.set_xlim(vmin, vmax) ax.tick_params(axis="both", labelsize=12) @@ -1039,11 +1059,6 @@ def _spatial_plot( # Ensure we've got a single cube. 
cube = _check_single_cube(cube) - # Convert precipitation units if necessary - _convert_precipitation_units_callback(cube) - # Convert visibility units if necessary - _convert_visibility_units_callback(cube) - # Make postage stamp plots if stamp_coordinate exists and has more than a # single point. plotting_func = _plot_and_save_spatial_plot @@ -1085,40 +1100,6 @@ def _spatial_plot( _make_plot_html_page(complete_plot_index) -def _convert_precipitation_units_callback(cube: iris.cube.Cube): - """To convert the unit of precipitation from kg m-2 s-1 to mm hr-1. - - Some precipitation diagnostics are output with unit kg m-2 s-1 and are converted to mm hr-1. - """ - varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name]) - if any("surface_microphysical" in name for name in varnames): - if cube.units == "kg m-2 s-1": - logging.info("Converting precipitation units from kg m-2 s-1 to mm hr-1") - # Convert from kg m-2 s-1 to mm s-1 assuming 1kg water = 1l water = 1dm^3 water. - # This is a 1:1 conversion, so we just change the units. - cube.units = "mm s-1" - # Convert the units to per hour. - cube.convert_units("mm hr-1") - else: - logging.warning( - "Precipitation units are not in 'kg m-2 s-1', skipping conversion" - ) - return cube - - -def _convert_visibility_units_callback(cube: iris.cube.Cube): - """To convert the unit of visibility from m to km.""" - varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name]) - if any("visibility" in name for name in varnames): - if cube.units == "m": - logging.info("Converting visibility units m to km.") - # Convert the units to km. 
- cube.convert_units("km") - else: - logging.warning("Visibility units are not in 'm', skipping conversion") - return cube - - def _custom_colourmap_precipitation(cube: iris.cube.Cube, cmap, levels, norm): """Return a custom colourmap for the current recipe.""" import matplotlib.colors as mcolors diff --git a/src/CSET/operators/read.py b/src/CSET/operators/read.py index 99fcc8ebc..c400b9c97 100644 --- a/src/CSET/operators/read.py +++ b/src/CSET/operators/read.py @@ -395,6 +395,7 @@ def callback(cube: iris.cube.Cube, field, filename: str): _fix_pressure_coord_callback(cube) _fix_um_radtime(cube) _fix_cell_methods(cube) + _convert_cube_units_callback(cube) _fix_lfric_cloud_base_altitude(cube) _lfric_time_callback(cube) _lfric_forecast_period_standard_name_callback(cube) @@ -776,6 +777,50 @@ def _fix_cell_methods(cube: iris.cube.Cube): ) +def _convert_cube_units_callback(cube: iris.cube.Cube): + """Adjust diagnostic units for specific variables. + + Some precipitation diagnostics are output with unit kg m-2 s-1 and are + converted here to mm hr-1. + + Visibility diagnostics are converted here from m to km to improve output + formatting. + """ + # Convert precipitation diagnostic units if required. + varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name]) + if any("surface_microphysical" in name for name in varnames): + if cube.units == "kg m-2 s-1": + logging.info( + "Converting precipitation rate units from kg m-2 s-1 to mm hr-1" + ) + # Convert from kg m-2 s-1 to mm s-1 assuming 1kg water = 1l water = 1dm^3 water. + # This is a 1:1 conversion, so we just change the units. + cube.units = "mm s-1" + # Convert the units to per hour. + cube.convert_units("mm hr-1") + elif cube.units == "kg m-2": + logging.info("Converting precipitation amount units from kg m-2 to mm") + # Convert from kg m-2 to mm assuming 1kg water = 1l water = 1dm^3 water. + # This is a 1:1 conversion, so we just change the units. 
+ cube.units = "mm" + else: + logging.info( + "Precipitation units are not in 'kg m-2 s-1' or 'kg m-2', skipping conversion" + ) + + # Convert visibility diagnostic units if required. + varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name]) + if any("visibility" in name for name in varnames): + if cube.units == "m": + logging.info("Converting visibility units m to km.") + # Convert the units to km. + cube.convert_units("km") + else: + logging.info("Visibility units are not in 'm', skipping conversion") + + return cube + + def _fix_lfric_cloud_base_altitude(cube: iris.cube.Cube): """Mask cloud_base_altitude diagnostic in regions with no cloud.""" varnames = filter(None, [cube.long_name, cube.standard_name, cube.var_name]) diff --git a/src/CSET/recipes/generic_surface_histogram_series.yaml b/src/CSET/recipes/generic_surface_histogram_series.yaml index bdd3b37e9..e7d03a3e2 100644 --- a/src/CSET/recipes/generic_surface_histogram_series.yaml +++ b/src/CSET/recipes/generic_surface_histogram_series.yaml @@ -2,10 +2,15 @@ category: Histogram title: Histogram $VARNAME description: | Extracts and plots the probability density of surface `$VARNAME`. It uses - [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html) - to plot the probability density so that the area under the histogram - integrates to 1. stacked is set to True so the sum of the histograms is + [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html). + + The default method generates a probability density so that the area under the histogram normalized to 1. + Histograms of rainfall or snowfall rate are plotted using a logarithmic scale. + Histograms of rainfall or snowfall amount are based on the + [Klingaman et al. 2017](https://gmd.copernicus.org/articles/10/57/2017/gmd-10-57-2017.html) + ASoP method, where histograms show the fractional contributions from each precipitation bin + to the total precipitation. 
The area under the histogram shows the total precipitation.
 
 steps:
   - operator: read.read_cubes
diff --git a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_all.yaml b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_all.yaml
index adb9a0b68..801a8e620 100644
--- a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_all.yaml
+++ b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_all.yaml
@@ -2,11 +2,15 @@ category: Histogram
 title: "Histogram $VARNAME\n Aggregation over all cases"
 description: |
   Extracts and plots the probability density of surface `$VARNAME`. It uses
-  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html)
-  to plot the probability density so that the area under the histogram
-  integrates to 1. stacked is set to True so the sum of the histograms is
-  normalized to 1. Case aggregation occurs by putting all of the data into
-  one histogram whilst still providing a single cube to the histogram operator.
+  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html).
+
+  The default method generates a probability density so that the area under the histogram
+  is normalized to 1.
+  Histograms of rainfall or snowfall rate are plotted using a logarithmic scale.
+  Histograms of rainfall or snowfall amount are based on the
+  [Klingaman et al. 2017](https://gmd.copernicus.org/articles/10/57/2017/gmd-10-57-2017.html)
+  ASoP method, where histograms show the fractional contributions from each precipitation bin
+  to the total precipitation. The area under the histogram shows the total precipitation.
 
 
steps:
   - operator: read.read_cubes
diff --git a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_hour_of_day.yaml b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_hour_of_day.yaml
index b92b841cc..1685dd932 100644
--- a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_hour_of_day.yaml
+++ b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_hour_of_day.yaml
@@ -2,11 +2,15 @@ category: Histogram
 title: "Histogram $VARNAME\n Aggregation by hour of day"
 description: |
   Extracts and plots the probability density of surface `$VARNAME`. It uses
-  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html)
-  to plot the probability density so that the area under the histogram
-  integrates to 1. stacked is set to True so the sum of the histograms is
-  normalized to 1. Case aggregation occurs by putting all of the data into
-  one histogram whilst still providing a single cube to the histogram operator.
+  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html).
+
+  The default method generates a probability density so that the area under the histogram
+  is normalized to 1.
+  Histograms of rainfall or snowfall rate are plotted using a logarithmic scale.
+  Histograms of rainfall or snowfall amount are based on the
+  [Klingaman et al. 2017](https://gmd.copernicus.org/articles/10/57/2017/gmd-10-57-2017.html)
+  ASoP method, where histograms show the fractional contributions from each precipitation bin
+  to the total precipitation. The area under the histogram shows the total precipitation.
 
 
steps:
   - operator: read.read_cubes
diff --git a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_lead_time.yaml b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_lead_time.yaml
index 81b88b33a..7bb0e2d4d 100644
--- a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_lead_time.yaml
+++ b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_lead_time.yaml
@@ -2,11 +2,15 @@ category: Histogram
 title: "Histogram $VARNAME\n Aggregation by lead time"
 description: |
   Extracts and plots the probability density of surface `$VARNAME`. It uses
-  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html)
-  to plot the probability density so that the area under the histogram
-  integrates to 1. stacked is set to True so the sum of the histograms is
-  normalized to 1. Case aggregation occurs by putting all of the data into
-  one histogram whilst still providing a single cube to the histogram operator.
+  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html).
+
+  The default method generates a probability density so that the area under the histogram
+  is normalized to 1.
+  Histograms of rainfall or snowfall rate are plotted using a logarithmic scale.
+  Histograms of rainfall or snowfall amount are based on the
+  [Klingaman et al. 2017](https://gmd.copernicus.org/articles/10/57/2017/gmd-10-57-2017.html)
+  ASoP method, where histograms show the fractional contributions from each precipitation bin
+  to the total precipitation. The area under the histogram shows the total precipitation.
 
 
steps:
   - operator: read.read_cubes
diff --git a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_validity_time.yaml b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_validity_time.yaml
index 35d2777d1..bcfed7647 100644
--- a/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_validity_time.yaml
+++ b/src/CSET/recipes/generic_surface_histogram_series_case_aggregation_validity_time.yaml
@@ -2,11 +2,15 @@ category: Histogram
 title: "Histogram $VARNAME\n Aggregation by validity time"
 description: |
   Extracts and plots the probability density of surface `$VARNAME`. It uses
-  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html)
-  to plot the probability density so that the area under the histogram
-  integrates to 1. stacked is set to True so the sum of the histograms is
-  normalized to 1. Case aggregation occurs by putting all of the data into
-  one histogram whilst still providing a single cube to the histogram operator.
+  [`plt.hist`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.hist.html).
+
+  The default method generates a probability density so that the area under the histogram
+  is normalized to 1.
+  Histograms of rainfall or snowfall rate are plotted using a logarithmic scale.
+  Histograms of rainfall or snowfall amount are based on the
+  [Klingaman et al. 2017](https://gmd.copernicus.org/articles/10/57/2017/gmd-10-57-2017.html)
+  ASoP method, where histograms show the fractional contributions from each precipitation bin
+  to the total precipitation. The area under the histogram shows the total precipitation.
 
 
steps: - operator: read.read_cubes diff --git a/src/CSET/workflow/files/opt/rose-suite-RAL3LFRIC.conf b/src/CSET/workflow/files/opt/rose-suite-RAL3LFRIC.conf index 43461afc2..574b2f57a 100644 --- a/src/CSET/workflow/files/opt/rose-suite-RAL3LFRIC.conf +++ b/src/CSET/workflow/files/opt/rose-suite-RAL3LFRIC.conf @@ -78,7 +78,7 @@ SPATIAL_SURFACE_FIELD=True SPATIAL_SURFACE_FIELD_AGGREGATION=True,True,False SUBAREA_EXTENT=16,16,16,16 SUBAREA_TYPE="gridcells" -SURFACE_FIELDS=["eastward_wind_at_10m", "northward_wind_at_10m", "visibility_in_air", "toa_upward_shortwave_flux", "toa_direct_shortwave_flux", "toa_upward_longwave_flux_radiative_timestep", "grid_surface_upward_sensible_heat_flux", "grid_surface_upward_latent_heat_flux", "grid_surface_temperature", "grid_surface_snow_amount", "surface_net_longwave_flux_radiative_timestep", "surface_downward_shortwave_flux", "surface_downward_longwave_flux", "relative_humidity_at_screen_level", "fog_fraction_at_screen_level", "dew_point_temperature_at_screen_level", "atmosphere_boundary_layer_thickness", "temperature_at_screen_level", "turbulent_mixing_height", "surface_net_shortwave_flux", "surface_microphysical_snowfall_rate", "surface_microphysical_rainfall_rate", "maximum_combined_cloud_amount_below_111m_asl", "low_type_cloud_area_fraction", "medium_type_cloud_area_fraction", "high_type_cloud_area_fraction", "ceilometer_filtered_combined_cloud_amount_maximum_random_overlap", "combined_cloud_amount_maximum_random_overlap", "atmosphere_mass_content_of_cloud_liquid_water", "atmosphere_mass_content_of_cloud_ice", "air_pressure_at_mean_sea_level", "cloud_base_altitude", "number_of_lightning_flashes_in_column", "radar_reflectivity_at_1km_above_the_surface", "atmosphere_mass_of_air_per_unit_area", "atmosphere_mass_content_of_water_vapor"] +SURFACE_FIELDS=["eastward_wind_at_10m", "northward_wind_at_10m", "visibility_in_air", "toa_upward_shortwave_flux", "toa_direct_shortwave_flux", "toa_upward_longwave_flux_radiative_timestep", 
"grid_surface_upward_sensible_heat_flux", "grid_surface_upward_latent_heat_flux", "grid_surface_temperature", "grid_surface_snow_amount", "surface_net_longwave_flux_radiative_timestep", "surface_downward_shortwave_flux", "surface_downward_longwave_flux", "relative_humidity_at_screen_level", "fog_fraction_at_screen_level", "dew_point_temperature_at_screen_level", "atmosphere_boundary_layer_thickness", "temperature_at_screen_level", "turbulent_mixing_height", "surface_net_shortwave_flux", "surface_microphysical_snowfall_rate", "surface_microphysical_rainfall_rate", "surface_microphysical_rainfall_amount", "maximum_combined_cloud_amount_below_111m_asl", "low_type_cloud_area_fraction", "medium_type_cloud_area_fraction", "high_type_cloud_area_fraction", "ceilometer_filtered_combined_cloud_amount_maximum_random_overlap", "combined_cloud_amount_maximum_random_overlap", "atmosphere_mass_content_of_cloud_liquid_water", "atmosphere_mass_content_of_cloud_ice", "air_pressure_at_mean_sea_level", "cloud_base_altitude", "number_of_lightning_flashes_in_column", "radar_reflectivity_at_1km_above_the_surface", "atmosphere_mass_of_air_per_unit_area", "atmosphere_mass_content_of_water_vapor"] SURFACE_SINGLE_POINT_TIME_SERIES=False TIMESERIES_MLEVEL_FIELD=False TIMESERIES_MLEVEL_FIELD_AGGREGATION=False,False,False diff --git a/tests/operators/test_plots.py b/tests/operators/test_plots.py index 08a14c2bf..e7a303446 100644 --- a/tests/operators/test_plots.py +++ b/tests/operators/test_plots.py @@ -701,53 +701,6 @@ def test_levels_postage_stamp_spatial_plot(ensemble_cube, tmp_working_dir): ) -def test_convert_precipitation_units(cube, caplog): - """Test precipitation conversions prior to plotting.""" - cube.rename("surface_microphysical_rainfall_rate") - # Check unit conversion - cube.units = "kg m-2 s-1" - with caplog.at_level(logging.INFO): - cube = plot._convert_precipitation_units_callback(cube) - _, level, message = caplog.record_tuples[0] - assert level == logging.INFO - assert 
message == "Converting precipitation units from kg m-2 s-1 to mm hr-1" - assert cube.units == "mm hr-1" - - -def test_convert_precipitation_no_units(cube, caplog): - """Test precipitation conversions prior to plotting.""" - # Check no processing for non-expected units - cube.rename("surface_microphysical_rainfall_rate") - cube.units = "unknown" - cube = plot._convert_precipitation_units_callback(cube) - _, level, message = caplog.record_tuples[0] - assert level == logging.WARNING - assert message == "Precipitation units are not in 'kg m-2 s-1', skipping conversion" - assert cube.units == "unknown" - - -def test_convert_visibility_units(): - """Test visibility units conversions prior to plotting.""" - cube = iris.cube.Cube( - np.array([1000, 2000, 3000]), standard_name="visibility_in_air", units="m" - ) - cube = plot._convert_visibility_units_callback(cube) - assert cube.units == "km" - assert np.allclose(cube.data, np.array([1, 2, 3])) - - -def test_convert_visibility_no_units(cube, caplog): - """Check no processing for unexpected units in visibility conversions.""" - cube = iris.cube.Cube( - np.array([1000, 2000, 3000]), standard_name="visibility_in_air", units="unknown" - ) - cube = plot._convert_visibility_units_callback(cube) - _, level, message = caplog.record_tuples[0] - assert level == logging.WARNING - assert message == "Visibility units are not in 'm', skipping conversion" - assert cube.units == "unknown" - - def test_append_to_plot_index(monkeypatch, tmp_working_dir): """Ensure the datetime is written along with the plot index.""" # Setup environment and required file. 
diff --git a/tests/operators/test_read.py b/tests/operators/test_read.py index cbf729fb5..6bf394f6f 100644 --- a/tests/operators/test_read.py +++ b/tests/operators/test_read.py @@ -664,6 +664,71 @@ def test_lfric_time_callback_unknown_units(slammed_lfric_cube, caplog): assert message == "Unrecognised base time unit: unknown" +def test_convert_precipitation_units(cube, caplog): + """Test precipitation conversions prior to plotting.""" + cube.rename("surface_microphysical_rainfall_rate") + # Check unit conversion + cube.units = "kg m-2 s-1" + with caplog.at_level(logging.INFO): + cube = read._convert_cube_units_callback(cube) + _, level, message = caplog.record_tuples[0] + assert level == logging.INFO + assert message == "Converting precipitation rate units from kg m-2 s-1 to mm hr-1" + assert cube.units == "mm hr-1" + + +def test_convert_precipitation_no_units(cube, caplog): + """Test precipitation conversions prior to plotting.""" + # Check no processing for non-expected units + cube.rename("surface_microphysical_rainfall_rate") + cube.units = "unknown" + with caplog.at_level(logging.INFO): + cube = read._convert_cube_units_callback(cube) + _, level, message = caplog.record_tuples[0] + assert level == logging.INFO + assert ( + message + == "Precipitation units are not in 'kg m-2 s-1' or 'kg m-2', skipping conversion" + ) + assert cube.units == "unknown" + + +def test_convert_precipitation_amount_units(cube, caplog): + """Test precipitation conversions prior to plotting.""" + cube.rename("surface_microphysical_rainfall_amount") + # Check unit conversion + cube.units = "kg m-2" + with caplog.at_level(logging.INFO): + cube = read._convert_cube_units_callback(cube) + _, level, message = caplog.record_tuples[0] + assert level == logging.INFO + assert message == "Converting precipitation amount units from kg m-2 to mm" + assert cube.units == "mm" + + +def test_convert_visibility_units(): + """Test visibility units conversions prior to plotting.""" + cube = 
iris.cube.Cube( + np.array([1000, 2000, 3000]), standard_name="visibility_in_air", units="m" + ) + cube = read._convert_cube_units_callback(cube) + assert cube.units == "km" + assert np.allclose(cube.data, np.array([1, 2, 3])) + + +def test_convert_visibility_no_units(cube, caplog): + """Check no processing for unexpected units in visibility conversions.""" + cube = iris.cube.Cube( + np.array([1000, 2000, 3000]), standard_name="visibility_in_air", units="unknown" + ) + with caplog.at_level(logging.INFO): + cube = read._convert_cube_units_callback(cube) + _, level, message = caplog.record_tuples[0] + assert level == logging.INFO + assert message == "Visibility units are not in 'm', skipping conversion" + assert cube.units == "unknown" + + def test_fix_lfric_cloud_base_altitude(): """Check that lfric cloud_base_altitude callback applies mask.""" cube = iris.cube.Cube(np.arange(151) + 10.0, long_name="cloud_base_altitude") @@ -671,7 +736,6 @@ def test_fix_lfric_cloud_base_altitude(): assert not np.ma.is_masked(cube.data) # Apply fix callback to mask > 144kft. read._fix_lfric_cloud_base_altitude(cube) - print(np.nanmin(cube.data), np.nanmax(cube.data)) assert np.nanmax(cube.data) == 144.0 assert np.ma.is_masked(cube.data)