From 1f079a632c635699990ac9f7c89019026156cf87 Mon Sep 17 00:00:00 2001
From: Martin Yeo
Date: Mon, 4 Mar 2024 13:26:58 +0000
Subject: [PATCH 1/4] Revert "Merge branch 'main' into v3.7.x"

This reverts commit 875e89028da0313207ae643de2385a60fc8a480f, reversing changes made to 932fd93234e9f888ab40e28ff56a0d7fefb9f18f.
---
 .../{benchmarks_run.yml => benchmark.yml} | 80 +-
 .github/workflows/benchmarks_report.yml | 83 -
 .github/workflows/ci-manifest.yml | 2 +-
 .github/workflows/ci-tests.yml | 6 +-
 .github/workflows/ci-wheels.yml | 6 +-
 .github/workflows/refresh-lockfiles.yml | 2 +-
 .github/workflows/stale.yml | 14 +-
 .gitignore | 1 -
 .pre-commit-config.yaml | 8 +-
 COPYING | 674 +++++++
 COPYING.LESSER | 165 ++
 LICENSE | 29 -
 MANIFEST.in | 3 +-
 README.md | 2 +-
 benchmarks/asv.conf.json | 1 +
 benchmarks/asv_delegated_conda.py | 9 +-
 benchmarks/benchmarks/__init__.py | 5 +-
 benchmarks/benchmarks/aux_factory.py | 5 +-
 benchmarks/benchmarks/coords.py | 5 +-
 benchmarks/benchmarks/cperf/__init__.py | 7 +-
 benchmarks/benchmarks/cperf/equality.py | 5 +-
 benchmarks/benchmarks/cperf/load.py | 5 +-
 benchmarks/benchmarks/cperf/save.py | 5 +-
 benchmarks/benchmarks/cube.py | 5 +-
 .../benchmarks/experimental/__init__.py | 5 +-
 .../benchmarks/experimental/ugrid/__init__.py | 5 +-
 .../experimental/ugrid/regions_combine.py | 15 +-
 .../benchmarks/generate_data/__init__.py | 5 +-
 benchmarks/benchmarks/generate_data/stock.py | 7 +-
 benchmarks/benchmarks/generate_data/ugrid.py | 5 +-
 .../benchmarks/generate_data/um_files.py | 5 +-
 benchmarks/benchmarks/import_iris.py | 5 +-
 benchmarks/benchmarks/iterate.py | 5 +-
 benchmarks/benchmarks/load/__init__.py | 9 +-
 benchmarks/benchmarks/load/ugrid.py | 7 +-
 .../benchmarks/metadata_manager_factory.py | 5 +-
 benchmarks/benchmarks/mixin.py | 5 +-
 benchmarks/benchmarks/plot.py | 5 +-
 benchmarks/benchmarks/regridding.py | 5 +-
 benchmarks/benchmarks/save.py | 11 +-
 benchmarks/benchmarks/sperf/__init__.py | 5 +-
 .../benchmarks/sperf/combine_regions.py | 5 +-
 benchmarks/benchmarks/sperf/equality.py | 5 +-
 benchmarks/benchmarks/sperf/load.py | 5 +-
 benchmarks/benchmarks/sperf/save.py | 5 +-
 benchmarks/benchmarks/trajectory.py | 5 +-
 benchmarks/bm_runner.py | 385 +---
 docs/gallery_code/meteorology/plot_COP_1d.py | 2 +-
 .../gallery_code/meteorology/plot_COP_maps.py | 22 +-
 docs/gallery_tests/__init__.py | 5 +-
 docs/gallery_tests/conftest.py | 5 +-
 docs/gallery_tests/test_gallery_examples.py | 5 +-
 docs/src/_templates/layout.html | 20 +
 docs/src/community/index.rst | 2 -
 docs/src/community/iris_xarray.rst | 1 -
 docs/src/community/phrasebook.rst | 66 -
 docs/src/conf.py | 23 +-
 docs/src/copyright.rst | 9 +-
 .../contributing_documentation_easy.rst | 6 +-
 .../contributing_getting_involved.rst | 1 +
 .../gitwash/development_workflow.rst | 21 +-
 .../developers_guide/gitwash/git_links.inc | 15 +
 .../dask_best_practices/index.rst | 10 +-
 .../src/further_topics/filtering_warnings.rst | 271 ---
 docs/src/further_topics/index.rst | 20 -
 docs/src/further_topics/lenient_maths.rst | 4 +-
 docs/src/further_topics/lenient_metadata.rst | 10 +-
 docs/src/further_topics/metadata.rst | 67 +-
 docs/src/further_topics/netcdf_io.rst | 140 --
 docs/src/further_topics/ugrid/data_model.rst | 40 +-
 docs/src/sphinxext/api_rst_formatting.py | 5 +-
 docs/src/techpapers/index.rst | 13 +
 .../missing_data_handling.rst | 0
 .../um_files_loading.rst | 46 +-
 docs/src/userguide/index.rst | 18 +-
 docs/src/userguide/iris_cubes.rst | 5 +-
 docs/src/userguide/navigating_a_cube.rst | 8 +-
 docs/src/userguide/real_and_lazy_data.rst | 23 +-
docs/src/voted_issues.rst | 2 + docs/src/whatsnew/1.4.rst | 2 +- docs/src/whatsnew/1.7.rst | 4 +- docs/src/whatsnew/3.7.rst | 6 +- docs/src/whatsnew/index.rst | 3 +- docs/src/whatsnew/latest.rst | 184 -- docs/src/whatsnew/latest.rst.template | 107 -- lib/iris/__init__.py | 24 +- lib/iris/_concatenate.py | 8 +- lib/iris/_constraints.py | 5 +- lib/iris/_data_manager.py | 5 +- lib/iris/_deprecation.py | 15 +- lib/iris/_lazy_data.py | 208 +- lib/iris/_merge.py | 28 +- lib/iris/_representation/__init__.py | 5 +- lib/iris/_representation/cube_printout.py | 5 +- lib/iris/_representation/cube_summary.py | 5 +- lib/iris/analysis/__init__.py | 5 +- lib/iris/analysis/_area_weighted.py | 1046 +++++++--- lib/iris/analysis/_grid_angles.py | 5 +- lib/iris/analysis/_interpolation.py | 5 +- lib/iris/analysis/_regrid.py | 8 +- lib/iris/analysis/calculus.py | 11 +- lib/iris/analysis/cartography.py | 28 +- lib/iris/analysis/geometry.py | 13 +- lib/iris/analysis/maths.py | 8 +- lib/iris/analysis/stats.py | 5 +- lib/iris/analysis/trajectory.py | 9 +- lib/iris/aux_factory.py | 73 +- lib/iris/common/__init__.py | 5 +- lib/iris/common/_split_attribute_dicts.py | 125 -- lib/iris/common/lenient.py | 5 +- lib/iris/common/metadata.py | 52 +- lib/iris/common/mixin.py | 38 +- lib/iris/common/resolve.py | 5 +- lib/iris/config.py | 68 +- lib/iris/coord_categorisation.py | 82 +- lib/iris/coord_systems.py | 208 +- lib/iris/coords.py | 63 +- lib/iris/cube.py | 394 +--- lib/iris/exceptions.py | 214 +-- lib/iris/experimental/__init__.py | 5 +- lib/iris/experimental/animate.py | 5 +- lib/iris/experimental/raster.py | 5 +- lib/iris/experimental/regrid.py | 8 +- lib/iris/experimental/regrid_conservative.py | 5 +- lib/iris/experimental/representation.py | 5 +- lib/iris/experimental/stratify.py | 5 +- lib/iris/experimental/ugrid/__init__.py | 5 +- lib/iris/experimental/ugrid/cf.py | 74 +- lib/iris/experimental/ugrid/load.py | 40 +- lib/iris/experimental/ugrid/mesh.py | 5 +- lib/iris/experimental/ugrid/metadata.py | 5 +- lib/iris/experimental/ugrid/save.py | 5 +- lib/iris/experimental/ugrid/utils.py | 5 +- lib/iris/fileformats/__init__.py | 5 +- lib/iris/fileformats/_ff.py | 40 +- lib/iris/fileformats/_ff_cross_references.py | 5 +- .../fileformats/_nc_load_rules/__init__.py | 5 +- .../fileformats/_nc_load_rules/actions.py | 42 +- lib/iris/fileformats/_nc_load_rules/engine.py | 5 +- .../fileformats/_nc_load_rules/helpers.py | 195 +- lib/iris/fileformats/_pp_lbproc_pairs.py | 5 +- .../_structured_array_identification.py | 7 +- lib/iris/fileformats/abf.py | 5 +- lib/iris/fileformats/cf.py | 55 +- lib/iris/fileformats/dot.py | 5 +- lib/iris/fileformats/name.py | 5 +- lib/iris/fileformats/name_loaders.py | 15 +- lib/iris/fileformats/netcdf/__init__.py | 8 +- lib/iris/fileformats/netcdf/_dask_locks.py | 85 +- .../fileformats/netcdf/_thread_safe_nc.py | 109 +- lib/iris/fileformats/netcdf/loader.py | 314 +-- lib/iris/fileformats/netcdf/saver.py | 893 ++++----- lib/iris/fileformats/nimrod.py | 5 +- lib/iris/fileformats/nimrod_load_rules.py | 31 +- lib/iris/fileformats/pp.py | 52 +- lib/iris/fileformats/pp_load_rules.py | 5 +- lib/iris/fileformats/pp_save_rules.py | 22 +- lib/iris/fileformats/rules.py | 15 +- lib/iris/fileformats/um/__init__.py | 5 +- lib/iris/fileformats/um/_fast_load.py | 5 +- .../um/_fast_load_structured_fields.py | 5 +- lib/iris/fileformats/um/_ff_replacement.py | 5 +- .../um/_optimal_array_structuring.py | 5 +- lib/iris/fileformats/um_cf_map.py | 5 +- lib/iris/io/__init__.py | 147 +- lib/iris/io/format_picker.py | 135 +- 
lib/iris/iterate.py | 10 +- lib/iris/palette.py | 5 +- lib/iris/pandas.py | 14 +- lib/iris/plot.py | 20 +- lib/iris/quickplot.py | 22 +- lib/iris/symbols.py | 5 +- lib/iris/tests/__init__.py | 5 +- lib/iris/tests/experimental/__init__.py | 5 +- .../tests/experimental/regrid/__init__.py | 5 +- ..._area_weighted_rectilinear_src_and_grid.py | 63 +- .../test_regrid_conservative_via_esmpy.py | 5 +- lib/iris/tests/experimental/test_raster.py | 5 +- lib/iris/tests/graphics/__init__.py | 5 +- lib/iris/tests/graphics/idiff.py | 25 +- lib/iris/tests/graphics/recreate_imagerepo.py | 5 +- lib/iris/tests/integration/__init__.py | 5 +- .../tests/integration/analysis/__init__.py | 5 +- .../analysis/test_area_weighted.py | 5 +- .../attrs_matrix_results_load.json | 1019 ---------- .../attrs_matrix_results_roundtrip.json | 983 ---------- .../attrs_matrix_results_save.json | 983 ---------- .../tests/integration/aux_factory/__init__.py | 5 +- .../aux_factory/test_OceanSigmaZFactory.py | 5 +- .../tests/integration/concatenate/__init__.py | 5 +- .../concatenate/test_concatenate.py | 5 +- .../integration/experimental/__init__.py | 5 +- .../experimental/test_CubeRepresentation.py | 5 +- .../test_regrid_ProjectedUnstructured.py | 5 +- .../experimental/test_ugrid_load.py | 25 +- .../experimental/test_ugrid_save.py | 5 +- .../tests/integration/fast_load/__init__.py | 5 +- .../integration/fast_load/test_fast_load.py | 5 +- lib/iris/tests/integration/merge/__init__.py | 5 +- .../tests/integration/merge/test_merge.py | 5 +- lib/iris/tests/integration/netcdf/__init__.py | 5 +- .../integration/netcdf/test__dask_locks.py | 5 +- .../integration/netcdf/test_attributes.py | 5 +- .../integration/netcdf/test_aux_factories.py | 5 +- .../integration/netcdf/test_coord_systems.py | 5 +- .../integration/netcdf/test_delayed_save.py | 52 +- .../tests/integration/netcdf/test_general.py | 11 +- .../netcdf/test_self_referencing.py | 14 +- .../integration/netcdf/test_thread_safety.py | 7 +- lib/iris/tests/integration/plot/__init__.py | 5 +- .../tests/integration/plot/test_animate.py | 5 +- .../tests/integration/plot/test_colorbar.py | 5 +- .../tests/integration/plot/test_netcdftime.py | 5 +- .../tests/integration/plot/test_nzdateline.py | 5 +- .../integration/plot/test_plot_2d_coords.py | 5 +- .../integration/plot/test_vector_plots.py | 5 +- lib/iris/tests/integration/test_Datums.py | 5 +- .../tests/integration/test_PartialDateTime.py | 5 +- .../tests/integration/test_climatology.py | 5 +- lib/iris/tests/integration/test_cube.py | 5 +- lib/iris/tests/integration/test_ff.py | 5 +- .../integration/test_netcdf__loadsaveattrs.py | 1678 ----------------- lib/iris/tests/integration/test_new_axis.py | 5 +- lib/iris/tests/integration/test_pickle.py | 5 +- lib/iris/tests/integration/test_pp.py | 11 +- .../test_pp_constrained_load_cubes.py | 5 +- .../integration/test_regrid_equivalence.py | 5 +- lib/iris/tests/integration/test_regridding.py | 5 +- lib/iris/tests/integration/test_subset.py | 5 +- lib/iris/tests/integration/test_trajectory.py | 5 +- lib/iris/tests/integration/um/__init__.py | 5 +- .../tests/integration/um/test_fieldsfile.py | 5 +- lib/iris/tests/pp.py | 5 +- lib/iris/tests/results/imagerepo.json | 34 +- lib/iris/tests/stock/__init__.py | 5 +- lib/iris/tests/stock/_stock_2d_latlons.py | 5 +- lib/iris/tests/stock/mesh.py | 5 +- lib/iris/tests/stock/netcdf.py | 5 +- lib/iris/tests/system_test.py | 5 +- lib/iris/tests/test_abf.py | 5 +- lib/iris/tests/test_aggregate_by.py | 5 +- lib/iris/tests/test_analysis.py | 5 +- 
lib/iris/tests/test_analysis_calculus.py | 5 +- lib/iris/tests/test_basic_maths.py | 5 +- lib/iris/tests/test_cartography.py | 5 +- lib/iris/tests/test_cdm.py | 5 +- lib/iris/tests/test_cell.py | 5 +- lib/iris/tests/test_cf.py | 5 +- lib/iris/tests/test_coding_standards.py | 71 +- lib/iris/tests/test_concatenate.py | 18 +- lib/iris/tests/test_constraints.py | 5 +- lib/iris/tests/test_coord_api.py | 5 +- lib/iris/tests/test_coord_categorisation.py | 197 ++ lib/iris/tests/test_coordsystem.py | 8 +- lib/iris/tests/test_cube.py | 5 +- lib/iris/tests/test_cube_to_pp.py | 5 +- lib/iris/tests/test_ff.py | 5 +- lib/iris/tests/test_file_load.py | 5 +- lib/iris/tests/test_file_save.py | 5 +- lib/iris/tests/test_hybrid.py | 14 +- lib/iris/tests/test_image_json.py | 5 +- lib/iris/tests/test_imports.py | 5 +- lib/iris/tests/test_intersect.py | 5 +- lib/iris/tests/test_io_init.py | 5 +- lib/iris/tests/test_iterate.py | 10 +- lib/iris/tests/test_lazy_aggregate_by.py | 5 +- lib/iris/tests/test_load.py | 5 +- lib/iris/tests/test_mapping.py | 5 +- lib/iris/tests/test_merge.py | 87 +- lib/iris/tests/test_name.py | 5 +- lib/iris/tests/test_netcdf.py | 10 +- lib/iris/tests/test_nimrod.py | 5 +- lib/iris/tests/test_peak.py | 5 +- lib/iris/tests/test_pickling.py | 5 +- lib/iris/tests/test_plot.py | 5 +- lib/iris/tests/test_pp_cf.py | 5 +- lib/iris/tests/test_pp_module.py | 5 +- lib/iris/tests/test_pp_stash.py | 5 +- lib/iris/tests/test_pp_to_cube.py | 5 +- lib/iris/tests/test_quickplot.py | 5 +- lib/iris/tests/test_std_names.py | 5 +- lib/iris/tests/test_uri_callback.py | 5 +- lib/iris/tests/test_util.py | 5 +- lib/iris/tests/unit/__init__.py | 5 +- lib/iris/tests/unit/analysis/__init__.py | 5 +- .../unit/analysis/area_weighted/__init__.py | 5 +- .../test_AreaWeightedRegridder.py | 7 +- .../unit/analysis/cartography/__init__.py | 5 +- .../cartography/test__get_lon_lat_coords.py | 5 +- .../cartography/test__quadrant_area.py | 5 +- .../analysis/cartography/test__xy_range.py | 5 +- .../analysis/cartography/test_area_weights.py | 5 +- .../cartography/test_gridcell_angles.py | 5 +- .../unit/analysis/cartography/test_project.py | 9 +- .../cartography/test_rotate_grid_vectors.py | 5 +- .../analysis/cartography/test_rotate_winds.py | 5 +- .../tests/unit/analysis/geometry/__init__.py | 5 +- .../test__extract_relevant_cube_slice.py | 5 +- .../geometry/test_geometry_area_weights.py | 10 +- .../unit/analysis/interpolation/__init__.py | 5 +- .../test_RectilinearInterpolator.py | 5 +- .../interpolation/test_get_xy_dim_coords.py | 5 +- .../tests/unit/analysis/maths/__init__.py | 5 +- .../analysis/maths/test__arith__dask_array.py | 5 +- .../maths/test__arith__derived_coords.py | 5 +- .../analysis/maths/test__arith__meshcoords.py | 5 +- .../unit/analysis/maths/test__get_dtype.py | 5 +- .../maths/test__inplace_common_checks.py | 5 +- .../unit/analysis/maths/test__output_dtype.py | 5 +- .../tests/unit/analysis/maths/test_add.py | 5 +- .../tests/unit/analysis/maths/test_divide.py | 5 +- .../unit/analysis/maths/test_multiply.py | 5 +- .../unit/analysis/maths/test_subtract.py | 5 +- .../tests/unit/analysis/regrid/__init__.py | 5 +- .../regrid/test_RectilinearRegridder.py | 5 +- .../regrid/test__CurvilinearRegridder.py | 5 +- .../analysis/scipy_interpolate/__init__.py | 5 +- .../test__RegularGridInterpolator.py | 5 +- .../tests/unit/analysis/stats/__init__.py | 5 +- .../unit/analysis/stats/test_pearsonr.py | 5 +- .../tests/unit/analysis/test_Aggregator.py | 5 +- .../tests/unit/analysis/test_AreaWeighted.py | 5 +- 
lib/iris/tests/unit/analysis/test_COUNT.py | 5 +- lib/iris/tests/unit/analysis/test_Linear.py | 5 +- lib/iris/tests/unit/analysis/test_MAX.py | 5 +- lib/iris/tests/unit/analysis/test_MAX_RUN.py | 5 +- lib/iris/tests/unit/analysis/test_MEAN.py | 5 +- lib/iris/tests/unit/analysis/test_MIN.py | 5 +- lib/iris/tests/unit/analysis/test_Nearest.py | 5 +- .../tests/unit/analysis/test_PERCENTILE.py | 7 +- .../tests/unit/analysis/test_PROPORTION.py | 5 +- .../analysis/test_PercentileAggregator.py | 5 +- .../tests/unit/analysis/test_PointInCell.py | 5 +- lib/iris/tests/unit/analysis/test_RMS.py | 5 +- lib/iris/tests/unit/analysis/test_STD_DEV.py | 5 +- lib/iris/tests/unit/analysis/test_SUM.py | 5 +- lib/iris/tests/unit/analysis/test_VARIANCE.py | 5 +- .../tests/unit/analysis/test_WPERCENTILE.py | 5 +- .../test_WeightedPercentileAggregator.py | 5 +- .../analysis/test__axis_to_single_trailing.py | 5 +- .../unit/analysis/trajectory/__init__.py | 5 +- .../analysis/trajectory/test_Trajectory.py | 5 +- ...t_UnstructuredNearestNeighbourRegridder.py | 5 +- ...est__nearest_neighbour_indices_ndcoords.py | 5 +- .../analysis/trajectory/test_interpolate.py | 5 +- lib/iris/tests/unit/aux_factory/__init__.py | 5 +- .../test_AtmosphereSigmaFactory.py | 5 +- .../unit/aux_factory/test_AuxCoordFactory.py | 5 +- .../aux_factory/test_HybridPressureFactory.py | 5 +- .../unit/aux_factory/test_OceanSFactory.py | 5 +- .../unit/aux_factory/test_OceanSg1Factory.py | 5 +- .../unit/aux_factory/test_OceanSg2Factory.py | 5 +- .../aux_factory/test_OceanSigmaFactory.py | 5 +- .../aux_factory/test_OceanSigmaZFactory.py | 5 +- lib/iris/tests/unit/common/__init__.py | 5 +- .../tests/unit/common/lenient/__init__.py | 5 +- .../tests/unit/common/lenient/test_Lenient.py | 5 +- .../unit/common/lenient/test__Lenient.py | 5 +- .../common/lenient/test__lenient_client.py | 5 +- .../common/lenient/test__lenient_service.py | 5 +- .../unit/common/lenient/test__qualname.py | 5 +- .../tests/unit/common/metadata/__init__.py | 5 +- .../test_AncillaryVariableMetadata.py | 5 +- .../unit/common/metadata/test_BaseMetadata.py | 5 +- .../metadata/test_CellMeasureMetadata.py | 5 +- .../common/metadata/test_CoordMetadata.py | 5 +- .../unit/common/metadata/test_CubeMetadata.py | 1198 +++++------- .../common/metadata/test__NamedTupleMeta.py | 5 +- .../unit/common/metadata/test_hexdigest.py | 5 +- .../common/metadata/test_metadata_filter.py | 5 +- .../metadata/test_metadata_manager_factory.py | 5 +- lib/iris/tests/unit/common/mixin/__init__.py | 5 +- .../unit/common/mixin/test_CFVariableMixin.py | 5 +- .../common/mixin/test_LimitedAttributeDict.py | 7 +- .../mixin/test__get_valid_standard_name.py | 5 +- .../tests/unit/common/resolve/__init__.py | 5 +- .../tests/unit/common/resolve/test_Resolve.py | 5 +- lib/iris/tests/unit/concatenate/__init__.py | 5 +- .../unit/concatenate/test__CoordMetaData.py | 5 +- .../unit/concatenate/test__CoordSignature.py | 5 +- .../unit/concatenate/test__CubeSignature.py | 5 +- .../unit/concatenate/test_concatenate.py | 5 +- lib/iris/tests/unit/config/__init__.py | 5 +- lib/iris/tests/unit/config/test_NetCDF.py | 5 +- lib/iris/tests/unit/conftest.py | 14 - lib/iris/tests/unit/constraints/__init__.py | 5 +- .../constraints/test_Constraint_equality.py | 5 +- .../unit/constraints/test_NameConstraint.py | 5 +- .../unit/coord_categorisation/__init__.py | 5 +- .../test_add_categorised_coord.py | 5 +- .../coord_categorisation/test_add_hour.py | 5 +- .../test_coord_categorisation.py | 251 --- lib/iris/tests/unit/coord_systems/__init__.py | 5 
+- .../coord_systems/test_AlbersEqualArea.py | 5 +- .../tests/unit/coord_systems/test_GeogCS.py | 5 +- .../unit/coord_systems/test_Geostationary.py | 5 +- .../test_LambertAzimuthalEqualArea.py | 5 +- .../coord_systems/test_LambertConformal.py | 5 +- .../tests/unit/coord_systems/test_Mercator.py | 5 +- .../coord_systems/test_ObliqueMercator.py | 164 -- .../unit/coord_systems/test_Orthographic.py | 5 +- .../coord_systems/test_PolarStereographic.py | 5 +- .../coord_systems/test_RotatedMercator.py | 38 - .../unit/coord_systems/test_RotatedPole.py | 5 +- .../unit/coord_systems/test_Stereographic.py | 5 +- .../coord_systems/test_TransverseMercator.py | 5 +- .../coord_systems/test_VerticalPerspective.py | 5 +- lib/iris/tests/unit/coords/__init__.py | 5 +- .../unit/coords/test_AncillaryVariable.py | 5 +- lib/iris/tests/unit/coords/test_AuxCoord.py | 5 +- lib/iris/tests/unit/coords/test_Cell.py | 5 +- .../tests/unit/coords/test_CellMeasure.py | 5 +- lib/iris/tests/unit/coords/test_CellMethod.py | 5 +- lib/iris/tests/unit/coords/test_Coord.py | 53 +- lib/iris/tests/unit/coords/test_DimCoord.py | 5 +- .../unit/coords/test__DimensionalMetadata.py | 5 +- lib/iris/tests/unit/cube/__init__.py | 5 +- lib/iris/tests/unit/cube/test_Cube.py | 52 +- .../tests/unit/cube/test_CubeAttrsDict.py | 407 ---- lib/iris/tests/unit/cube/test_CubeList.py | 5 +- .../unit/cube/test_Cube__aggregated_by.py | 5 +- .../tests/unit/cube/test_Cube__operators.py | 5 +- lib/iris/tests/unit/data_manager/__init__.py | 5 +- .../unit/data_manager/test_DataManager.py | 5 +- lib/iris/tests/unit/experimental/__init__.py | 5 +- .../unit/experimental/raster/__init__.py | 5 +- .../raster/test_export_geotiff.py | 5 +- .../unit/experimental/regrid/__init__.py | 5 +- ..._area_weighted_rectilinear_src_and_grid.py | 5 +- ...rid_weighted_curvilinear_to_rectilinear.py | 5 +- .../experimental/representation/__init__.py | 5 +- .../test_CubeListRepresentation.py | 5 +- .../representation/test_CubeRepresentation.py | 5 +- .../unit/experimental/stratify/__init__.py | 5 +- .../experimental/stratify/test_relevel.py | 5 +- .../tests/unit/experimental/ugrid/__init__.py | 5 +- .../unit/experimental/ugrid/cf/__init__.py | 5 +- ...test_CFUGridAuxiliaryCoordinateVariable.py | 64 +- .../cf/test_CFUGridConnectivityVariable.py | 61 +- .../ugrid/cf/test_CFUGridGroup.py | 5 +- .../ugrid/cf/test_CFUGridMeshVariable.py | 55 +- .../ugrid/cf/test_CFUGridReader.py | 5 +- .../unit/experimental/ugrid/load/__init__.py | 5 +- .../ugrid/load/test_ParseUgridOnLoad.py | 5 +- .../experimental/ugrid/load/test_load_mesh.py | 5 +- .../ugrid/load/test_load_meshes.py | 5 +- .../unit/experimental/ugrid/mesh/__init__.py | 5 +- .../ugrid/mesh/test_Connectivity.py | 9 +- .../unit/experimental/ugrid/mesh/test_Mesh.py | 9 +- .../experimental/ugrid/mesh/test_MeshCoord.py | 11 +- .../ugrid/mesh/test_Mesh__from_coords.py | 5 +- .../experimental/ugrid/metadata/__init__.py | 5 +- .../metadata/test_ConnectivityMetadata.py | 5 +- .../ugrid/metadata/test_MeshCoordMetadata.py | 5 +- .../ugrid/metadata/test_MeshMetadata.py | 5 +- .../unit/experimental/ugrid/utils/__init__.py | 5 +- .../ugrid/utils/test_recombine_submeshes.py | 5 +- lib/iris/tests/unit/fileformats/__init__.py | 5 +- .../tests/unit/fileformats/abf/__init__.py | 5 +- .../unit/fileformats/abf/test_ABFField.py | 5 +- .../tests/unit/fileformats/cf/__init__.py | 5 +- .../tests/unit/fileformats/cf/test_CFGroup.py | 5 +- .../unit/fileformats/cf/test_CFReader.py | 5 +- .../tests/unit/fileformats/dot/__init__.py | 5 +- 
.../unit/fileformats/dot/test__dot_path.py | 5 +- .../tests/unit/fileformats/ff/__init__.py | 5 +- .../unit/fileformats/ff/test_ArakawaC.py | 5 +- .../tests/unit/fileformats/ff/test_ENDGame.py | 5 +- .../tests/unit/fileformats/ff/test_FF2PP.py | 9 +- .../unit/fileformats/ff/test_FFHeader.py | 10 +- .../tests/unit/fileformats/ff/test_Grid.py | 5 +- .../unit/fileformats/ff/test_NewDynamics.py | 5 +- .../unit/fileformats/name_loaders/__init__.py | 5 +- .../name_loaders/test__build_cell_methods.py | 10 +- ...test__build_lat_lon_for_NAME_timeseries.py | 5 +- .../test__calc_integration_period.py | 5 +- .../name_loaders/test__cf_height_from_name.py | 5 +- .../name_loaders/test__generate_cubes.py | 5 +- .../fileformats/nc_load_rules/__init__.py | 5 +- .../nc_load_rules/actions/__init__.py | 8 +- .../actions/test__grid_mappings.py | 5 +- .../actions/test__hybrid_formulae.py | 5 +- .../actions/test__latlon_dimcoords.py | 5 +- .../actions/test__miscellaneous.py | 5 +- .../actions/test__time_coords.py | 5 +- .../nc_load_rules/engine/__init__.py | 5 +- .../nc_load_rules/engine/test_engine.py | 5 +- .../nc_load_rules/helpers/__init__.py | 5 +- ...ild_albers_equal_area_coordinate_system.py | 5 +- .../helpers/test_build_ancil_var.py | 5 +- .../test_build_auxiliary_coordinate.py | 5 +- .../helpers/test_build_cell_measure.py | 5 +- .../helpers/test_build_cube_metadata.py | 14 +- .../test_build_dimension_coordinate.py | 5 +- ...t_build_geostationary_coordinate_system.py | 5 +- ..._azimuthal_equal_area_coordinate_system.py | 5 +- ...ild_lambert_conformal_coordinate_system.py | 5 +- .../test_build_mercator_coordinate_system.py | 5 +- ...uild_oblique_mercator_coordinate_system.py | 182 -- ...d_polar_stereographic_coordinate_system.py | 5 +- ...t_build_stereographic_coordinate_system.py | 5 +- ...d_transverse_mercator_coordinate_system.py | 5 +- .../test_build_verticalp_coordinate_system.py | 5 +- .../helpers/test_get_attr_units.py | 5 +- .../helpers/test_get_cf_bounds_var.py | 5 +- .../nc_load_rules/helpers/test_get_names.py | 5 +- .../test_has_supported_mercator_parameters.py | 5 +- ...upported_polar_stereographic_parameters.py | 5 +- .../helpers/test_parse_cell_methods.py | 10 +- .../helpers/test_reorder_bounds_data.py | 5 +- .../tests/unit/fileformats/netcdf/__init__.py | 5 +- .../fileformats/netcdf/loader/__init__.py | 5 +- .../netcdf/loader/test__chunk_control.py | 216 --- .../netcdf/loader/test__get_cf_var_data.py | 18 +- .../netcdf/loader/test__load_aux_factory.py | 9 +- .../netcdf/loader/test__load_cube.py | 5 +- ...__translate_constraints_to_var_callback.py | 5 +- .../netcdf/loader/test_load_cubes.py | 5 +- .../unit/fileformats/netcdf/saver/__init__.py | 5 +- .../fileformats/netcdf/saver/test_Saver.py | 58 +- .../netcdf/saver/test_Saver__lazy.py | 5 +- .../saver/test_Saver__lazy_stream_data.py | 8 +- .../netcdf/saver/test_Saver__ugrid.py | 5 +- .../saver/test__data_fillvalue_check.py | 5 +- .../netcdf/saver/test__fillvalue_report.py | 13 +- .../fileformats/netcdf/saver/test_save.py | 5 +- .../fileformats/nimrod_load_rules/__init__.py | 5 +- .../nimrod_load_rules/test_units.py | 5 +- .../nimrod_load_rules/test_vertical_coord.py | 7 +- .../tests/unit/fileformats/pp/__init__.py | 5 +- .../unit/fileformats/pp/test_PPDataProxy.py | 5 +- .../tests/unit/fileformats/pp/test_PPField.py | 12 +- .../pp/test__convert_constraints.py | 5 +- .../fileformats/pp/test__create_field_data.py | 5 +- .../pp/test__data_bytes_to_shaped_array.py | 5 +- .../unit/fileformats/pp/test__field_gen.py | 5 +- 
.../fileformats/pp/test__interpret_field.py | 5 +- .../unit/fileformats/pp/test_as_fields.py | 5 +- .../tests/unit/fileformats/pp/test_load.py | 5 +- .../tests/unit/fileformats/pp/test_save.py | 17 +- .../unit/fileformats/pp/test_save_fields.py | 5 +- .../pp/test_save_pairs_from_cube.py | 5 +- .../fileformats/pp_load_rules/__init__.py | 5 +- .../pp_load_rules/test__all_other_rules.py | 5 +- ...__collapse_degenerate_points_and_bounds.py | 5 +- ...est__convert_scalar_pseudo_level_coords.py | 5 +- ...test__convert_scalar_realization_coords.py | 5 +- .../test__convert_time_coords.py | 5 +- .../test__convert_vertical_coords.py | 5 +- .../pp_load_rules/test__dim_or_aux.py | 5 +- .../pp_load_rules/test__epoch_date_hours.py | 5 +- .../pp_load_rules/test__model_level_number.py | 5 +- .../test__reduced_points_and_bounds.py | 5 +- .../test__reshape_vector_args.py | 5 +- .../fileformats/pp_load_rules/test_convert.py | 5 +- .../tests/unit/fileformats/rules/__init__.py | 5 +- .../unit/fileformats/rules/test_Loader.py | 5 +- .../unit/fileformats/rules/test__make_cube.py | 5 +- .../__init__.py | 5 +- .../test_ArrayStructure.py | 5 +- .../test_GroupStructure.py | 5 +- lib/iris/tests/unit/fileformats/test_rules.py | 5 +- .../tests/unit/fileformats/um/__init__.py | 5 +- .../unit/fileformats/um/fast_load/__init__.py | 5 +- .../um/fast_load/test_FieldCollation.py | 5 +- .../um/fast_load/test__convert_collation.py | 5 +- .../fast_load_structured_fields/__init__.py | 5 +- .../test_BasicFieldCollation.py | 5 +- .../test_group_structured_fields.py | 5 +- .../um/optimal_array_structuring/__init__.py | 5 +- .../test_optimal_array_structure.py | 5 +- .../unit/fileformats/um/test_um_to_pp.py | 5 +- lib/iris/tests/unit/io/__init__.py | 5 +- .../tests/unit/io/test__generate_cubes.py | 5 +- .../tests/unit/io/test_expand_filespecs.py | 5 +- lib/iris/tests/unit/io/test_run_callback.py | 5 +- lib/iris/tests/unit/io/test_save.py | 5 +- lib/iris/tests/unit/lazy_data/__init__.py | 5 +- .../unit/lazy_data/test_as_concrete_data.py | 5 +- .../tests/unit/lazy_data/test_as_lazy_data.py | 29 +- .../unit/lazy_data/test_co_realise_cubes.py | 5 +- .../tests/unit/lazy_data/test_is_lazy_data.py | 5 +- .../lazy_data/test_is_lazy_masked_data.py | 5 +- .../unit/lazy_data/test_lazy_elementwise.py | 5 +- .../lazy_data/test_map_complete_blocks.py | 5 +- .../lazy_data/test_multidim_lazy_stack.py | 5 +- .../tests/unit/lazy_data/test_non_lazy.py | 5 +- lib/iris/tests/unit/merge/__init__.py | 5 +- lib/iris/tests/unit/merge/test_ProtoCube.py | 5 +- lib/iris/tests/unit/pandas/__init__.py | 5 +- lib/iris/tests/unit/pandas/test_pandas.py | 11 +- lib/iris/tests/unit/plot/__init__.py | 5 +- lib/iris/tests/unit/plot/_blockplot_common.py | 5 +- .../test__check_bounds_contiguity_and_mask.py | 5 +- ..._check_geostationary_coords_and_convert.py | 7 +- lib/iris/tests/unit/plot/test__fixup_dates.py | 5 +- .../tests/unit/plot/test__get_plot_defn.py | 5 +- ...est__get_plot_defn_custom_coords_picked.py | 5 +- .../tests/unit/plot/test__get_plot_objects.py | 5 +- .../test__replace_axes_with_cartopy_axes.py | 5 +- lib/iris/tests/unit/plot/test_contour.py | 5 +- lib/iris/tests/unit/plot/test_contourf.py | 5 +- lib/iris/tests/unit/plot/test_hist.py | 5 +- lib/iris/tests/unit/plot/test_outline.py | 5 +- lib/iris/tests/unit/plot/test_pcolor.py | 5 +- lib/iris/tests/unit/plot/test_pcolormesh.py | 5 +- lib/iris/tests/unit/plot/test_plot.py | 5 +- lib/iris/tests/unit/plot/test_points.py | 5 +- lib/iris/tests/unit/plot/test_scatter.py | 5 +- 
lib/iris/tests/unit/quickplot/__init__.py | 5 +- lib/iris/tests/unit/quickplot/test_contour.py | 5 +- .../tests/unit/quickplot/test_contourf.py | 5 +- lib/iris/tests/unit/quickplot/test_outline.py | 5 +- lib/iris/tests/unit/quickplot/test_pcolor.py | 5 +- .../tests/unit/quickplot/test_pcolormesh.py | 5 +- lib/iris/tests/unit/quickplot/test_plot.py | 5 +- lib/iris/tests/unit/quickplot/test_points.py | 5 +- lib/iris/tests/unit/quickplot/test_scatter.py | 5 +- .../tests/unit/representation/__init__.py | 5 +- .../representation/cube_printout/__init__.py | 5 +- .../cube_printout/test_CubePrintout.py | 5 +- .../cube_printout/test_Table.py | 5 +- .../representation/cube_summary/__init__.py | 5 +- .../cube_summary/test_CubeSummary.py | 5 +- lib/iris/tests/unit/test_Future.py | 5 +- lib/iris/tests/unit/test_sample_data_path.py | 5 +- lib/iris/tests/unit/tests/__init__.py | 5 +- lib/iris/tests/unit/tests/stock/__init__.py | 5 +- .../tests/unit/tests/stock/test_netcdf.py | 5 +- lib/iris/tests/unit/tests/test_IrisTest.py | 5 +- lib/iris/tests/unit/time/__init__.py | 5 +- .../tests/unit/time/test_PartialDateTime.py | 5 +- lib/iris/tests/unit/util/__init__.py | 5 +- .../tests/unit/util/test__coord_regular.py | 5 +- lib/iris/tests/unit/util/test__is_circular.py | 5 +- lib/iris/tests/unit/util/test__mask_array.py | 5 +- .../unit/util/test__slice_data_with_keys.py | 5 +- lib/iris/tests/unit/util/test_array_equal.py | 5 +- .../unit/util/test_broadcast_to_shape.py | 5 +- .../unit/util/test_column_slices_generator.py | 5 +- .../test_demote_dim_coord_to_aux_coord.py | 5 +- .../tests/unit/util/test_describe_diff.py | 5 +- .../unit/util/test_equalise_attributes.py | 118 +- .../unit/util/test_file_is_newer_than.py | 5 +- .../unit/util/test_find_discontiguities.py | 5 +- .../tests/unit/util/test_guess_coord_axis.py | 50 - lib/iris/tests/unit/util/test_mask_cube.py | 5 +- lib/iris/tests/unit/util/test_new_axis.py | 5 +- .../test_promote_aux_coord_to_dim_coord.py | 5 +- lib/iris/tests/unit/util/test_reverse.py | 5 +- .../tests/unit/util/test_rolling_window.py | 5 +- lib/iris/tests/unit/util/test_squeeze.py | 5 +- .../tests/unit/util/test_unify_time_units.py | 5 +- lib/iris/time.py | 58 +- lib/iris/util.py | 59 +- pyproject.toml | 6 +- requirements/locks/py310-linux-64.lock | 339 ++-- requirements/locks/py311-linux-64.lock | 337 ++-- requirements/locks/py39-linux-64.lock | 340 ++-- requirements/py310.yml | 5 +- requirements/py311.yml | 5 +- requirements/py39.yml | 5 +- requirements/pypi-core.txt | 4 +- tools/generate_std_names.py | 10 +- tools/release_do_nothing.py | 68 +- tools/update_lockfiles.py | 5 +- 666 files changed, 6111 insertions(+), 13910 deletions(-) rename .github/workflows/{benchmarks_run.yml => benchmark.yml} (56%) delete mode 100644 .github/workflows/benchmarks_report.yml create mode 100644 COPYING create mode 100644 COPYING.LESSER delete mode 100644 LICENSE create mode 100644 docs/src/_templates/layout.html delete mode 100644 docs/src/community/phrasebook.rst delete mode 100644 docs/src/further_topics/filtering_warnings.rst delete mode 100644 docs/src/further_topics/index.rst delete mode 100644 docs/src/further_topics/netcdf_io.rst create mode 100644 docs/src/techpapers/index.rst rename docs/src/{further_topics => techpapers}/missing_data_handling.rst (100%) rename docs/src/{further_topics => techpapers}/um_files_loading.rst (93%) delete mode 100644 docs/src/whatsnew/latest.rst delete mode 100644 docs/src/whatsnew/latest.rst.template delete mode 100644 lib/iris/common/_split_attribute_dicts.py delete 
mode 100644 lib/iris/tests/integration/attrs_matrix_results_load.json delete mode 100644 lib/iris/tests/integration/attrs_matrix_results_roundtrip.json delete mode 100644 lib/iris/tests/integration/attrs_matrix_results_save.json delete mode 100644 lib/iris/tests/integration/test_netcdf__loadsaveattrs.py create mode 100644 lib/iris/tests/test_coord_categorisation.py delete mode 100644 lib/iris/tests/unit/conftest.py delete mode 100644 lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py delete mode 100644 lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py delete mode 100644 lib/iris/tests/unit/coord_systems/test_RotatedMercator.py delete mode 100644 lib/iris/tests/unit/cube/test_CubeAttrsDict.py delete mode 100644 lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py delete mode 100644 lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py delete mode 100644 lib/iris/tests/unit/util/test_guess_coord_axis.py diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmark.yml similarity index 56% rename from .github/workflows/benchmarks_run.yml rename to .github/workflows/benchmark.yml index bcc18d62c4..5be56c1d80 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmark.yml @@ -1,9 +1,6 @@ -# Use ASV to check for performance regressions, either: -# - In the last 24 hours' commits. -# - Introduced by this pull request. +# Use ASV to check for performance regressions in the last 24 hours' commits. -name: benchmarks-run -run-name: Run benchmarks +name: benchmark-check on: schedule: @@ -12,7 +9,7 @@ on: workflow_dispatch: inputs: first_commit: - description: "First commit to benchmark (see bm_runner.py > Overnight)." + description: "Argument to be passed to the overnight benchmark script." 
required: false type: string pull_request: @@ -29,14 +26,14 @@ jobs: env: IRIS_TEST_DATA_LOC_PATH: benchmarks IRIS_TEST_DATA_PATH: benchmarks/iris-test-data - IRIS_TEST_DATA_VERSION: "2.22" + IRIS_TEST_DATA_VERSION: "2.19" # Lets us manually bump the cache to rebuild ENV_CACHE_BUILD: "0" TEST_DATA_CACHE_BUILD: "2" steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 with: fetch-depth: 0 @@ -77,17 +74,12 @@ jobs: - name: Benchmark this pull request if: ${{ github.event.label.name == 'benchmark_this' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - PR_NUMBER: ${{ github.event.number }} run: | + git checkout ${{ github.head_ref }} python benchmarks/bm_runner.py branch origin/${{ github.base_ref }} - name: Run overnight benchmarks - id: overnight if: ${{ github.event_name != 'pull_request' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | first_commit=${{ inputs.first_commit }} if [ "$first_commit" == "" ] @@ -100,27 +92,57 @@ jobs: python benchmarks/bm_runner.py overnight $first_commit fi - - name: Warn of failure - if: > - failure() && - steps.overnight.outcome == 'failure' + - name: Create issues for performance shifts + if: ${{ github.event_name != 'pull_request' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - title="Overnight benchmark workflow failed: \`${{ github.run_id }}\`" - body="Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" - gh issue create --title "$title" --body "$body" --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY + if [ -d benchmarks/.asv/performance-shifts ] + then + cd benchmarks/.asv/performance-shifts + for commit_file in * + do + commit="${commit_file%.*}" + pr_number=$(git log "$commit"^! --oneline | grep -o "#[0-9]*" | tail -1 | cut -c 2-) + author=$(gh pr view $pr_number --json author -q '.["author"]["login"]' --repo $GITHUB_REPOSITORY) + merger=$(gh pr view $pr_number --json mergedBy -q '.["mergedBy"]["login"]' --repo $GITHUB_REPOSITORY) + # Find a valid assignee from author/merger/nothing. + if curl -s https://api.github.com/users/$author | grep -q '"type": "User"'; then + assignee=$author + elif curl -s https://api.github.com/users/$merger | grep -q '"type": "User"'; then + assignee=$merger + else + assignee="" + fi + title="Performance Shift(s): \`$commit\`" + body=" + Benchmark comparison has identified performance shifts at + + * commit $commit (#$pr_number). + + Please review the report below and \ + take corrective/congratulatory action as appropriate \ + :slightly_smiling_face: - - name: Upload any benchmark reports - if: success() || steps.overnight.outcome == 'failure' - uses: actions/upload-artifact@v3 - with: - name: benchmark_reports - path: .github/workflows/benchmark_reports +
+ Performance shift report
+
+ \`\`\`
+ $(cat $commit_file)
+ \`\`\`
+
+
+ + Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}) + " + gh issue create --title "$title" --body "$body" --assignee $assignee --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY + done + fi - name: Archive asv results if: ${{ always() }} uses: actions/upload-artifact@v3 with: - name: asv-raw-results - path: benchmarks/.asv/results + name: asv-report + path: | + benchmarks/.asv/results diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml deleted file mode 100644 index cb5110dda5..0000000000 --- a/.github/workflows/benchmarks_report.yml +++ /dev/null @@ -1,83 +0,0 @@ -# Post any reports generated by benchmarks_run.yml . -# Separated for security: -# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ - -name: benchmarks-report -run-name: Report benchmark results - -on: - workflow_run: - workflows: [benchmarks-run] - types: - - completed - -jobs: - download: - runs-on: ubuntu-latest - outputs: - reports_exist: ${{ steps.unzip.outputs.reports_exist }} - steps: - - name: Download artifact - id: download-artifact - # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#using-data-from-the-triggering-workflow - uses: actions/github-script@v7 - with: - script: | - let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: context.payload.workflow_run.id, - }); - let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { - return artifact.name == "benchmark_reports" - })[0]; - if (typeof matchArtifact != 'undefined') { - let download = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - let fs = require('fs'); - fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/benchmark_reports.zip`, Buffer.from(download.data)); - }; - - - name: Unzip artifact - id: unzip - run: | - if test -f "benchmark_reports.zip"; then - reports_exist=1 - unzip benchmark_reports.zip -d benchmark_reports - else - reports_exist=0 - fi - echo "reports_exist=$reports_exist" >> "$GITHUB_OUTPUT" - - - name: Store artifact - uses: actions/upload-artifact@v3 - with: - name: benchmark_reports - path: benchmark_reports - - post_reports: - runs-on: ubuntu-latest - needs: download - if: needs.download.outputs.reports_exist == 1 - steps: - - name: Checkout repo - uses: actions/checkout@v4 - - - name: Download artifact - uses: actions/download-artifact@v3 - with: - name: benchmark_reports - path: .github/workflows/benchmark_reports - - - name: Set up Python - # benchmarks/bm_runner.py only needs builtins to run. 
- uses: actions/setup-python@v4 - - - name: Post reports - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: python benchmarks/bm_runner.py _gh_post diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 65716338de..c42eb90104 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.10.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.06.0 diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 2c2a083050..5c48966ce8 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -50,12 +50,12 @@ jobs: session: "tests" env: - IRIS_TEST_DATA_VERSION: "2.22" + IRIS_TEST_DATA_VERSION: "2.19" ENV_NAME: "ci-tests" steps: - name: "checkout" - uses: actions/checkout@v4 + uses: actions/checkout@v3 - name: "environment configure" env: @@ -80,7 +80,7 @@ jobs: env_name: ${{ env.ENV_NAME }} - name: "conda install" - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@v2 with: miniforge-version: latest channels: conda-forge,defaults diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index 450a18eb86..942d528f6d 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -28,7 +28,7 @@ jobs: name: "build sdist & wheel" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 with: fetch-depth: 0 @@ -57,7 +57,7 @@ jobs: env: ENV_NAME: "ci-wheels" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 with: fetch-depth: 0 @@ -82,7 +82,7 @@ jobs: env_name: ${{ env.ENV_NAME }} - name: "conda install" - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@v2 with: miniforge-version: latest channels: conda-forge,defaults diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index d92b653f26..453014fa2a 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.10.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.06.0 secrets: inherit diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 8e18b36491..203dc43b4e 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -33,7 +33,7 @@ jobs: Otherwise this issue will be automatically closed in 28 days time. # Comment on the staled prs. - stale-pr-message: | + stale-pr-message: | In order to maintain a backlog of relevant PRs, we automatically label them as stale after 500 days of inactivity. If this PR is still important to you, then please comment on this PR and the stale label will be removed. @@ -43,20 +43,20 @@ jobs: # Comment on the staled issues while closed. close-issue-message: | This stale issue has been automatically closed due to a lack of community activity. - + If you still care about this issue, then please either: * Re-open this issue, if you have sufficient permissions, or - * Add a comment stating that this is still relevant and someone will re-open it on your behalf. + * Add a comment pinging `@SciTools/iris-devs` who will re-open on your behalf. # Comment on the staled prs while closed. 
close-pr-message: | This stale PR has been automatically closed due to a lack of community activity. - + If you still care about this PR, then please either: * Re-open this PR, if you have sufficient permissions, or * Add a comment pinging `@SciTools/iris-devs` who will re-open on your behalf. - # Label to apply on staled issues. + # Label to apply on staled issues. stale-issue-label: Stale # Label to apply on staled prs. @@ -64,11 +64,11 @@ jobs: # Labels on issues exempted from stale. exempt-issue-labels: - "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎, Release: Major" + "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Labels on prs exempted from stale. exempt-pr-labels: - "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue, Dragon 🐉, Dragon Sub-Task 🦎, Release: Major" + "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Max number of operations per run. operations-per-run: 300 diff --git a/.gitignore b/.gitignore index 42d02d8c71..4d0b474e8a 100644 --- a/.gitignore +++ b/.gitignore @@ -32,7 +32,6 @@ pip-cache # asv data, environments, results .asv benchmarks/.data -.github/workflows/benchmark_reports #Translations *.mo diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cbad42b83a..c641389768 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ minimum_pre_commit_version: 1.21.0 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.4.0 hooks: # Prevent giant files from being committed. - id: check-added-large-files @@ -29,14 +29,14 @@ repos: - id: no-commit-to-branch - repo: https://github.com/codespell-project/codespell - rev: "v2.2.6" + rev: "v2.2.5" hooks: - id: codespell types_or: [asciidoc, python, markdown, rst] additional_dependencies: [tomli] - repo: https://github.com/psf/black - rev: 23.11.0 + rev: 23.7.0 hooks: - id: black pass_filenames: false @@ -56,7 +56,7 @@ repos: args: [--filter-files] - repo: https://github.com/asottile/blacken-docs - rev: 1.16.0 + rev: 1.15.0 hooks: - id: blacken-docs types: [file, rst] diff --git a/COPYING b/COPYING new file mode 100644 index 0000000000..94a9ed024d --- /dev/null +++ b/COPYING @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/COPYING.LESSER b/COPYING.LESSER
new file mode 100644
index 0000000000..65c5ca88a6
--- /dev/null
+++ b/COPYING.LESSER
@@ -0,0 +1,165 @@
+                   GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+ + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. 
+ + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. 
+ + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 2d1d23e16c..0000000000 --- a/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2010, Met Office. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/MANIFEST.in b/MANIFEST.in index 354b92d735..329cf79c5d 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -17,7 +17,8 @@ exclude .readthedocs.yml exclude CHANGES exclude CODE_OF_CONDUCT.md exclude codecov.yml -include LICENSE +include COPYING +include COPYING.LESSER exclude Makefile exclude noxfile.py diff --git a/README.md b/README.md index f857608718..53d24b0162 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ | 💬 Community | [![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/version/2/1/code_of_conduct/) [![GH Discussions](https://img.shields.io/badge/github-discussions%20%F0%9F%92%AC-yellow?logo=github&logoColor=lightgrey)](https://github.com/SciTools/iris/discussions) [![twitter](https://img.shields.io/twitter/follow/scitools_iris?color=yellow&label=twitter%7Cscitools_iris&logo=twitter&style=plastic)](https://twitter.com/scitools_iris) | | 📖 Documentation | [![rtd](https://readthedocs.org/projects/scitools-iris/badge/?version=latest)](https://scitools-iris.readthedocs.io/en/latest/?badge=latest) | | 📈 Health | [![codecov](https://codecov.io/gh/SciTools/iris/branch/main/graph/badge.svg?token=0GeICSIF3g)](https://codecov.io/gh/SciTools/iris) | -| ✨ Meta | [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/LICENSE) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | +| ✨ Meta | [![code style - black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![NEP29](https://raster.shields.io/badge/follows-NEP29-orange.png)](https://numpy.org/neps/nep-0029-deprecation_policy.html) [![license - bds-3-clause](https://img.shields.io/github/license/SciTools/iris)](https://github.com/SciTools/iris/blob/main/COPYING.LESSER) [![conda platform](https://img.shields.io/conda/pn/conda-forge/iris.svg)](https://anaconda.org/conda-forge/iris) | | 📦 Package | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.595182.svg)](https://doi.org/10.5281/zenodo.595182) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/iris?color=orange&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/iris) [![pypi](https://img.shields.io/pypi/v/scitools-iris?color=orange&label=pypi&logo=python&logoColor=white)](https://pypi.org/project/scitools-iris/) [![pypi - python version](https://img.shields.io/pypi/pyversions/scitools-iris.svg?color=orange&logo=python&label=python&logoColor=white)](https://pypi.org/project/scitools-iris/) | | 🧰 Repo | [![commits-since](https://img.shields.io/github/commits-since/SciTools/iris/latest.svg)](https://github.com/SciTools/iris/commits/main) [![contributors](https://img.shields.io/github/contributors/SciTools/iris)](https://github.com/SciTools/iris/graphs/contributors) [![release](https://img.shields.io/github/v/release/scitools/iris)](https://github.com/SciTools/iris/releases) | | | diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index fab5bcb44e..faa7f6daee 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -4,6 +4,7 @@ "project_url": "https://github.com/SciTools/iris", "repo": "..", "environment_type": 
"conda-delegated", + "conda_channels": ["conda-forge", "defaults"], "show_commit_url": "http://github.com/scitools/iris/commit/", "branches": ["upstream/main"], diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index a60cb7f2b7..250a4e032d 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` subclass that manages the Conda environment via custom user scripts. @@ -65,8 +66,6 @@ def __init__( ignored.append("`requirements`") if tagged_env_vars: ignored.append("`tagged_env_vars`") - if conf.conda_channels: - ignored.append("conda_channels") if conf.conda_environment_file: ignored.append("`conda_environment_file`") message = ( @@ -76,8 +75,6 @@ def __init__( log.warning(message) requirements = {} tagged_env_vars = {} - # All that is required to create ASV's bare-bones environment. - conf.conda_channels = ["defaults"] conf.conda_environment_file = None super().__init__(conf, python, requirements, tagged_env_vars) diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 61983a969f..c86682ca4a 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Common code for benchmarks.""" from os import environ import resource diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index 7d1e266c78..4cc4f6c70a 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ AuxFactory benchmark tests. diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index b6f56b997f..3107dcf077 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Coord benchmark tests. diff --git a/benchmarks/benchmarks/cperf/__init__.py b/benchmarks/benchmarks/cperf/__init__.py index 7adba01b60..fb311c44dc 100644 --- a/benchmarks/benchmarks/cperf/__init__.py +++ b/benchmarks/benchmarks/cperf/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. @@ -52,7 +53,7 @@ def setup(self, file_type, three_d, three_times): if three_d: create_kwargs["n_levels"] = 71 - # Will reuse a file if already present. + # Will re-use a file if already present. file_path = make_cubesphere_testfile(**create_kwargs) else: diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py index f27558a5ed..47eb255513 100644 --- a/benchmarks/benchmarks/cperf/equality.py +++ b/benchmarks/benchmarks/cperf/equality.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py index efbd497e2e..04bb7e1a61 100644 --- a/benchmarks/benchmarks/cperf/load.py +++ b/benchmarks/benchmarks/cperf/load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py index 957b28e3fd..2eb60e2ab5 100644 --- a/benchmarks/benchmarks/cperf/save.py +++ b/benchmarks/benchmarks/cperf/save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ File saving benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index ceacb4e86c..5889ce872b 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Cube benchmark tests. diff --git a/benchmarks/benchmarks/experimental/__init__.py b/benchmarks/benchmarks/experimental/__init__.py index 81fb222916..f16e400bce 100644 --- a/benchmarks/benchmarks/experimental/__init__.py +++ b/benchmarks/benchmarks/experimental/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Benchmark tests for the experimental module. diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py index 1fa8b82d67..2e40c525a6 100644 --- a/benchmarks/benchmarks/experimental/ugrid/__init__.py +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Benchmark tests for the experimental.ugrid module. diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 6d62cf9cd5..c5f8fb564e 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Benchmarks stages of operation of the function :func:`iris.experimental.ugrid.utils.recombine_submeshes`. @@ -22,14 +23,14 @@ from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD from iris.experimental.ugrid.utils import recombine_submeshes -from ... import TrackAddedMemoryAllocation, on_demand_benchmark +from ... import TrackAddedMemoryAllocation from ...generate_data.ugrid import make_cube_like_2d_cubesphere class MixinCombineRegions: # Characterise time taken + memory-allocated, for various stages of combine # operations on cubesphere-like test data. - params = [50, 500] + params = [4, 500] param_names = ["cubesphere-N"] def _parametrised_cache_filename(self, n_cubesphere, content_name): @@ -199,8 +200,6 @@ class CombineRegionsComputeRealData(MixinCombineRegions): def time_compute_data(self, n_cubesphere): _ = self.recombined_cube.data - # Vulnerable to noise, so disabled by default. - @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_compute_data(self, n_cubesphere): _ = self.recombined_cube.data @@ -218,8 +217,6 @@ def time_save(self, n_cubesphere): # Save to disk, which must compute data + stream it to file. save(self.recombined_cube, "tmp.nc") - # Vulnerable to noise, so disabled by default. - @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_save(self, n_cubesphere): save(self.recombined_cube, "tmp.nc") @@ -248,8 +245,6 @@ def time_stream_file2file(self, n_cubesphere): # Save to disk, which must compute data + stream it to file. save(self.recombined_cube, "tmp.nc") - # Vulnerable to noise, so disabled by default. 
- @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_stream_file2file(self, n_cubesphere): save(self.recombined_cube, "tmp.nc") diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py index 8837e7cca9..52a5aceca8 100644 --- a/benchmarks/benchmarks/generate_data/__init__.py +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Scripts for generating supporting data for benchmarking. diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py index b6702ad883..eaf46bb405 100644 --- a/benchmarks/benchmarks/generate_data/stock.py +++ b/benchmarks/benchmarks/generate_data/stock.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Wrappers for using :mod:`iris.tests.stock` methods for benchmarking. @@ -38,7 +39,7 @@ def _external(func_name_, temp_file_dir, **kwargs_): ) if not REUSE_DATA or not save_path.is_file(): # The xios functions take control of save location so need to move to - # a more specific name that allows reuse. + # a more specific name that allows re-use. actual_path = run_function_elsewhere( _external, func_name_=func_name, diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py index 3be5c20a48..527b49a6bb 100644 --- a/benchmarks/benchmarks/generate_data/ugrid.py +++ b/benchmarks/benchmarks/generate_data/ugrid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Scripts for generating supporting data for UGRID-related benchmarking. """ diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py index 23d3770aa1..39773bbb4b 100644 --- a/benchmarks/benchmarks/generate_data/um_files.py +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Generate FF, PP and NetCDF files based on a minimal synthetic FF file. diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index 5f902fd2e0..fc32ac289b 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. from importlib import import_module, reload ################ diff --git a/benchmarks/benchmarks/iterate.py b/benchmarks/benchmarks/iterate.py index 6cc935498c..0a5415ac2b 100644 --- a/benchmarks/benchmarks/iterate.py +++ b/benchmarks/benchmarks/iterate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Iterate benchmark tests. diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index a926e6b7e2..1b0ea696f6 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ File loading benchmark tests. @@ -26,7 +27,7 @@ class LoadAndRealise: # For data generation timeout = 600.0 params = [ - [(50, 50, 2), (1280, 960, 5), (2, 2, 1000)], + [(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True], ["FF", "PP", "NetCDF"], ] @@ -68,7 +69,7 @@ def time_realise(self, _, __, ___, ____) -> None: class STASHConstraint: - # xyz sizes mimic LoadAndRealise to maximise file reuse. + # xyz sizes mimic LoadAndRealise to maximise file re-use. params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]] param_names = ["xyz", "file_format"] diff --git a/benchmarks/benchmarks/load/ugrid.py b/benchmarks/benchmarks/load/ugrid.py index ef01ae03be..350a78e128 100644 --- a/benchmarks/benchmarks/load/ugrid.py +++ b/benchmarks/benchmarks/load/ugrid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Mesh data loading benchmark tests. @@ -76,7 +77,7 @@ class DataRealisation: warmup_time = 0.0 timeout = 300.0 - params = [int(1e4), int(2e5)] + params = [1, int(2e5)] param_names = ["number of faces"] def setup_common(self, **kwargs): diff --git a/benchmarks/benchmarks/metadata_manager_factory.py b/benchmarks/benchmarks/metadata_manager_factory.py index 531af58b66..7e7fc98008 100644 --- a/benchmarks/benchmarks/metadata_manager_factory.py +++ b/benchmarks/benchmarks/metadata_manager_factory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ metadata_manager_factory benchmark tests. 
diff --git a/benchmarks/benchmarks/mixin.py b/benchmarks/benchmarks/mixin.py index 335bee1a0f..bec5518eee 100644 --- a/benchmarks/benchmarks/mixin.py +++ b/benchmarks/benchmarks/mixin.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Mixin benchmark tests. diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 73a2a51990..75195c86e9 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Plot benchmark tests. diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index 9cd77527af..44bd1b6c95 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Regridding benchmark test diff --git a/benchmarks/benchmarks/save.py b/benchmarks/benchmarks/save.py index 6feb446c70..3551c72528 100644 --- a/benchmarks/benchmarks/save.py +++ b/benchmarks/benchmarks/save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ File saving benchmarks. @@ -15,12 +16,12 @@ from iris import save from iris.experimental.ugrid import save_mesh -from . import TrackAddedMemoryAllocation, on_demand_benchmark +from . import TrackAddedMemoryAllocation from .generate_data.ugrid import make_cube_like_2d_cubesphere class NetcdfSave: - params = [[50, 600], [False, True]] + params = [[1, 600], [False, True]] param_names = ["cubesphere-N", "is_unstructured"] def setup(self, n_cubesphere, is_unstructured): @@ -46,8 +47,6 @@ def time_netcdf_save_mesh(self, n_cubesphere, is_unstructured): if is_unstructured: self._save_mesh(self.cube) - # Vulnerable to noise, so disabled by default. - @on_demand_benchmark @TrackAddedMemoryAllocation.decorator def track_addedmem_netcdf_save(self, n_cubesphere, is_unstructured): # Don't need to copy the cube here since track_ benchmarks don't diff --git a/benchmarks/benchmarks/sperf/__init__.py b/benchmarks/benchmarks/sperf/__init__.py index 111cd4b841..eccad56f6f 100644 --- a/benchmarks/benchmarks/sperf/__init__.py +++ b/benchmarks/benchmarks/sperf/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index da0cffde50..e27b3b1996 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py index bb3b707a75..85c73ab92b 100644 --- a/benchmarks/benchmarks/sperf/equality.py +++ b/benchmarks/benchmarks/sperf/equality.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/sperf/load.py b/benchmarks/benchmarks/sperf/load.py index ab1273e288..6a60355976 100644 --- a/benchmarks/benchmarks/sperf/load.py +++ b/benchmarks/benchmarks/sperf/load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/sperf/save.py b/benchmarks/benchmarks/sperf/save.py index 9892f0d239..dd33924c6c 100644 --- a/benchmarks/benchmarks/sperf/save.py +++ b/benchmarks/benchmarks/sperf/save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. """ diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py index e4c3297614..5c1d10d218 100644 --- a/benchmarks/benchmarks/trajectory.py +++ b/benchmarks/benchmarks/trajectory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
""" Trajectory benchmark test diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 1efe8d3acb..f3efb0ea31 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Argparse conveniences for executing common types of benchmark runs. """ @@ -14,10 +15,8 @@ from os import environ from pathlib import Path import re -import shlex import subprocess from tempfile import NamedTemporaryFile -from textwrap import dedent from typing import Literal # The threshold beyond which shifts are 'notable'. See `asv compare`` docs @@ -25,37 +24,25 @@ COMPARE_FACTOR = 1.2 BENCHMARKS_DIR = Path(__file__).parent -ROOT_DIR = BENCHMARKS_DIR.parent -# Storage location for reports used in GitHub actions. -GH_REPORT_DIR = ROOT_DIR.joinpath(".github", "workflows", "benchmark_reports") # Common ASV arguments for all run_types except `custom`. ASV_HARNESS = ( - "run {posargs} --attribute rounds=4 --interleave-rounds --show-stderr" + "run {posargs} --attribute rounds=4 --interleave-rounds --strict " + "--show-stderr" ) -def echo(echo_string: str): +def _subprocess_run_print(args, **kwargs): # Use subprocess for printing to reduce chance of printing out of sequence # with the subsequent calls. - subprocess.run(["echo", f"BM_RUNNER DEBUG: {echo_string}"]) - - -def _subprocess_runner(args, asv=False, **kwargs): - # Avoid permanent modifications if the same arguments are used more than once. - args = args.copy() - kwargs = kwargs.copy() - if asv: - args.insert(0, "asv") - kwargs["cwd"] = BENCHMARKS_DIR - echo(" ".join(args)) - kwargs.setdefault("check", True) + subprocess.run(["echo", f"BM_RUNNER DEBUG: {' '.join(args)}"]) return subprocess.run(args, **kwargs) -def _subprocess_runner_capture(args, **kwargs) -> str: - result = _subprocess_runner(args, capture_output=True, **kwargs) - return result.stdout.decode().rstrip() +def _subprocess_run_asv(args, **kwargs): + args.insert(0, "asv") + kwargs["cwd"] = BENCHMARKS_DIR + return _subprocess_run_print(args, **kwargs) def _check_requirements(package: str) -> None: @@ -74,18 +61,19 @@ def _prep_data_gen_env() -> None: Create/access a separate, unchanging environment for generating test data. """ + root_dir = BENCHMARKS_DIR.parent python_version = "3.11" data_gen_var = "DATA_GEN_PYTHON" if data_gen_var in environ: - echo("Using existing data generation environment.") + print("Using existing data generation environment.") else: - echo("Setting up the data generation environment ...") + print("Setting up the data generation environment ...") # Get Nox to build an environment for the `tests` session, but don't - # run the session. Will reuse a cached environment if appropriate. - _subprocess_runner( + # run the session. Will re-use a cached environment if appropriate. + _subprocess_run_print( [ "nox", - f"--noxfile={ROOT_DIR / 'noxfile.py'}", + f"--noxfile={root_dir / 'noxfile.py'}", "--session=tests", "--install-only", f"--python={python_version}", @@ -94,14 +82,14 @@ def _prep_data_gen_env() -> None: # Find the environment built above, set it to be the data generation # environment. 
data_gen_python = next( - (ROOT_DIR / ".nox").rglob(f"tests*/bin/python{python_version}") + (root_dir / ".nox").rglob(f"tests*/bin/python{python_version}") ).resolve() environ[data_gen_var] = str(data_gen_python) - echo("Installing Mule into data generation environment ...") + print("Installing Mule into data generation environment ...") mule_dir = data_gen_python.parents[1] / "resources" / "mule" if not mule_dir.is_dir(): - _subprocess_runner( + _subprocess_run_print( [ "git", "clone", @@ -109,7 +97,7 @@ def _prep_data_gen_env() -> None: str(mule_dir), ] ) - _subprocess_runner( + _subprocess_run_print( [ str(data_gen_python), "-m", @@ -119,7 +107,7 @@ def _prep_data_gen_env() -> None: ] ) - echo("Data generation environment ready.") + print("Data generation environment ready.") def _setup_common() -> None: @@ -128,192 +116,41 @@ def _setup_common() -> None: _prep_data_gen_env() - echo("Setting up ASV ...") - _subprocess_runner(["machine", "--yes"], asv=True) + print("Setting up ASV ...") + _subprocess_run_asv(["machine", "--yes"]) - echo("Setup complete.") + print("Setup complete.") def _asv_compare(*commits: str, overnight_mode: bool = False) -> None: - """ - Run through a list of commits comparing each one to the next. - """ + """Run through a list of commits comparing each one to the next.""" commits = [commit[:8] for commit in commits] + shifts_dir = BENCHMARKS_DIR / ".asv" / "performance-shifts" for i in range(len(commits) - 1): before = commits[i] after = commits[i + 1] - asv_command = shlex.split( + asv_command = ( f"compare {before} {after} --factor={COMPARE_FACTOR} --split" ) - - comparison = _subprocess_runner_capture(asv_command, asv=True) - echo(comparison) - shifts = _subprocess_runner_capture( - [*asv_command, "--only-changed"], asv=True - ) - - if shifts or (not overnight_mode): - # For the overnight run: only post if there are shifts. - _gh_create_reports(after, comparison, shifts) - - -def _gh_create_reports( - commit_sha: str, results_full: str, results_shifts: str -) -> None: - """ - If running under GitHub Actions: record the results in report(s). - - Posting the reports is done by :func:`_gh_post_reports`, which must be run - within a separate action to comply with GHA's security limitations. - """ - if "GITHUB_ACTIONS" not in environ: - # Only run when within GHA. - return - - pr_number = environ.get("PR_NUMBER", None) - on_pull_request = pr_number is not None - run_id = environ["GITHUB_RUN_ID"] - repo = environ["GITHUB_REPOSITORY"] - gha_run_link = ( - f"[`{run_id}`](https://github.com/{repo}/actions/runs/{run_id})" - ) - - GH_REPORT_DIR.mkdir(exist_ok=True) - commit_dir = GH_REPORT_DIR / commit_sha - commit_dir.mkdir() - command_path = commit_dir / "command.txt" - body_path = commit_dir / "body.txt" - - performance_report = dedent( - ( - """ - ### Performance Benchmark Report: {commit_sha} - -
- <summary>Performance shifts</summary> - - ``` - {results_shifts} - ``` - - </details>
- - <details>
- <summary>Full benchmark results</summary> - - ``` - {results_full} - ``` - - </details>
- - Generated by GHA run {gha_run_link} - """ - ) - ) - performance_report = performance_report.format( - commit_sha=commit_sha, - results_shifts=results_shifts, - results_full=results_full, - gha_run_link=gha_run_link, - ) - - if on_pull_request: - # Command to post the report as a comment on the active PR. - body_path.write_text(performance_report) - command = ( - f"gh pr comment {pr_number} " - f"--body-file {body_path.absolute()} " - f"--repo {repo}" - ) - command_path.write_text(command) - - else: - # Command to post the report as new issue. - commit_msg = _subprocess_runner_capture( - f"git log {commit_sha}^! --oneline".split(" ") - ) - # Intended for benchmarking commits on trunk - should include a PR - # number due to our squash policy. - pr_tag_match = re.search("#[0-9]*", commit_msg) - - assignee = "" - pr_tag = "pull request number unavailable" - if pr_tag_match is not None: - pr_tag = pr_tag_match.group(0) - - for login_type in ("author", "mergedBy"): - gh_query = f'.["{login_type}"]["login"]' - command = shlex.split( - f"gh pr view {pr_tag[1:]} " - f"--json {login_type} -q '{gh_query}' " - f"--repo {repo}" - ) - login = _subprocess_runner_capture(command) - - command = [ - "curl", - "-s", - f"https://api.github.com/users/{login}", - ] - login_info = _subprocess_runner_capture(command) - is_user = '"type": "User"' in login_info - if is_user: - assignee = login - break - - title = f"Performance Shift(s): `{commit_sha}`" - body = dedent( - ( - f""" - Benchmark comparison has identified performance shifts at: - - * commit {commit_sha} ({pr_tag}). - -

- Please review the report below and - take corrective/congratulatory action as appropriate - :slightly_smiling_face: -

- """ - ) - ) - body += performance_report - body_path.write_text(body) - - command = ( - "gh issue create " - f'--title "{title}" ' - f"--body-file {body_path.absolute()} " - '--label "Bot" ' - '--label "Type: Performance" ' - f"--repo {repo}" - ) - if assignee: - command += f" --assignee {assignee}" - command_path.write_text(command) - - -def _gh_post_reports() -> None: - """ - If running under GitHub Actions: post pre-prepared benchmark reports. - - Reports are prepared by :func:`_gh_create_reports`, which must be run - within a separate action to comply with GHA's security limitations. - """ - if "GITHUB_ACTIONS" not in environ: - # Only run when within GHA. - return - - commit_dirs = [x for x in GH_REPORT_DIR.iterdir() if x.is_dir()] - for commit_dir in commit_dirs: - command_path = commit_dir / "command.txt" - command = command_path.read_text() - - # Security: only accept certain commands to run. - assert command.startswith(("gh issue create", "gh pr comment")) - - _subprocess_runner(shlex.split(command)) + _subprocess_run_asv(asv_command.split(" ")) + + if overnight_mode: + # Record performance shifts. + # Run the command again but limited to only showing performance + # shifts. + shifts = _subprocess_run_asv( + [*asv_command.split(" "), "--only-changed"], + capture_output=True, + text=True, + ).stdout + if shifts: + # Write the shifts report to a file. + # Dir is used by .github/workflows/benchmarks.yml, + # but not cached - intended to be discarded after run. + shifts_dir.mkdir(exist_ok=True, parents=True) + shifts_path = (shifts_dir / after).with_suffix(".txt") + with shifts_path.open("w") as shifts_file: + shifts_file.write(shifts) class _SubParserGenerator(ABC): @@ -331,21 +168,18 @@ def __init__(self, subparsers: ArgumentParser.add_subparsers) -> None: formatter_class=argparse.RawTextHelpFormatter, ) self.add_arguments() - self.add_asv_arguments() + self.subparser.add_argument( + "asv_args", + nargs=argparse.REMAINDER, + help="Any number of arguments to pass down to ASV.", + ) self.subparser.set_defaults(func=self.func) @abstractmethod def add_arguments(self) -> None: - """All custom self.subparser.add_argument() calls.""" + """All self.subparser.add_argument() calls.""" _ = NotImplemented - def add_asv_arguments(self) -> None: - self.subparser.add_argument( - "asv_args", - nargs=argparse.REMAINDER, - help="Any number of arguments to pass down to the ASV benchmark command.", - ) - @staticmethod @abstractmethod def func(args: argparse.Namespace): @@ -363,11 +197,11 @@ class Overnight(_SubParserGenerator): name = "overnight" description = ( "Benchmarks all commits between the input **first_commit** to ``HEAD``, " - "comparing each to its parent for performance shifts. If running on " - "GitHub Actions: performance shift(s) will be reported in a new issue.\n" + "comparing each to its parent for performance shifts. If a commit causes " + "shifts, the output is saved to a file:\n" + "``.asv/performance-shifts/``\n\n" "Designed for checking the previous 24 hours' commits, typically in a " - "scheduled script.\n" - "Uses `asv run`." + "scheduled script." ) epilog = ( "e.g. python bm_runner.py overnight a1b23d4\n" @@ -386,20 +220,16 @@ def func(args: argparse.Namespace) -> None: _setup_common() commit_range = f"{args.first_commit}^^.." - # git rev-list --first-parent is the command ASV uses. 
- git_command = shlex.split( - f"git rev-list --first-parent {commit_range}" - ) - commit_string = _subprocess_runner_capture(git_command) - commit_list = commit_string.split("\n") + asv_command = ASV_HARNESS.format(posargs=commit_range) + _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) - asv_command = shlex.split(ASV_HARNESS.format(posargs=commit_range)) - try: - _subprocess_runner([*asv_command, *args.asv_args], asv=True) - finally: - # Designed for long running - want to compare/post any valid - # results even if some are broken. - _asv_compare(*reversed(commit_list), overnight_mode=True) + # git rev-list --first-parent is the command ASV uses. + git_command = f"git rev-list --first-parent {commit_range}" + commit_string = _subprocess_run_print( + git_command.split(" "), capture_output=True, text=True + ).stdout + commit_list = commit_string.rstrip().split("\n") + _asv_compare(*reversed(commit_list), overnight_mode=True) class Branch(_SubParserGenerator): @@ -407,15 +237,11 @@ class Branch(_SubParserGenerator): description = ( "Performs the same operations as ``overnight``, but always on two commits " "only - ``HEAD``, and ``HEAD``'s merge-base with the input " - "**base_branch**. If running on GitHub Actions: HEAD will be GitHub's " - "merge commit and merge-base will be the merge target. Performance " - "comparisons will be posted in a comment on the relevant pull request.\n" - "Designed " + "**base_branch**. Output from this run is never saved to a file. Designed " "for testing if the active branch's changes cause performance shifts - " "anticipating what would be caught by ``overnight`` once merged.\n\n" "**For maximum accuracy, avoid using the machine that is running this " - "session. Run time could be >1 hour for the full benchmark suite.**\n" - "Uses `asv run`." + "session. Run time could be >1 hour for the full benchmark suite.**" ) epilog = ( "e.g. python bm_runner.py branch upstream/main\n" @@ -433,22 +259,19 @@ def add_arguments(self) -> None: def func(args: argparse.Namespace) -> None: _setup_common() - git_command = shlex.split("git rev-parse HEAD") - head_sha = _subprocess_runner_capture(git_command)[:8] - - git_command = shlex.split( - f"git merge-base {head_sha} {args.base_branch}" - ) - merge_base = _subprocess_runner_capture(git_command)[:8] + git_command = f"git merge-base HEAD {args.base_branch}" + merge_base = _subprocess_run_print( + git_command.split(" "), capture_output=True, text=True + ).stdout[:8] with NamedTemporaryFile("w") as hashfile: - hashfile.writelines([merge_base, "\n", head_sha]) + hashfile.writelines([merge_base, "\n", "HEAD"]) hashfile.flush() commit_range = f"HASHFILE:{hashfile.name}" - asv_command = shlex.split(ASV_HARNESS.format(posargs=commit_range)) - _subprocess_runner([*asv_command, *args.asv_args], asv=True) + asv_command = ASV_HARNESS.format(posargs=commit_range) + _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) - _asv_compare(merge_base, head_sha) + _asv_compare(merge_base, "HEAD") class _CSPerf(_SubParserGenerator, ABC): @@ -458,8 +281,7 @@ class _CSPerf(_SubParserGenerator, ABC): "Run the on-demand {} suite of benchmarks (part of the UK Met " "Office NG-VAT project) for the ``HEAD`` of ``upstream/main`` only, " "and publish the results to the input **publish_dir**, within a " - "unique subdirectory for this run.\n" - "Uses `asv run`." + "unique subdirectory for this run." ) epilog = ( "e.g. 
python bm_runner.py {0} my_publish_dir\n" @@ -499,28 +321,19 @@ def csperf( asv_command = ( ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}" ) - + # C/SPerf benchmarks are much bigger than the CI ones: + # Don't fail the whole run if memory blows on 1 benchmark. + asv_command = asv_command.replace(" --strict", "") # Only do a single round. - asv_command = shlex.split( - re.sub(r"rounds=\d", "rounds=1", asv_command) - ) - try: - _subprocess_runner([*asv_command, *args.asv_args], asv=True) - except subprocess.CalledProcessError as err: - # C/SPerf benchmarks are much bigger than the CI ones: - # Don't fail the whole run if memory blows on 1 benchmark. - # ASV produces return code of 2 if the run includes crashes. - if err.returncode != 2: - raise - - asv_command = shlex.split( - f"publish {commit_range} --html-dir={publish_subdir}" - ) - _subprocess_runner(asv_command, asv=True) + asv_command = re.sub(r"rounds=\d", "rounds=1", asv_command) + _subprocess_run_asv([*asv_command.split(" "), *args.asv_args]) + + asv_command = f"publish {commit_range} --html-dir={publish_subdir}" + _subprocess_run_asv(asv_command.split(" ")) # Print completion message. location = BENCHMARKS_DIR / ".asv" - echo( + print( f'New ASV results for "{run_type}".\n' f'See "{publish_subdir}",' f'\n or JSON files under "{location / "results"}".' @@ -567,29 +380,7 @@ def add_arguments(self) -> None: @staticmethod def func(args: argparse.Namespace) -> None: _setup_common() - _subprocess_runner([args.asv_sub_command, *args.asv_args], asv=True) - - -class GhPost(_SubParserGenerator): - name = "_gh_post" - description = ( - "Used by GitHub Actions to post benchmark reports that were prepared " - "during previous actions. Separated to comply with GitHub's security " - "requirements." - ) - epilog = "Sole acceptable syntax: python bm_runner.py _gh_post" - - @staticmethod - def func(args: argparse.Namespace) -> None: - _gh_post_reports() - - # No arguments permitted for this subclass: - - def add_arguments(self) -> None: - pass - - def add_asv_arguments(self) -> None: - pass + _subprocess_run_asv([args.asv_sub_command, *args.asv_args]) def main(): @@ -599,7 +390,7 @@ def main(): ) subparsers = parser.add_subparsers(required=True) - for gen in (Overnight, Branch, CPerf, SPerf, Custom, GhPost): + for gen in (Overnight, Branch, CPerf, SPerf, Custom): _ = gen(subparsers).subparser parsed = parser.parse_args() diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index 2181b89b8c..bebbad4224 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -54,7 +54,7 @@ def main(): ) # Generate area-weights array. As e1 and a1b are on the same grid we can - # do this just once and reuse. This method requires bounds on lat/lon + # do this just once and re-use. This method requires bounds on lat/lon # coords, so let's add some in sensible locations using the "guess_bounds" # method. e1.coord("latitude").guess_bounds() diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 529018ec8c..5e158346a9 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -171,13 +171,23 @@ def main(): ) plt.gca().coastlines() - # Now add a colour bar which spans the two plots. Here we pass Figure.axes - # which is a list of all (two) axes currently on the figure. 
Note that - # these are different to the contents of ax_array, because those were - # standard Matplotlib Axes that Iris automatically replaced with Cartopy - # GeoAxes. + # Now add a colourbar who's leftmost point is the same as the leftmost + # point of the left hand plot and rightmost point is the rightmost + # point of the right hand plot. + + # Get the positions of the 2nd plot and the left position of the 1st plot. + left, bottom, width, height = ax_array[1].get_position().bounds + first_plot_left = ax_array[0].get_position().bounds[0] + + # The width of the colorbar should now be simple. + width = left - first_plot_left + width + + # Add axes to the figure, to place the colour bar. + colorbar_axes = fig.add_axes([first_plot_left, 0.18, width, 0.03]) + + # Add the colour bar. cbar = plt.colorbar( - contour_result, ax=fig.axes, aspect=60, orientation="horizontal" + contour_result, colorbar_axes, orientation="horizontal" ) # Label the colour bar and add ticks. diff --git a/docs/gallery_tests/__init__.py b/docs/gallery_tests/__init__.py index 091e997248..ac5753e58b 100644 --- a/docs/gallery_tests/__init__.py +++ b/docs/gallery_tests/__init__.py @@ -1,4 +1,5 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py index d3ca8309f8..a218b305a2 100644 --- a/docs/gallery_tests/conftest.py +++ b/docs/gallery_tests/conftest.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Pytest fixtures for the gallery tests.""" diff --git a/docs/gallery_tests/test_gallery_examples.py b/docs/gallery_tests/test_gallery_examples.py index 93f361a62a..0d0793a7da 100644 --- a/docs/gallery_tests/test_gallery_examples.py +++ b/docs/gallery_tests/test_gallery_examples.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. import importlib diff --git a/docs/src/_templates/layout.html b/docs/src/_templates/layout.html new file mode 100644 index 0000000000..974bd12753 --- /dev/null +++ b/docs/src/_templates/layout.html @@ -0,0 +1,20 @@ +{% extends "pydata_sphinx_theme/layout.html" %} + +{# This uses blocks. See: + https://www.sphinx-doc.org/en/master/templating.html +#} + + + {%- block docs_body %} + + {% if on_rtd and rtd_version == 'latest' %} +
+ You are viewing the latest unreleased documentation + v{{ version }}. You can switch to a stable version + via the flyout menu in the bottom corner of the screen. +
+

+ {%- endif %} + + {{ super() }} +{%- endblock %} diff --git a/docs/src/community/index.rst b/docs/src/community/index.rst index ee227513b3..114cb96fe9 100644 --- a/docs/src/community/index.rst +++ b/docs/src/community/index.rst @@ -40,14 +40,12 @@ smoother interoperability: * The :mod:`iris.pandas` module * :doc:`iris_xarray` -* :doc:`phrasebook` .. toctree:: :maxdepth: 1 :hidden: iris_xarray - phrasebook Plugins ------- diff --git a/docs/src/community/iris_xarray.rst b/docs/src/community/iris_xarray.rst index 9d795fcd9e..2250e3c0a3 100644 --- a/docs/src/community/iris_xarray.rst +++ b/docs/src/community/iris_xarray.rst @@ -7,7 +7,6 @@ Iris ❤️ :term:`Xarray` There is a lot of overlap between Iris and :term:`Xarray`, but some important differences too. Below is a summary of the most important differences, so that you can be prepared, and to help you choose the best package for your use case. -See :doc:`phrasebook` for a broad comparison of terminology. Overall Experience ------------------ diff --git a/docs/src/community/phrasebook.rst b/docs/src/community/phrasebook.rst deleted file mode 100644 index bcd91cca83..0000000000 --- a/docs/src/community/phrasebook.rst +++ /dev/null @@ -1,66 +0,0 @@ -.. include:: ../common_links.inc - -.. _phrasebook: - -Package Phrasebook -=================== - -There are a number of similar packages to Iris, and a lot of these have their own -terminology for similar things. Whether you're coming or going, we hope this might -be a helpful guide to these differences! -Definitions for each can be found in :ref:`glossary`. See also -`Xarray terminology `_. - -.. list-table:: Phrasebook - :widths: 25 25 25 50 - :header-rows: 1 - - * - Iris - - Xarray - - Example - - Notes - * - Non-Lazy - - Eager - - - - Used to relate to functions, rather than the data. - * - Cube - - DataArray - - - - - * - CubeList - - Dataset - - - - Though similar, a CubeList is a simpler object, and is - not a perfect comparison to a Dataset - * - Merge/ Concatenate - - Concatenate - - `Xarray concatenate `_ - - Xarray's concatenate has the capability to largely do what both - Iris merge and Iris concatenate do. However, this is not a perfect comparison, - please see the link for more information. - * - - - Merge - - `Xarray merge `_ - - Xarray's Merge function doesn't map neatly map to any Iris feature. - Please see the link for more information. - * - Scalar Coordinate - - - - - - Iris makes a distinction between scalar coordinates and non-scalar coordinates, - whereas xarray documentation makes a distinction between scalar and non-scalar *data*. - It is possible to make coordinates with scalar data in both Iris and xarray - but only Iris will label such coordinates. - * - AuxCoord - - Non-Dimensional Coordinate - - - - Coordinates in Iris and xarray are categorised using different rules, - and so are not a one-to-one match. - * - DimCoord - - Dimension Coordinate - - - - Coordinates in Iris and xarray are categorised using different rules, - and so are not a one-to-one match. - ----- - -`To top `_ \ No newline at end of file diff --git a/docs/src/conf.py b/docs/src/conf.py index c59aca4909..7f7322c1f8 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # -*- coding: utf-8 -*- # @@ -15,6 +16,7 @@ # # All configuration values have a default; values that are commented out # serve to show the default. + # ---------------------------------------------------------------------------- import datetime @@ -194,7 +196,7 @@ def _dotv(version): todo_include_todos = True # api generation configuration -autodoc_member_order = "alphabetical" +autodoc_member_order = "groupwise" autodoc_default_flags = ["show-inheritance"] # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints @@ -292,9 +294,7 @@ def _dotv(version): "collapse_navigation": True, "navigation_depth": 3, "show_prev_next": True, - "navbar_align": "content", - # removes the search box from the top bar - "navbar_persistent": [], + "navbar_align": "left", # TODO: review if 6 links is too crowded. "header_links_before_dropdown": 6, "github_url": "https://github.com/SciTools/iris", @@ -329,16 +329,6 @@ def _dotv(version): }, } -# if we are building via Read The Docs and it is the latest (not stable) -if on_rtd and rtd_version == "latest": - html_theme_options[ - "announcement" - ] = f""" - You are viewing the latest unreleased documentation - {version}. You can switch to a - stable - version.""" - rev_parse = run(["git", "rev-parse", "--short", "HEAD"], capture_output=True) commit_sha = rev_parse.stdout.decode().strip() @@ -388,6 +378,7 @@ def _dotv(version): "https://docs.github.com", "https://github.com", "http://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html", + "http://schacon.github.com/git", "http://scitools.github.com/cartopy", "http://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf", "https://software.ac.uk/how-cite-software", diff --git a/docs/src/copyright.rst b/docs/src/copyright.rst index d5996fd999..2829374f47 100644 --- a/docs/src/copyright.rst +++ b/docs/src/copyright.rst @@ -6,15 +6,18 @@ Iris Code --------- All Iris source code, unless explicitly stated, is ``Copyright Iris -contributors`` and is licensed under the **BSD-3 License**. +contributors`` and is licensed under the **GNU Lesser General Public +License** as published by the Free Software Foundation, either version 3 of +the License, or (at your option) any later version. You should find all source files with the following header: .. admonition:: Code License Copyright Iris contributors - This file is part of Iris and is released under the BSD license. - See LICENSE in the root of the repository for full licensing details. + This file is part of Iris and is released under the LGPL license. + See COPYING and COPYING.LESSER in the root of the repository for full + licensing details. Iris Documentation and Examples diff --git a/docs/src/developers_guide/contributing_documentation_easy.rst b/docs/src/developers_guide/contributing_documentation_easy.rst index 51554f9e19..f54de628bf 100755 --- a/docs/src/developers_guide/contributing_documentation_easy.rst +++ b/docs/src/developers_guide/contributing_documentation_easy.rst @@ -81,9 +81,9 @@ Describing what you've changed and why will help the person who reviews your cha .. tip:: If you're not sure that you're making your pull request right, or have a - question, then make it anyway! You can then comment on it to ask your - question, then someone from the dev team will be happy to help you out (then - edit your pull request if you need to). + question, then make it anyway! 
You can then comment on it tagging + ``@SciTools/iris-devs`` to ask your question (then edit your pull request if + you need to). What Happens Next? ^^^^^^^^^^^^^^^^^^ diff --git a/docs/src/developers_guide/contributing_getting_involved.rst b/docs/src/developers_guide/contributing_getting_involved.rst index 6ade098b6b..9ec6559114 100644 --- a/docs/src/developers_guide/contributing_getting_involved.rst +++ b/docs/src/developers_guide/contributing_getting_involved.rst @@ -61,5 +61,6 @@ If you are new to using GitHub we recommend reading the ../generated/api/iris ../whatsnew/index + ../techpapers/index ../copyright ../voted_issues diff --git a/docs/src/developers_guide/gitwash/development_workflow.rst b/docs/src/developers_guide/gitwash/development_workflow.rst index 8545a04308..b086922d5b 100644 --- a/docs/src/developers_guide/gitwash/development_workflow.rst +++ b/docs/src/developers_guide/gitwash/development_workflow.rst @@ -14,7 +14,8 @@ Workflow Summary In what follows we'll refer to the upstream iris ``main`` branch, as "trunk". -* Don't use your ``main`` (that is on your fork) branch for development. +* Don't use your ``main`` (that is on your fork) branch for anything. + Consider deleting it. * When you are starting a new set of changes, fetch any changes from trunk, and start a new *feature branch* from that. * Make a new branch for each separable set of changes |emdash| "one task, one @@ -33,6 +34,13 @@ what you've done, and why you did it. See `linux git workflow`_ for some explanation. +Consider Deleting Your Main Branch +================================== + +It may sound strange, but deleting your own ``main`` branch can help reduce +confusion about which branch you are on. See `deleting master on github`_ for +details. + .. _update-mirror-trunk: Update the Mirror of Trunk @@ -74,7 +82,7 @@ what the changes in the branch are for. For example ``add-ability-to-fly``, or git checkout my-new-feature Generally, you will want to keep your feature branches on your public github_ -fork of `iris`_. To do this, you ``git push`` this new branch up to your +fork of `iris`_. To do this, you `git push`_ this new branch up to your github repo. Generally (if you followed the instructions in these pages, and by default), git will have a link to your github repo, called ``origin``. You push up to your own repo on github with:: @@ -108,7 +116,7 @@ In More Detail -------------- #. Make some changes -#. See which files have changed with ``git status``. +#. See which files have changed with ``git status`` (see `git status`_). You'll see a listing like this one:: # On branch ny-new-feature @@ -124,15 +132,16 @@ In More Detail # INSTALL no changes added to commit (use "git add" and/or "git commit -a") -#. Check what the actual changes are with ``git diff``. -#. Add any new files to version control ``git add new_file_name``. +#. Check what the actual changes are with ``git diff`` (`git diff`_). +#. Add any new files to version control ``git add new_file_name`` (see + `git add`_). #. To commit all modified files into the local copy of your repo, do ``git commit -am 'A commit message'``. Note the ``-am`` options to ``commit``. The ``m`` flag just signals that you're going to type a message on the command line. The ``a`` flag will automatically stage all files that have been modified and deleted. #. To push the changes up to your forked repo on github, do a ``git - push``. + push`` (see `git push`_). 
Testing Your Changes diff --git a/docs/src/developers_guide/gitwash/git_links.inc b/docs/src/developers_guide/gitwash/git_links.inc index bf20d13e5f..11d037ccf4 100644 --- a/docs/src/developers_guide/gitwash/git_links.inc +++ b/docs/src/developers_guide/gitwash/git_links.inc @@ -13,6 +13,21 @@ .. _github help: https://help.github.com .. _git documentation: https://git-scm.com/docs +.. _git clone: http://schacon.github.com/git/git-clone.html +.. _git checkout: http://schacon.github.com/git/git-checkout.html +.. _git commit: http://schacon.github.com/git/git-commit.html +.. _git push: http://schacon.github.com/git/git-push.html +.. _git pull: http://schacon.github.com/git/git-pull.html +.. _git add: http://schacon.github.com/git/git-add.html +.. _git status: http://schacon.github.com/git/git-status.html +.. _git diff: http://schacon.github.com/git/git-diff.html +.. _git log: http://schacon.github.com/git/git-log.html +.. _git branch: http://schacon.github.com/git/git-branch.html +.. _git remote: http://schacon.github.com/git/git-remote.html +.. _git rebase: http://schacon.github.com/git/git-rebase.html +.. _git config: http://schacon.github.com/git/git-config.html + .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html +.. _deleting master on github: https://matthew-brett.github.io/pydagogue/gh_delete_master.html .. |emdash| unicode:: U+02014 diff --git a/docs/src/further_topics/dask_best_practices/index.rst b/docs/src/further_topics/dask_best_practices/index.rst index f126427d3f..eb3321345b 100644 --- a/docs/src/further_topics/dask_best_practices/index.rst +++ b/docs/src/further_topics/dask_best_practices/index.rst @@ -144,8 +144,8 @@ Iris provides a basic chunking shape to Dask, attempting to set the shape for best performance. The chunking that is used can depend on the file format that is being loaded. See below for how chunking is performed for: -* :ref:`chunking_netcdf` -* :ref:`chunking_pp_ff` + * :ref:`chunking_netcdf` + * :ref:`chunking_pp_ff` It can in some cases be beneficial to re-chunk the arrays in Iris cubes. For information on how to do this, see :ref:`dask_rechunking`. @@ -208,9 +208,9 @@ If you feel you have an example of a Dask best practice that you think may be he please share them with us by raising a new `discussion on the Iris repository `_. -* :doc:`dask_pp_to_netcdf` -* :doc:`dask_parallel_loop` -* :doc:`dask_bags_and_greed` + * :doc:`dask_pp_to_netcdf` + * :doc:`dask_parallel_loop` + * :doc:`dask_bags_and_greed` .. toctree:: :hidden: diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst deleted file mode 100644 index a69247008a..0000000000 --- a/docs/src/further_topics/filtering_warnings.rst +++ /dev/null @@ -1,271 +0,0 @@ -.. _filtering-warnings: - -================== -Filtering Warnings -================== - -Since Iris cannot predict your specific needs, it by default raises Warnings -for anything that might be a problem for **any** user, and is designed to work with -you to ``ignore`` Warnings which you do not find helpful. - -.. testsetup:: filtering_warnings - - from pathlib import Path - import sys - import warnings - - import iris - import iris.coord_systems - import iris.exceptions - - # Hack to ensure doctests actually see Warnings that are raised, and that - # they have a relative path (so a test pass is not machine-dependent). 
- warnings.filterwarnings("default") - IRIS_FILE = Path(iris.__file__) - def custom_warn(message, category, filename, lineno, file=None, line=None): - filepath = Path(filename) - filename = str(filepath.relative_to(IRIS_FILE.parents[1])) - sys.stdout.write(warnings.formatwarning(message, category, filename, lineno)) - warnings.showwarning = custom_warn - - geog_cs_globe = iris.coord_systems.GeogCS(6400000) - orthographic_coord_system = iris.coord_systems.Orthographic( - longitude_of_projection_origin=0, - latitude_of_projection_origin=0, - ellipsoid=geog_cs_globe, - ) - - - def my_operation(): - geog_cs_globe.inverse_flattening = 0.1 - _ = orthographic_coord_system.as_cartopy_crs() - -Here is a hypothetical operation - ``my_operation()`` - which raises two -Warnings: - -.. doctest:: filtering_warnings - - >>> my_operation() - ... - iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:822: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. - warnings.warn( - -Warnings can be suppressed using the Python warnings filter with the ``ignore`` -action. Detailed information is available in the Python documentation: -:external+python:mod:`warnings`. - -The key points are: - -- :ref:`When`: a warnings filter can be applied - either from the command line or from within Python. -- :ref:`What`: a warnings filter accepts - various arguments to specify which Warnings are being filtered. Both broad - and narrow filters are possible. - -.. _warning-filter-application: - -**When** a Warnings Filter can be Applied ------------------------------------------ - -- **Command line:** setting the :external+python:envvar:`PYTHONWARNINGS` - environment variable. -- **Command line:** the `python -W `_ - command line argument. -- **Within Python:** use :func:`warnings.filterwarnings` . - -The :ref:`warning-filter-specificity` section demonstrates using -:func:`warnings.filterwarnings`, and shows the equivalent **command line** -approaches. - - -.. _warning-filter-specificity: - -**What** Warnings will be Filtered ----------------------------------- - -.. note:: - - For all of these examples we are using the - :class:`~warnings.catch_warnings` context manager to ensure any changes to - settings are temporary. - - This should always work fine for the ``ignore`` - warning filter action, but note that some of the other actions - may not behave correctly with all Iris operations, as - :class:`~warnings.catch_warnings` is not thread-safe (e.g. using the - ``once`` action may cause 1 warning per chunk of lazy data). - -Specific Warnings -~~~~~~~~~~~~~~~~~ - -**When you do not want a specific warning, but still want all others.** - -You can target specific Warning messages, e.g. - -.. doctest:: filtering_warnings - - >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", message="Discarding false_easting") - ... my_operation() - ... - iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
- warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - -:: - - python -W ignore:"Discarding false_easting" - export PYTHONWARNINGS=ignore:"Discarding false_easting" - ----- - -Or you can target Warnings raised by specific lines of specific modules, e.g. - -.. doctest:: filtering_warnings - - >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=455) - ... my_operation() - ... - iris/coord_systems.py:822: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. - warnings.warn( - -:: - - python -W ignore:::iris.coord_systems:453 - export PYTHONWARNINGS=ignore:::iris.coord_systems:453 - -Warnings from a Common Source -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -**When you do not want ANY warnings raised by a module, or collection of -modules.** - -E.g. filtering the ``coord_systems`` module: - -.. doctest:: filtering_warnings - - >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems") - ... my_operation() - -:: - - python -W ignore:::iris.coord_systems - export PYTHONWARNINGS=ignore:::iris.coord_systems - ----- - -If using :func:`warnings.filterwarnings` , you can also use partial -definitions. The below example will ``ignore`` all Warnings from ``iris`` as a -whole. - -.. doctest:: filtering_warnings - - >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris") - ... my_operation() - -The above 'partial' filter is not available with the command line approaches. - -Warnings of a Common Type -~~~~~~~~~~~~~~~~~~~~~~~~~ - -**When you do not want any Warnings of the same nature, from anywhere in the -code you are calling.** - -The below example will ``ignore`` any -:class:`~iris.exceptions.IrisDefaultingWarning` that gets raised by *any* -module during execution: - -.. doctest:: filtering_warnings - - >>> with warnings.catch_warnings(): - ... warnings.filterwarnings( - ... "ignore", - ... category=iris.exceptions.IrisDefaultingWarning - ... ) - ... my_operation() - ... - iris/coord_systems.py:455: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - ----- - -Using :class:`~iris.exceptions.IrisUserWarning` in the filter will ``ignore`` -both Warnings, since :class:`~iris.exceptions.IrisDefaultingWarning` subclasses -:class:`~iris.exceptions.IrisUserWarning` : - -.. doctest:: filtering_warnings - - >>> with warnings.catch_warnings(): - ... warnings.filterwarnings( - ... "ignore", - ... category=iris.exceptions.IrisUserWarning - ... ) - ... my_operation() - ----- - -The command line approaches can only handle the built-in Warning -categories (`cpython#66733`_):: - - python -W ignore::UserWarning - export PYTHONWARNINGS=ignore::UserWarning - ----- - -There are several built-in Python warning categories that can be used here -(:class:`DeprecationWarning` being a popular example, see -:external+python:mod:`warnings` for more). Since Iris has -so many different warnings that might be raised, Iris subclasses -:class:`UserWarning` to :class:`~iris.exceptions.IrisUserWarning`, which itself -has **many** specialised subclasses. These subclasses exist to give you more -granularity in your warning filtering; you can see the full list by -searching the :mod:`iris.exceptions` page for ``warning`` . - -.. 
attention:: - - If you have ideas for adding/altering Iris' warning categories, please - :ref:`get in touch`! The categories exist to - make your life easier, and it is simple to make modifications. - - -More Detail ------------ - -Different people use Iris for very different purposes, from quick file -visualisation to extract-transform-load to statistical analysis. These -contrasting priorities mean disagreement on which Iris problems can be ignored -and which are critically important. - -For problems that prevent Iris functioning: **Concrete Exceptions** are raised, which -stop code from running any further - no debate here. For less catastrophic -problems: **Warnings** are raised, -which notify you (in ``stderr``) but allow code to continue running. The Warnings are -there because Iris may **OR may not** function in the way you expect, -depending on what you need - e.g. a problem might prevent data being saved to -NetCDF, but statistical analysis will still work fine. - -Examples of Iris Warnings -~~~~~~~~~~~~~~~~~~~~~~~~~ - -- If you attempt to plot un-bounded point data as a ``pcolormesh``: Iris will - guess appropriate bounds around each point so that quadrilaterals can be - plotted. This permanently modifies the relevant coordinates, so the you are - warned in case downstream operations assume un-bounded coordinates. -- If you load a NetCDF file where a CF variable references another variable - - e.g. ``my_var:coordinates = "depth_var" ;`` - but the referenced variable - (``depth_var``) is not in the file: Iris will still construct - its data model, but without this reference relationship. You are warned since - the file includes an error and the loaded result might therefore not be as - expected. - - -.. testcleanup:: filtering_warnings - - warnings.filterwarnings("ignore") - - -.. _cpython#66733: https://github.com/python/cpython/issues/66733 diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst deleted file mode 100644 index 016d9f80ea..0000000000 --- a/docs/src/further_topics/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. _further_topics_index: - - -Further Topics -=============== - -Extra information on specific technical issues. - -.. toctree:: - :maxdepth: 1 - - filtering_warnings - metadata - lenient_metadata - lenient_maths - um_files_loading - missing_data_handling - netcdf_io - dask_best_practices/index - ugrid/index \ No newline at end of file diff --git a/docs/src/further_topics/lenient_maths.rst b/docs/src/further_topics/lenient_maths.rst index 51f77fb956..818efe4763 100644 --- a/docs/src/further_topics/lenient_maths.rst +++ b/docs/src/further_topics/lenient_maths.rst @@ -35,9 +35,9 @@ introduced and discussed the concept of lenient metadata; a more pragmatic and forgiving approach to :ref:`comparing `, :ref:`combining ` and understanding the :ref:`differences ` between your metadata -(:ref:`metadata members table`). The lenient metadata philosophy introduced +(:numref:`metadata members table`). The lenient metadata philosophy introduced there is extended to cube maths, with the view to also preserving as much common -coordinate (:ref:`metadata classes table`) information, as well as common +coordinate (:numref:`metadata classes table`) information, as well as common metadata, between the participating :class:`~iris.cube.Cube` operands as possible. 
Let's consolidate our understanding of lenient and strict cube maths through diff --git a/docs/src/further_topics/lenient_metadata.rst b/docs/src/further_topics/lenient_metadata.rst index 5de9ad70c4..b68ed501ba 100644 --- a/docs/src/further_topics/lenient_metadata.rst +++ b/docs/src/further_topics/lenient_metadata.rst @@ -17,10 +17,10 @@ and also :ref:`conversion `. The common metadata API is implemented through the ``metadata`` property on each of the Iris `CF Conventions`_ class containers -(:ref:`metadata classes table`), and provides a common gateway for users to +(:numref:`metadata classes table`), and provides a common gateway for users to easily manage and manipulate their metadata in a consistent and unified way. -This is primarily all thanks to the metadata classes (:ref:`metadata classes table`) +This is primarily all thanks to the metadata classes (:numref:`metadata classes table`) that support the necessary state and behaviour required by the common metadata API. Namely, it is the ``equal`` (``__eq__``), ``difference`` and ``combine`` methods that provide this rich metadata behaviour, all of which are explored @@ -267,7 +267,7 @@ Now, compare our metadata, >>> metadata.equal(latitude.metadata, lenient=True) True -Again, lenient equality (:ref:`lenient equality table`) offers a more +Again, lenient equality (:numref:`lenient equality table`) offers a more forgiving and practical alternative to strict behaviour. @@ -277,7 +277,7 @@ Lenient Difference ------------------ Similar to :ref:`lenient equality`, the lenient ``difference`` method -(:ref:`lenient difference table`) considers there to be no difference between +(:numref:`lenient difference table`) considers there to be no difference between comparing **something** with **nothing** (``None``). This working assumption is not naively applied to all metadata members, but rather a more pragmatic approach is adopted, as discussed later in :ref:`lenient members`. @@ -334,7 +334,7 @@ Lenient Combination ------------------- The behaviour of the lenient ``combine`` metadata class method is outlined -in :ref:`lenient combine table`, and as with :ref:`lenient equality` and +in :numref:`lenient combine table`, and as with :ref:`lenient equality` and :ref:`lenient difference` is enabled through the ``lenient`` keyword argument. The difference in behaviour between **lenient** and diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index 6d32b10b7a..a564b2ba68 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -52,9 +52,9 @@ give them meaning. The **metadata** used to define an Iris `CF Conventions`_ class is composed of individual **metadata members**, almost all of which reference specific `CF Conventions`_ terms. The individual metadata members used to define each of -the Iris `CF Conventions`_ classes are shown in :ref:`metadata members table`. +the Iris `CF Conventions`_ classes are shown in :numref:`metadata members table`. -As :ref:`metadata members table` highlights, **specific** metadata is used to +As :numref:`metadata members table` highlights, **specific** metadata is used to define and represent each Iris `CF Conventions`_ class. This means that metadata alone, can be used to easily **identify**, **compare** and **differentiate** between individual class instances. @@ -91,16 +91,6 @@ actual `data attribute`_ names of the metadata members on the Iris class. metadata members are Iris specific terms, rather than recognised `CF Conventions`_ terms. 
-.. note:: - - :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` implement the - concept of dataset-level and variable-level attributes, to enable correct - NetCDF loading and saving (see :class:`~iris.cube.CubeAttrsDict` and NetCDF - :func:`~iris.fileformats.netcdf.saver.save` for more). ``attributes`` on - the other classes do not have this distinction, but the ``attributes`` - members of ALL the classes still have the same interface, and can be - compared. - Common Metadata API =================== @@ -111,7 +101,7 @@ Common Metadata API cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc")) As of Iris ``3.0.0``, a unified treatment of metadata has been applied -across each Iris class (:ref:`metadata members table`) to allow users +across each Iris class (:numref:`metadata members table`) to allow users to easily manage and manipulate their metadata in a consistent way. This is achieved through the ``metadata`` property, which allows you to @@ -138,12 +128,10 @@ For example, given the following :class:`~iris.cube.Cube`, source 'Data from Met Office Unified Model 6.05' We can easily get all of the associated metadata of the :class:`~iris.cube.Cube` -using the ``metadata`` property (note the specialised -:class:`~iris.cube.CubeAttrsDict` for the :attr:`~iris.cube.Cube.attributes`, -as mentioned earlier): +using the ``metadata`` property: >>> cube.metadata - CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes=CubeAttrsDict(globals={'Conventions': 'CF-1.5'}, locals={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}), cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) We can also inspect the ``metadata`` of the ``longitude`` :class:`~iris.coords.DimCoord` attached to the :class:`~iris.cube.Cube` in the same way: @@ -158,7 +146,7 @@ Or use the ``metadata`` property again, but this time on the ``forecast_period`` CoordMetadata(standard_name='forecast_period', long_name=None, var_name='forecast_period', units=Unit('hours'), attributes={}, coord_system=None, climatological=False) Note that, the ``metadata`` property is available on each of the Iris `CF Conventions`_ -class containers referenced in :ref:`metadata members table`, and thus provides +class containers referenced in :numref:`metadata members table`, and thus provides a **common** and **consistent** approach to managing your metadata, which we'll now explore a little more fully. @@ -168,7 +156,7 @@ Metadata Classes The ``metadata`` property will return an appropriate `namedtuple`_ metadata class for each Iris `CF Conventions`_ class container. The metadata class returned by -each container class is shown in :ref:`metadata classes table` below, +each container class is shown in :numref:`metadata classes table` below, .. _metadata classes table: .. 
table:: - Iris namedtuple metadata classes @@ -187,7 +175,7 @@ each container class is shown in :ref:`metadata classes table` below, ========================================== ======================================================== Akin to the behaviour of a `namedtuple`_, the metadata classes in -:ref:`metadata classes table` create **tuple-like** instances i.e., they provide a +:numref:`metadata classes table` create **tuple-like** instances i.e., they provide a **snapshot** of the associated metadata member **values**, which are **not settable**, but they **may be mutable** depending on the data-type of the member. For example, given the following ``metadata`` of a :class:`~iris.coords.DimCoord`, @@ -243,13 +231,13 @@ with a **snapshot** of the container class metadata values at that point in time Skip ahead to :ref:`metadata assignment ` for a fuller discussion on options how to **set** and **get** metadata on the instance of -an Iris `CF Conventions`_ container class (:ref:`metadata classes table`). +an Iris `CF Conventions`_ container class (:numref:`metadata classes table`). Metadata Class Behaviour ------------------------ -As mentioned previously, the metadata classes in :ref:`metadata classes table` +As mentioned previously, the metadata classes in :numref:`metadata classes table` inherit the behaviour of a `namedtuple`_, and so act and feel like a `namedtuple`_, just as you might expect. For example, given the following ``metadata``, @@ -326,7 +314,7 @@ Richer Metadata Behaviour cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc")) longitude = cube.coord("longitude") -The metadata classes from :ref:`metadata classes table` support additional +The metadata classes from :numref:`metadata classes table` support additional behaviour above and beyond that of the standard Python `namedtuple`_, which allows you to easily **compare**, **combine**, **convert** and understand the **difference** between your ``metadata`` instances. @@ -340,7 +328,7 @@ Metadata Equality The metadata classes support both **equality** (``__eq__``) and **inequality** (``__ne__``), but no other `rich comparison`_ operators are implemented. This is simply because there is no obvious ordering to any collective of metadata -members, as defined in :ref:`metadata members table`. +members, as defined in :numref:`metadata members table`. For example, given the following :class:`~iris.coords.DimCoord`, @@ -455,7 +443,7 @@ be ``False``, The reason different metadata classes cannot be compared is simply because each metadata class contains **different** members, as shown in -:ref:`metadata members table`. However, there is an exception to the rule... +:numref:`metadata members table`. However, there is an exception to the rule... .. _exception rule: @@ -687,8 +675,8 @@ For example, consider the following :class:`~iris.common.metadata.CubeMetadata`, .. 
doctest:: metadata-combine - >>> cube.metadata - CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes=CubeAttrsDict(globals={'Conventions': 'CF-1.5'}, locals={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}), cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + >>> cube.metadata # doctest: +SKIP + CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) We can perform the **identity function** by comparing the metadata with itself, @@ -713,7 +701,7 @@ which is replaced with a **different value**, >>> metadata != cube.metadata True >>> metadata.combine(cube.metadata) # doctest: +SKIP - CubeMetadata(standard_name=None, long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05', 'Conventions': 'CF-1.5'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + CubeMetadata(standard_name=None, long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'STASH': STASH(model=1, section=3, item=236), 'source': 'Data from Met Office Unified Model 6.05', 'Model scenario': 'A1B', 'Conventions': 'CF-1.5'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) The ``combine`` method combines metadata by performing a **strict** comparison between each of the associated metadata member values, @@ -736,7 +724,7 @@ Let's reinforce this behaviour, but this time by combining metadata where the >>> metadata != cube.metadata True >>> metadata.combine(cube.metadata).attributes - CubeAttrsDict(globals={}, locals={'Model scenario': 'A1B'}) + {'Model scenario': 'A1B'} The combined result for the ``attributes`` member only contains those **common keys** with **common values**. @@ -822,19 +810,18 @@ the ``from_metadata`` class method. For example, given the following .. doctest:: metadata-convert - >>> cube.metadata - CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes=CubeAttrsDict(globals={'Conventions': 'CF-1.5'}, locals={'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}), cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) + >>> cube.metadata # doctest: +SKIP + CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) We can easily convert it to a :class:`~iris.common.metadata.DimCoordMetadata` instance using ``from_metadata``, .. 
doctest:: metadata-convert - >>> newmeta = DimCoordMetadata.from_metadata(cube.metadata) - >>> print(newmeta) - DimCoordMetadata(standard_name=air_temperature, var_name=air_temperature, units=K, attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}) + >>> DimCoordMetadata.from_metadata(cube.metadata) # doctest: +SKIP + DimCoordMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, coord_system=None, climatological=None, circular=None) -By examining :ref:`metadata members table`, we can see that the +By examining :numref:`metadata members table`, we can see that the :class:`~iris.cube.Cube` and :class:`~iris.coords.DimCoord` container classes share the following common metadata members, @@ -862,9 +849,9 @@ class instance, .. doctest:: metadata-convert - >>> newmeta = longitude.metadata.from_metadata(cube.metadata) - >>> print(newmeta) - DimCoordMetadata(standard_name=air_temperature, var_name=air_temperature, units=K, attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}) + >>> longitude.metadata.from_metadata(cube.metadata) + DimCoordMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, coord_system=None, climatological=None, circular=None) + .. _metadata assignment: @@ -880,7 +867,7 @@ Metadata Assignment latitude = cube.coord("latitude") The ``metadata`` property available on each Iris `CF Conventions`_ container -class (:ref:`metadata classes table`) can not only be used **to get** +class (:numref:`metadata classes table`) can not only be used **to get** the metadata of an instance, but also **to set** the metadata on an instance. For example, given the following :class:`~iris.common.metadata.DimCoordMetadata` of the @@ -991,7 +978,7 @@ Indeed, it's also possible to assign to the ``metadata`` property with a >>> longitude.metadata DimCoordMetadata(standard_name='longitude', long_name=None, var_name='longitude', units=Unit('degrees'), attributes={}, coord_system=GeogCS(6371229.0), climatological=False, circular=False) >>> longitude.metadata = cube.metadata - >>> longitude.metadata + >>> longitude.metadata # doctest: +SKIP DimCoordMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'Conventions': 'CF-1.5', 'STASH': STASH(model=1, section=3, item=236), 'Model scenario': 'A1B', 'source': 'Data from Met Office Unified Model 6.05'}, coord_system=GeogCS(6371229.0), climatological=False, circular=False) Note that, only **common** metadata members will be assigned new associated diff --git a/docs/src/further_topics/netcdf_io.rst b/docs/src/further_topics/netcdf_io.rst deleted file mode 100644 index e151b2b7c1..0000000000 --- a/docs/src/further_topics/netcdf_io.rst +++ /dev/null @@ -1,140 +0,0 @@ -.. 
testsetup:: chunk_control - - import iris - from iris.fileformats.netcdf.loader import CHUNK_CONTROL - - from pathlib import Path - import dask - import shutil - import tempfile - - tmp_dir = Path(tempfile.mkdtemp()) - tmp_filepath = tmp_dir / "tmp.nc" - - cube = iris.load(iris.sample_data_path("E1_north_america.nc"))[0] - iris.save(cube, tmp_filepath, chunksizes=(120, 37, 49)) - old_dask = dask.config.get("array.chunk-size") - dask.config.set({'array.chunk-size': '500KiB'}) - - -.. testcleanup:: chunk_control - - dask.config.set({'array.chunk-size': old_dask}) - shutil.rmtree(tmp_dir) - -.. _netcdf_io: - -============================= -NetCDF I/O Handling in Iris -============================= - -This document provides a basic account of how Iris loads and saves NetCDF files. - -.. admonition:: Under Construction - - This document is still a work in progress, so might include blank or unfinished sections, - watch this space! - - -Chunk Control --------------- - -Default Chunking -^^^^^^^^^^^^^^^^ - -Chunks are, by default, optimised by Iris on load. This will automatically -decide the best chunksize for your data without any user input. This is -calculated based on a number of factors, including: - -- File Variable Chunking -- Full Variable Shape -- Dask Default Chunksize -- Dimension Order: Earlier (outer) dimensions will be prioritised to be split over later (inner) dimensions. - -.. doctest:: chunk_control - - >>> cube = iris.load_cube(tmp_filepath) - >>> - >>> print(cube.shape) - (240, 37, 49) - >>> print(cube.core_data().chunksize) - (60, 37, 49) - -For more user control, functionality was updated in :pull:`5588`, with the -creation of the :data:`iris.fileformats.netcdf.loader.CHUNK_CONTROL` class. - -Custom Chunking: Set -^^^^^^^^^^^^^^^^^^^^ - -There are three context manangers within :data:`~iris.fileformats.netcdf.loader.CHUNK_CONTROL`. The most basic is -:meth:`~iris.fileformats.netcdf.loader.ChunkControl.set`. This allows you to specify the chunksize for each dimension, -and to specify a ``var_name`` specifically to change. - -Using ``-1`` in place of a chunksize will ensure the chunksize stays the same -as the shape, i.e. no optimisation occurs on that dimension. - -.. doctest:: chunk_control - - >>> with CHUNK_CONTROL.set("air_temperature", time=180, latitude=-1, longitude=25): - ... cube = iris.load_cube(tmp_filepath) - >>> - >>> print(cube.core_data().chunksize) - (180, 37, 25) - -Note that ``var_name`` is optional, and that you don't need to specify every dimension. If you -specify only one dimension, the rest will be optimised using Iris' default behaviour. - -.. doctest:: chunk_control - - >>> with CHUNK_CONTROL.set(longitude=25): - ... cube = iris.load_cube(tmp_filepath) - >>> - >>> print(cube.core_data().chunksize) - (120, 37, 25) - -Custom Chunking: From File -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The second context manager is :meth:`~iris.fileformats.netcdf.loader.ChunkControl.from_file`. -This takes chunksizes as defined in the NetCDF file. Any dimensions without specified chunks -will default to Iris optimisation. - -.. doctest:: chunk_control - - >>> with CHUNK_CONTROL.from_file(): - ... cube = iris.load_cube(tmp_filepath) - >>> - >>> print(cube.core_data().chunksize) - (120, 37, 49) - -Custom Chunking: As Dask -^^^^^^^^^^^^^^^^^^^^^^^^ - -The final context manager, :meth:`~iris.fileformats.netcdf.loader.ChunkControl.as_dask`, bypasses -Iris' optimisation all together, and will take its chunksizes from Dask's behaviour. - -.. 
doctest:: chunk_control - - >>> with CHUNK_CONTROL.as_dask(): - ... cube = iris.load_cube(tmp_filepath) - >>> - >>> print(cube.core_data().chunksize) - (70, 37, 49) - - -Split Attributes ------------------ - -TBC - - -Deferred Saving ----------------- - -TBC - - -Guess Axis ------------ - -TBC diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst index 0b4334e0f0..cc3cc7b793 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -46,7 +46,7 @@ Structured Grids (the old world) Assigning data to locations using a structured grid is essentially an act of matching coordinate arrays to each dimension of the data array. The data can also be represented as an area (instead of a point) by including a bounds array -for each coordinate array. :ref:`data_structured_grid` visualises an +for each coordinate array. :numref:`data_structured_grid` visualises an example. .. _data_structured_grid: @@ -125,7 +125,7 @@ datum per element, matched to its element by matching the datum index with the coordinate or connectivity index along the **unstructured dimension**. So for an example data array called ``foo``: ``foo[3]`` would be at position ``(x[3], y[3])`` if it were node-located, or at -``faces[3]`` if it were face-located. :ref:`data_ugrid_mesh` visualises an +``faces[3]`` if it were face-located. :numref:`data_ugrid_mesh` visualises an example of what is described above. .. _data_ugrid_mesh: @@ -152,7 +152,7 @@ example of what is described above. The mesh model also supports edges/faces/volumes having associated 'centre' coordinates - to allow point data to be assigned to these elements. 'Centre' is just a convenience term - the points can exist anywhere within their respective -elements. See :ref:`ugrid_element_centres` for a visualised example. +elements. See :numref:`ugrid_element_centres` for a visualised example. .. _ugrid_element_centres: .. figure:: images/ugrid_element_centres.svg @@ -175,7 +175,7 @@ Above we have seen how one could replicate data on a structured grid using a mesh instead. But the utility of a mesh is the extra flexibility it offers. Here are the main examples: -Every node is completely independent - every one can have unique X andY (and Z) coordinate values. See :ref:`ugrid_node_independence`. +Every node is completely independent - every one can have unique X andY (and Z) coordinate values. See :numref:`ugrid_node_independence`. .. _ugrid_node_independence: .. figure:: images/ugrid_node_independence.svg @@ -194,7 +194,7 @@ Every node is completely independent - every one can have unique X andY (and Z) Faces and volumes can have variable node counts, i.e. different numbers of sides. This is achieved by masking the unused 'slots' in the connectivity -array. See :ref:`ugrid_variable_faces`. +array. See :numref:`ugrid_variable_faces`. .. _ugrid_variable_faces: .. figure:: images/ugrid_variable_faces.svg @@ -211,7 +211,7 @@ array. See :ref:`ugrid_variable_faces`. (black circles) for faces with fewer nodes than the maximum. Data can be assigned to lines (edges) just as easily as points (nodes) or -areas (faces). See :ref:`ugrid_edge_data`. +areas (faces). See :numref:`ugrid_edge_data`. .. _ugrid_edge_data: .. figure:: images/ugrid_edge_data.svg @@ -484,20 +484,20 @@ How UGRID information is stored | Described in detail in `MeshCoords`_. 
| Stores the following information: - * | :attr:`~iris.experimental.ugrid.MeshCoord.mesh` - | The :class:`~iris.experimental.ugrid.Mesh` associated with this - :class:`~iris.experimental.ugrid.MeshCoord`. This determines the - :attr:`~iris.cube.Cube.mesh` attribute of any :class:`~iris.cube.Cube` - this :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see - `The Basics`_) - - * | :attr:`~iris.experimental.ugrid.MeshCoord.location` - | ``node``/``edge``/``face`` - the element detailed by this - :class:`~iris.experimental.ugrid.MeshCoord`. This determines the - :attr:`~iris.cube.Cube.location` attribute of any - :class:`~iris.cube.Cube` this - :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see - `The Basics`_). + * | :attr:`~iris.experimental.ugrid.MeshCoord.mesh` + | The :class:`~iris.experimental.ugrid.Mesh` associated with this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.mesh` attribute of any :class:`~iris.cube.Cube` + this :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_) + + * | :attr:`~iris.experimental.ugrid.MeshCoord.location` + | ``node``/``edge``/``face`` - the element detailed by this + :class:`~iris.experimental.ugrid.MeshCoord`. This determines the + :attr:`~iris.cube.Cube.location` attribute of any + :class:`~iris.cube.Cube` this + :class:`~iris.experimental.ugrid.MeshCoord` is attached to (see + `The Basics`_). .. _ugrid MeshCoords: diff --git a/docs/src/sphinxext/api_rst_formatting.py b/docs/src/sphinxext/api_rst_formatting.py index 6dd82de91e..8f1aa3c5f3 100644 --- a/docs/src/sphinxext/api_rst_formatting.py +++ b/docs/src/sphinxext/api_rst_formatting.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # This script will process all .rst files that have been created by # sphinxcontrib.apidoc extension and perform minor changes, specifically: diff --git a/docs/src/techpapers/index.rst b/docs/src/techpapers/index.rst new file mode 100644 index 0000000000..773c8f7059 --- /dev/null +++ b/docs/src/techpapers/index.rst @@ -0,0 +1,13 @@ +.. _techpapers_index: + + +Iris Technical Papers +===================== + +Extra information on specific technical issues. + +.. toctree:: + :maxdepth: 1 + + um_files_loading.rst + missing_data_handling.rst diff --git a/docs/src/further_topics/missing_data_handling.rst b/docs/src/techpapers/missing_data_handling.rst similarity index 100% rename from docs/src/further_topics/missing_data_handling.rst rename to docs/src/techpapers/missing_data_handling.rst diff --git a/docs/src/further_topics/um_files_loading.rst b/docs/src/techpapers/um_files_loading.rst similarity index 93% rename from docs/src/further_topics/um_files_loading.rst rename to docs/src/techpapers/um_files_loading.rst index 9d9393f16d..f8c94cab08 100644 --- a/docs/src/further_topics/um_files_loading.rst +++ b/docs/src/techpapers/um_files_loading.rst @@ -1,3 +1,5 @@ +.. _um_files_loading: + .. testsetup:: import numpy as np @@ -11,8 +13,6 @@ np.set_printoptions(precision=8) -.. _um_files_loading: - =================================== Iris Handling of PP and Fieldsfiles =================================== @@ -125,21 +125,21 @@ with latitude and longitude axes are also supported). 
For an ordinary latitude-longitude grid, the cubes have coordinates called 'longitude' and 'latitude': -* These are mapped to the appropriate data dimensions. -* They have units of 'degrees'. -* They have a coordinate system of type :class:`iris.coord_systems.GeogCS`. -* The coordinate points are normally set to the regular sequence - ``ZDX/Y + BDX/Y * (1 .. LBNPT/LBROW)`` (*except*, if BDX/BDY is zero, the - values are taken from the extra data vector X/Y, if present). -* If X/Y_LOWER_BOUNDS extra data is available, this appears as bounds values - of the horizontal coordinates. + * These are mapped to the appropriate data dimensions. + * They have units of 'degrees'. + * They have a coordinate system of type :class:`iris.coord_systems.GeogCS`. + * The coordinate points are normally set to the regular sequence + ``ZDX/Y + BDX/Y * (1 .. LBNPT/LBROW)`` (*except*, if BDX/BDY is zero, the + values are taken from the extra data vector X/Y, if present). + * If X/Y_LOWER_BOUNDS extra data is available, this appears as bounds values + of the horizontal coordinates. For **rotated** latitude-longitude coordinates (as for LBCODE=101), the horizontal coordinates differ only slightly -- -* The names are 'grid_latitude' and 'grid_longitude'. -* The coord_system is a :class:`iris.coord_systems.RotatedGeogCS`, created - with a pole defined by BPLAT, BPLON. + * The names are 'grid_latitude' and 'grid_longitude'. + * The coord_system is a :class:`iris.coord_systems.RotatedGeogCS`, created + with a pole defined by BPLAT, BPLON. For example: >>> # Load a PP field. @@ -304,9 +304,10 @@ For hybrid height levels (LBVC=65): multidimensional or non-monotonic. See an example printout of a hybrid height cube, -:ref:`here `. Notice that this contains all of the -above coordinates -- ``model_level_number``, ``sigma``, ``level_height`` and -the derived ``altitude``. +:ref:`here `: + + Notice that this contains all of the above coordinates -- + 'model_level_number', 'sigma', 'level_height' and the derived 'altitude'. .. note:: @@ -363,7 +364,7 @@ Data at a single measurement timepoint (LBTIM.IB=0): defined according to LBTIM.IC. Values forecast from T2, valid at T1 (LBTIM.IB=1): - Coordinates ``time`` and ``forecast_reference_time`` are created from the T1 + Coordinates ``time` and ``forecast_reference_time`` are created from the T1 and T2 values, respectively. These have no bounds, and units of 'hours since 1970-01-01 00:00:00', with the appropriate calendar. A ``forecast_period`` coordinate is also created, with values T1-T2, no @@ -382,11 +383,12 @@ these may become dimensions of the resulting data cube. This will depend on the values actually present in the source fields for each of the elements. See an example printout of a forecast data cube, -:ref:`here `. Notice that this example -contains all of the above coordinates -- ``time``, ``forecast_period`` and -``forecast_reference_time``. In this case the data are forecasts, so ``time`` -is a dimension, ``forecast_period``` varies with time and -``forecast_reference_time`` is a constant. +:ref:`here ` : + + Notice that this example contains all of the above coordinates -- 'time', + 'forecast_period' and 'forecast_reference_time'. In this case the data are + forecasts, so 'time' is a dimension, 'forecast_period' varies with time and + 'forecast_reference_time' is a constant. 
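A minimal sketch of how these time-related coordinates could be inspected after
loading a forecast file (the filename ``my_forecast.pp`` below is only a
placeholder, not a file shipped with Iris)::

    import iris

    # Placeholder path -- substitute any PP/Fieldsfile containing forecast data.
    cube = iris.load_cube("my_forecast.pp")

    # For forecast data, 'time' is normally mapped to a data dimension,
    # 'forecast_period' varies along it, and 'forecast_reference_time'
    # is a scalar (constant) coordinate.
    for name in ("time", "forecast_period", "forecast_reference_time"):
        coord = cube.coord(name)
        print(name, "-> data dims:", cube.coord_dims(coord))

Printing the cube itself (``print(cube)``) shows the same dimension mapping in
summary form.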
Statistical Measures diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst index d986a986ad..771aa450a3 100644 --- a/docs/src/userguide/index.rst +++ b/docs/src/userguide/index.rst @@ -18,12 +18,6 @@ they may serve as a useful reference for future exploration. sequentially using the ``next`` and ``previous`` links at the bottom of each page. -.. note:: - - There is also useful learning material held in the - https://github.com/scitools-classroom repo, including tutorials, courses - and presentations. - .. toctree:: :maxdepth: 2 @@ -42,4 +36,14 @@ they may serve as a useful reference for future exploration. citation code_maintenance glossary - ../further_topics/index + + +.. toctree:: + :maxdepth: 2 + :caption: Further Topics + + ../further_topics/metadata + ../further_topics/lenient_metadata + ../further_topics/lenient_maths + ../further_topics/dask_best_practices/index + ../further_topics/ugrid/index diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst index 03b5093efc..267f97b0fc 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -85,10 +85,7 @@ A cube consists of: data dimensions as the coordinate has dimensions. * an attributes dictionary which, other than some protected CF names, can - hold arbitrary extra metadata. This implements the concept of dataset-level - and variable-level attributes when loading and and saving NetCDF files (see - :class:`~iris.cube.CubeAttrsDict` and NetCDF - :func:`~iris.fileformats.netcdf.saver.save` for more). + hold arbitrary extra metadata. * a list of cell methods to represent operations which have already been applied to the data (e.g. "mean over time") * a list of coordinate "factories" used for deriving coordinates from the diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/userguide/navigating_a_cube.rst index ec3cd8e0dc..b4c16b094b 100644 --- a/docs/src/userguide/navigating_a_cube.rst +++ b/docs/src/userguide/navigating_a_cube.rst @@ -191,10 +191,10 @@ Adding and Removing Metadata to the Cube at Load Time Sometimes when loading a cube problems occur when the amount of metadata is more or less than expected. This is often caused by one of the following: -* The file does not contain enough metadata, and therefore the cube cannot know everything about the file. -* Some of the metadata of the file is contained in the filename, but is not part of the actual file. -* There is not enough metadata loaded from the original file as Iris has not handled the format fully. *(in which case, - please let us know about it)* + * The file does not contain enough metadata, and therefore the cube cannot know everything about the file. + * Some of the metadata of the file is contained in the filename, but is not part of the actual file. + * There is not enough metadata loaded from the original file as Iris has not handled the format fully. *(in which case, + please let us know about it)* To solve this, all of :func:`iris.load`, :func:`iris.load_cube`, and :func:`iris.load_cubes` support a callback keyword. diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst index e4c041886c..ef4de0c429 100644 --- a/docs/src/userguide/real_and_lazy_data.rst +++ b/docs/src/userguide/real_and_lazy_data.rst @@ -247,21 +247,20 @@ output file, to be performed by `Dask `_ lat thus enabling parallel save operations. This works in the following way : + 1. 
an :func:`iris.save` call is made, with a NetCDF file output and the additional + keyword ``compute=False``. + This is currently *only* available when saving to NetCDF, so it is documented in + the Iris NetCDF file format API. See: :func:`iris.fileformats.netcdf.save`. -1. an :func:`iris.save` call is made, with a NetCDF file output and the additional - keyword ``compute=False``. - This is currently *only* available when saving to NetCDF, so it is documented in - the Iris NetCDF file format API. See: :func:`iris.fileformats.netcdf.save`. + 2. the call creates the output file, but does not fill in variables' data, where + the data is a lazy array in the Iris object. Instead, these variables are + initially created "empty". -2. the call creates the output file, but does not fill in variables' data, where - the data is a lazy array in the Iris object. Instead, these variables are - initially created "empty". + 3. the :meth:`~iris.save` call returns a ``result`` which is a + :class:`~dask.delayed.Delayed` object. -3. the :meth:`~iris.save` call returns a ``result`` which is a - :class:`~dask.delayed.Delayed` object. - -4. the save can be completed later by calling ``result.compute()``, or by passing it - to the :func:`dask.compute` call. + 4. the save can be completed later by calling ``result.compute()``, or by passing it + to the :func:`dask.compute` call. The benefit of this, is that costly data transfer operations can be performed in parallel with writes to other data files. Also, where array contents are calculated diff --git a/docs/src/voted_issues.rst b/docs/src/voted_issues.rst index 33d1982a7b..0c99638bbd 100644 --- a/docs/src/voted_issues.rst +++ b/docs/src/voted_issues.rst @@ -53,3 +53,5 @@ the below table. .. note:: The data in this table is updated every 30 minutes and is sourced from `voted-issues.json`_. For the latest data please see the `issues on GitHub`_. + Note that the list on Github does not show the number of votes 👍 + only the total number of comments for the whole issue. \ No newline at end of file diff --git a/docs/src/whatsnew/1.4.rst b/docs/src/whatsnew/1.4.rst index 912a1e3bad..989198296c 100644 --- a/docs/src/whatsnew/1.4.rst +++ b/docs/src/whatsnew/1.4.rst @@ -58,7 +58,7 @@ Features * Use the latest release of Cartopy, v0.8.0. -.. _OPeNDAP: http://www.opendap.org +.. _OPeNDAP: http://www.opendap.org/about .. _exp-regrid: Experimental Regridding Enhancements diff --git a/docs/src/whatsnew/1.7.rst b/docs/src/whatsnew/1.7.rst index 4c3f3197dc..1d7c7c3f60 100644 --- a/docs/src/whatsnew/1.7.rst +++ b/docs/src/whatsnew/1.7.rst @@ -329,6 +329,6 @@ Documentation * A clarification of the behaviour of :func:`iris.analysis.calculus.differentiate`. -* A new Technical Papers section has been added to +* A new :doc:`"Technical Papers" ` section has been added to the documentation along with the addition of a paper providing an - :ref:`overview of the load process for UM-like fileformats (e.g. PP and Fieldsfile) `. + :doc:`overview of the load process for UM-like fileformats (e.g. PP and Fieldsfile) `. diff --git a/docs/src/whatsnew/3.7.rst b/docs/src/whatsnew/3.7.rst index 3472374711..71ce4da735 100644 --- a/docs/src/whatsnew/3.7.rst +++ b/docs/src/whatsnew/3.7.rst @@ -47,7 +47,7 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles lazy data. (:pull:`5307`) - + .. _concat_warnings: #. 
`@acchamber`_ added error and warning messages about coordinate overlaps to @@ -70,11 +70,11 @@ This document explains the changes made to Iris for this release ============= #. `@acchamber`_ fixed a bug with :func:`~iris.util.unify_time_units` so it does not block - concatenation through different data types in rare instances. (:pull:`5372`) + concatenation through different data types in rare instances. (:pull:`5372`) #. `@acchamber`_ removed some obsolete code that prevented extraction of time points from cubes with bounded times (:pull:`5175`) - + .. _cftime_warnings: #. `@rcomer`_ modified pp-loading to avoid a ``cftime`` warning for non-standard diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index c556f82761..d2a15be1f7 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -5,13 +5,12 @@ What's New in Iris ------------------ -.. include:: latest.rst +.. include:: 3.7.rst .. toctree:: :maxdepth: 1 :hidden: - latest.rst 3.7.rst 3.6.rst 3.5.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst deleted file mode 100644 index 884bfd376a..0000000000 --- a/docs/src/whatsnew/latest.rst +++ /dev/null @@ -1,184 +0,0 @@ -.. include:: ../common_links.inc - -|iris_version| |build_date| [unreleased] -**************************************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -.. dropdown:: |iris_version| Release Highlights - :color: primary - :icon: info - :animate: fade-in - :open: - - The highlights for this major/minor release of Iris include: - - * N/A - - And finally, get in touch with us on :issue:`GitHub` if you have - any issues or feature requests for improving Iris. Enjoy! - - -📢 Announcements -================ - -#. `@lbdreyer`_ relicensed Iris from LGPL-3 to BSD-3. (:pull: `5577`) - - -✨ Features -=========== -#. `@pp-mo`_, `@lbdreyer`_ and `@trexfeathers`_ improved - :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` handling to - better preserve the distinction between dataset-level and variable-level - attributes, allowing file-Cube-file round-tripping of NetCDF attributes. See - :class:`~iris.cube.CubeAttrsDict`, NetCDF - :func:`~iris.fileformats.netcdf.saver.save` and :data:`~iris.Future` for more. - (:pull:`5152`, `split attributes project`_) - -#. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles - lazy data. (:pull:`5307`) - -#. `@trexfeathers`_ and `@HGWright`_ (reviewer) sub-categorised all Iris' - :class:`UserWarning`\s for richer filtering. The full index of - sub-categories can be seen here: :mod:`iris.exceptions` . (:pull:`5498`) - -#. `@trexfeathers`_ added the :class:`~iris.coord_systems.ObliqueMercator` - and :class:`~iris.coord_systems.RotatedMercator` coordinate systems, - complete with NetCDF loading and saving. (:pull:`5548`) - -#. `@trexfeathers`_ added the ``use_year_at_season_start`` parameter to - :func:`iris.coord_categorisation.add_season_year`. When - ``use_year_at_season_start==True``: seasons spanning the year boundary (e.g. - Winter - December to February) will be assigned to the preceding year (e.g. - the year of December) instead of the following year (the default behaviour). - (:pull:`5573`) - -#. `@HGWright`_ added :attr:`~iris.coords.Coord.ignore_axis` to allow manual - intervention preventing :func:`~iris.util.guess_coord_axis` from acting on a - coordinate. (:pull:`5551`) - -#. 
`@pp-mo`_, `@trexfeathers`_ and `@ESadek-MO`_ added more control over - NetCDF chunking with the use of the :data:`iris.fileformats.netcdf.loader.CHUNK_CONTROL` - context manager. (:pull:`5588`) - - -🐛 Bugs Fixed -============= - -#. `@scottrobinson02`_ fixed the output units when dividing a coordinate by a - cube. (:issue:`5305`, :pull:`5331`) - -#. `@ESadek-MO`_ has updated :mod:`iris.tests.graphics.idiff` to stop duplicated file names - preventing acceptance. (:issue:`5098`, :pull:`5482`) - -#. `@acchamber`_ and `@rcomer`_ modified 2D plots so that time axes and their - ticks have more sensible default labels. (:issue:`5426`, :pull:`5561`) - -#. `@rcomer`_ and `@trexfeathers`_ (reviewer) added handling for realization - coordinates when saving pp files (:issue:`4747`, :pull:`5568`) - - -💣 Incompatible Changes -======================= - -#. N/A - - -🚀 Performance Enhancements -=========================== - -#. `@stephenworsley`_ improved the speed of :class:`~iris.analysis.AreaWeighted` - regridding. (:pull:`5543`) - - -🔥 Deprecations -=============== - -#. N/A - - -🔗 Dependencies -=============== - -#. `@bjlittle`_ enforced the minimum pin of ``numpy>1.21`` in accordance with the `NEP29 Drop Schedule`_. - (:pull:`5525`) - - -📚 Documentation -================ - -#. `@trexfeathers`_ documented the intended use of warnings filtering with - Iris. See :ref:`filtering-warnings`. (:pull:`5509`) - -#. `@rcomer`_ updated the - :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_maps.py` to show how - a colourbar may steal space from multiple axes. (:pull:`5537`) - -#. `@tkknight`_ improved the top navgation bar alignment and amount of - links shown. Also improved how the warning banner is implemented. - (:pull:`5505` and :pull:`5508`) - -#. `@tkknight`_ removed broken git links. (:pull:`5569`) - -#. `@ESadek-MO`_ added a phrasebook for synonymous terms used in similar - packages. (:pull:`5564`) - -#. `@ESadek-MO`_ and `@trexfeathers`_ created a technical paper for NetCDF - saving and loading, :ref:`netcdf_io` with a section on chunking, and placeholders - for further topics. (:pull:`5588`) - - -💼 Internal -=========== - -#. `@trexfeathers`_ and `@ESadek-MO`_ (reviewer) performed a suite of fixes and - improvements for benchmarking, primarily to get - :ref:`on demand pull request benchmarking ` - working properly. (Main pull request: :pull:`5437`, more detail: - :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) - -#. `@trexfeathers`_ set a number of memory benchmarks to be on-demand, as they - were vulnerable to false positives in CI runs. (:pull:`5481`) - -#. `@acchamber`_ and `@ESadek-MO`_ resolved several deprecation to reduce - number of warnings raised during tests. - (:pull:`5493`, :pull:`5511`) - -#. `@trexfeathers`_ replaced all uses of the ``logging.WARNING`` level, in - favour of using Python warnings, following team agreement. (:pull:`5488`) - -#. `@trexfeathers`_ adapted benchmarking to work with ASV ``>=v0.6`` by no - longer using the ``--strict`` argument. (:pull:`5496`) - -#. `@fazledyn-or`_ replaced ``NotImplementedError`` with ``NotImplemented`` as - a proper method call. (:pull:`5544`) - -#. `@bjlittle`_ corrected various comment spelling mistakes detected by - `codespell`_. (:pull:`5546`) - -#. `@rcomer`_ reduced the size of the conda environment used for testing. - (:pull:`5606`) - -#. `@trexfeathers`_ and `@pp-mo`_ improved how the conda-forge feedstock - release candidate branch is managed, via: - :doc:`../developers_guide/release_do_nothing`. 
- (:pull:`5515`) - - -.. comment - Whatsnew author names (@github name) in alphabetical order. Note that, - core dev names are automatically included by the common_links.inc: - -.. _@scottrobinson02: https://github.com/scottrobinson02 -.. _@acchamber: https://github.com/acchamber -.. _@fazledyn-or: https://github.com/fazledyn-or - - -.. comment - Whatsnew resources in alphabetical order: - -.. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule -.. _codespell: https://github.com/codespell-project/codespell -.. _split attributes project: https://github.com/orgs/SciTools/projects/5?pane=info diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template deleted file mode 100644 index 966a91e976..0000000000 --- a/docs/src/whatsnew/latest.rst.template +++ /dev/null @@ -1,107 +0,0 @@ -.. include:: ../common_links.inc - -|iris_version| |build_date| [unreleased] -**************************************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -.. dropdown:: |iris_version| Release Highlights - :color: primary - :icon: info - :animate: fade-in - :open: - - The highlights for this major/minor release of Iris include: - - * N/A - - And finally, get in touch with us on :issue:`GitHub` if you have - any issues or feature requests for improving Iris. Enjoy! - - -NOTE: section BELOW is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst' - -|iris_version| |build_date| -=========================== - -.. dropdown:: |iris_version| Patches - :color: primary - :icon: alert - :animate: fade-in - - The patches in this release of Iris include: - - #. N/A - -NOTE: section ABOVE is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst') - - -📢 Announcements -================ - -#. N/A - - -✨ Features -=========== - -#. N/A - - -🐛 Bugs Fixed -============= - -#. N/A - - -💣 Incompatible Changes -======================= - -#. N/A - - -🚀 Performance Enhancements -=========================== - -#. N/A - - -🔥 Deprecations -=============== - -#. N/A - - -🔗 Dependencies -=============== - -#. N/A - - -📚 Documentation -================ - -#. N/A - - -💼 Internal -=========== - -#. N/A - - -.. comment - Whatsnew author names (@github name) in alphabetical order. Note that, - core dev names are automatically included by the common_links.inc: - - - - -.. comment - Whatsnew resources in alphabetical order: diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index a10169b7bb..0e6670533f 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A package for handling multi-dimensional data and associated metadata. @@ -141,9 +142,7 @@ def callback(cube, field, filename): class Future(threading.local): """Run-time configuration controller.""" - def __init__( - self, datum_support=False, pandas_ndim=False, save_split_attrs=False - ): + def __init__(self, datum_support=False, pandas_ndim=False): """ A container for run-time options controls. 
@@ -165,11 +164,6 @@ def __init__( pandas_ndim : bool, default=False See :func:`iris.pandas.as_data_frame` for details - opts in to the newer n-dimensional behaviour. - save_split_attrs : bool, default=False - Save "global" and "local" cube attributes to netcdf in appropriately - different ways : "global" ones are saved as dataset attributes, where - possible, while "local" ones are saved as data-variable attributes. - See :func:`iris.fileformats.netcdf.saver.save`. """ # The flag 'example_future_flag' is provided as a reference for the @@ -181,18 +175,12 @@ def __init__( # self.__dict__['example_future_flag'] = example_future_flag self.__dict__["datum_support"] = datum_support self.__dict__["pandas_ndim"] = pandas_ndim - self.__dict__["save_split_attrs"] = save_split_attrs - - # TODO: next major release: set IrisDeprecation to subclass - # DeprecationWarning instead of UserWarning. def __repr__(self): # msg = ('Future(example_future_flag={})') # return msg.format(self.example_future_flag) - msg = "Future(datum_support={}, pandas_ndim={}, save_split_attrs={})" - return msg.format( - self.datum_support, self.pandas_ndim, self.save_split_attrs - ) + msg = "Future(datum_support={}, pandas_ndim={})" + return msg.format(self.datum_support, self.pandas_ndim) # deprecated_options = {'example_future_flag': 'warning',} deprecated_options = {} diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 554f14d914..c6d58b1622 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Automatic concatenation of multiple cubes over one or more existing dimensions. @@ -15,7 +16,6 @@ import iris.coords import iris.cube -import iris.exceptions from iris.util import array_equal, guess_coord_axis # @@ -998,7 +998,7 @@ def register( raise iris.exceptions.ConcatenateError([msg]) elif not match: msg = f"Found cubes with overlap on concatenate axis {candidate_axis}, skipping concatenation for these cubes" - warnings.warn(msg, category=iris.exceptions.IrisUserWarning) + warnings.warn(msg) # Check for compatible AuxCoords. if match: diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 82225ec516..1884cbcbd9 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides objects for building up expressions useful for pattern matching. diff --git a/lib/iris/_data_manager.py b/lib/iris/_data_manager.py index 9ea4481307..486a58de45 100644 --- a/lib/iris/_data_manager.py +++ b/lib/iris/_data_manager.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
""" Management of common state and behaviour for cube and coordinate data. diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py index ad4dc5a560..73fcedcd82 100644 --- a/lib/iris/_deprecation.py +++ b/lib/iris/_deprecation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Utilities for producing runtime deprecation messages. @@ -11,13 +12,7 @@ class IrisDeprecation(UserWarning): - """ - An Iris deprecation warning. - - Note this subclasses UserWarning for backwards compatibility with Iris' - original deprecation warnings. Should subclass DeprecationWarning at the - next major release. - """ + """An Iris deprecation warning.""" pass @@ -49,7 +44,7 @@ def warn_deprecated(msg, stacklevel=2): >>> """ - warnings.warn(msg, category=IrisDeprecation, stacklevel=stacklevel) + warnings.warn(msg, IrisDeprecation, stacklevel=stacklevel) # A Mixin for a wrapper class that copies the docstring of the wrapped class diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 11477a2fa6..4c294a7d2f 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Routines for lazy data handling. @@ -61,7 +62,6 @@ def _optimum_chunksize_internals( shape, limit=None, dtype=np.dtype("f4"), - dims_fixed=None, dask_array_chunksize=dask.config.get("array.chunk-size"), ): """ @@ -71,8 +71,8 @@ def _optimum_chunksize_internals( Args: - * chunks (tuple of int): - Pre-existing chunk shape of the target data. + * chunks (tuple of int, or None): + Pre-existing chunk shape of the target data : None if unknown. * shape (tuple of int): The full array shape of the target data. * limit (int): @@ -80,11 +80,6 @@ def _optimum_chunksize_internals( :mod:`dask.config`. * dtype (np.dtype): Numpy dtype of target data. - * dims_fixed (list of bool): - If set, a list of values equal in length to 'chunks' or 'shape'. - 'True' values indicate a dimension that can not be changed, i.e. that - element of the result must equal the corresponding value in 'chunks' or - data.shape. Returns: * chunk (tuple of int): @@ -105,7 +100,6 @@ def _optimum_chunksize_internals( "chunks = [c[0] for c in normalise_chunks('auto', ...)]". """ - # Set the chunksize limit. if limit is None: # Fetch the default 'optimal' chunksize from the dask config. @@ -115,90 +109,58 @@ def _optimum_chunksize_internals( point_size_limit = limit / dtype.itemsize - if dims_fixed is not None: - if not np.any(dims_fixed): - dims_fixed = None - - if dims_fixed is None: - # Get initial result chunks, starting with a copy of the input. - working = list(chunks) - else: - # Adjust the operation to ignore the 'fixed' dims. - # (We reconstruct the original later, before return). - chunks = np.array(chunks) - dims_fixed_arr = np.array(dims_fixed) - # Reduce the target size by the fixed size of all the 'fixed' dims. 
- point_size_limit = point_size_limit // np.prod(chunks[dims_fixed_arr]) - # Work on only the 'free' dims. - original_shape = tuple(shape) - shape = tuple(np.array(shape)[~dims_fixed_arr]) - working = list(chunks[~dims_fixed_arr]) - - if len(working) >= 1: - if np.prod(working) < point_size_limit: - # If size is less than maximum, expand the chunks, multiplying - # later (i.e. inner) dims first. - i_expand = len(shape) - 1 - while np.prod(working) < point_size_limit and i_expand >= 0: - factor = np.floor(point_size_limit * 1.0 / np.prod(working)) - new_dim = working[i_expand] * int(factor) - if new_dim >= shape[i_expand]: - # Clip to dim size : must not exceed the full shape. - new_dim = shape[i_expand] - else: - # 'new_dim' is less than the relevant dim of 'shape' -- but - # it is also the largest possible multiple of the - # input-chunks, within the size limit. - # So : 'i_expand' is the outer (last) dimension over which - # we will multiply the input chunks, and 'new_dim' is a - # value giving the fewest possible chunks within that dim. - - # Now replace 'new_dim' with the value **closest to - # equal-size chunks**, for the same (minimum) number of - # chunks. More-equal chunks are practically better. - # E.G. : "divide 8 into multiples of 2, with a limit of 7", - # produces new_dim=6, meaning chunks of sizes (6, 2). - # But (4, 4) is clearly better for memory and time cost. - - # Calculate how many (expanded) chunks fit in this dim. - dim_chunks = np.ceil(shape[i_expand] * 1.0 / new_dim) - # Get "ideal" (equal) size for that many chunks. - ideal_equal_chunk_size = shape[i_expand] / dim_chunks - # Use the nearest whole multiple of input chunks >= ideal. - new_dim = int( - working[i_expand] - * np.ceil(ideal_equal_chunk_size / working[i_expand]) - ) - - working[i_expand] = new_dim - i_expand -= 1 - else: - # Similarly, reduce if too big, reducing earlier (outer) dims first. - i_reduce = 0 - while np.prod(working) > point_size_limit: - factor = np.ceil(np.prod(working) / point_size_limit) - new_dim = int(working[i_reduce] / factor) - if new_dim < 1: - new_dim = 1 - working[i_reduce] = new_dim - i_reduce += 1 - - working = tuple(working) - - if dims_fixed is None: - result = working - else: - # Reconstruct the original form - result = [] - for i_dim in range(len(original_shape)): - if dims_fixed[i_dim]: - dim = chunks[i_dim] + # Create result chunks, starting with a copy of the input. + result = list(chunks) + + if np.prod(result) < point_size_limit: + # If size is less than maximum, expand the chunks, multiplying later + # (i.e. inner) dims first. + i_expand = len(shape) - 1 + while np.prod(result) < point_size_limit and i_expand >= 0: + factor = np.floor(point_size_limit * 1.0 / np.prod(result)) + new_dim = result[i_expand] * int(factor) + if new_dim >= shape[i_expand]: + # Clip to dim size : chunk dims must not exceed the full shape. + new_dim = shape[i_expand] else: - dim = working[0] - working = working[1:] - result.append(dim) + # 'new_dim' is less than the relevant dim of 'shape' -- but it + # is also the largest possible multiple of the input-chunks, + # within the size limit. + # So : 'i_expand' is the outer (last) dimension over which we + # will multiply the input chunks, and 'new_dim' is a value that + # ensures the fewest possible chunks within that dim. + + # Now replace 'new_dim' with the value **closest to equal-size + # chunks**, for the same (minimum) number of chunks. + # More-equal chunks are practically better. + # E.G. 
: "divide 8 into multiples of 2, with a limit of 7", + # produces new_dim=6, which would mean chunks of sizes (6, 2). + # But (4, 4) is clearly better for memory and time cost. + + # Calculate how many (expanded) chunks fit into this dimension. + dim_chunks = np.ceil(shape[i_expand] * 1.0 / new_dim) + # Get "ideal" (equal) size for that many chunks. + ideal_equal_chunk_size = shape[i_expand] / dim_chunks + # Use the nearest whole multiple of input chunks >= ideal. + new_dim = int( + result[i_expand] + * np.ceil(ideal_equal_chunk_size / result[i_expand]) + ) + + result[i_expand] = new_dim + i_expand -= 1 + else: + # Similarly, reduce if too big, reducing earlier (outer) dims first. + i_reduce = 0 + while np.prod(result) > point_size_limit: + factor = np.ceil(np.prod(result) / point_size_limit) + new_dim = int(result[i_reduce] / factor) + if new_dim < 1: + new_dim = 1 + result[i_reduce] = new_dim + i_reduce += 1 - return result + return tuple(result) @wraps(_optimum_chunksize_internals) @@ -207,7 +169,6 @@ def _optimum_chunksize( shape, limit=None, dtype=np.dtype("f4"), - dims_fixed=None, ): # By providing dask_array_chunksize as an argument, we make it so that the # output of _optimum_chunksize_internals depends only on its arguments (and @@ -217,14 +178,11 @@ def _optimum_chunksize( tuple(shape), limit=limit, dtype=dtype, - dims_fixed=dims_fixed, dask_array_chunksize=dask.config.get("array.chunk-size"), ) -def as_lazy_data( - data, chunks=None, asarray=False, dims_fixed=None, dask_chunking=False -): +def as_lazy_data(data, chunks=None, asarray=False): """ Convert the input array `data` to a :class:`dask.array.Array`. @@ -243,16 +201,6 @@ def as_lazy_data( If True, then chunks will be converted to instances of `ndarray`. Set to False (default) to pass passed chunks through unchanged. - * dims_fixed (list of bool): - If set, a list of values equal in length to 'chunks' or data.ndim. - 'True' values indicate a dimension which can not be changed, i.e. the - result for that index must equal the value in 'chunks' or data.shape. - - * dask_chunking (bool): - If True, Iris chunking optimisation will be bypassed, and dask's default - chunking will be used instead. Including a value for chunks while dask_chunking - is set to True will result in a failure. - Returns: The input array converted to a :class:`dask.array.Array`. @@ -264,38 +212,24 @@ def as_lazy_data( but reduced by a factor if that exceeds the dask default chunksize. """ - if dask_chunking: - if chunks is not None: - raise ValueError( - f"Dask chunking chosen, but chunks already assigned value {chunks}" - ) - lazy_params = {"asarray": asarray, "meta": np.ndarray} - else: - if chunks is None: - # No existing chunks : Make a chunk the shape of the entire input array - # (but we will subdivide it if too big). - chunks = list(data.shape) - - # Adjust chunk size for better dask performance, - # NOTE: but only if no shape dimension is zero, so that we can handle the - # PPDataProxy of "raw" landsea-masked fields, which have a shape of (0, 0). - if all(elem > 0 for elem in data.shape): - # Expand or reduce the basic chunk shape to an optimum size. - chunks = _optimum_chunksize( - chunks, - shape=data.shape, - dtype=data.dtype, - dims_fixed=dims_fixed, - ) - lazy_params = { - "chunks": chunks, - "asarray": asarray, - "meta": np.ndarray, - } + if chunks is None: + # No existing chunks : Make a chunk the shape of the entire input array + # (but we will subdivide it if too big). 
+ chunks = list(data.shape) + + # Adjust chunk size for better dask performance, + # NOTE: but only if no shape dimension is zero, so that we can handle the + # PPDataProxy of "raw" landsea-masked fields, which have a shape of (0, 0). + if all(elem > 0 for elem in data.shape): + # Expand or reduce the basic chunk shape to an optimum size. + chunks = _optimum_chunksize(chunks, shape=data.shape, dtype=data.dtype) + if isinstance(data, ma.core.MaskedConstant): data = ma.masked_array(data.data, mask=data.mask) if not is_lazy_data(data): - data = da.from_array(data, **lazy_params) + data = da.from_array( + data, chunks=chunks, asarray=asarray, meta=np.ndarray + ) return data diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index a8f079e70e..0f748d6d34 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Automatic collation of cubes into higher-dimensional cubes. @@ -22,9 +23,6 @@ multidim_lazy_stack, ) from iris.common import CoordMetadata, CubeMetadata -from iris.common._split_attribute_dicts import ( - _convert_splitattrs_to_pairedkeys_dict as convert_splitattrs_to_pairedkeys_dict, -) import iris.coords import iris.cube import iris.exceptions @@ -393,10 +391,8 @@ def _defn_msgs(self, other_defn): ) ) if self_defn.attributes != other_defn.attributes: - attrs_1, attrs_2 = self_defn.attributes, other_defn.attributes - diff_keys = sorted( - set(attrs_1.globals) ^ set(attrs_2.globals) - | set(attrs_1.locals) ^ set(attrs_2.locals) + diff_keys = set(self_defn.attributes.keys()) ^ set( + other_defn.attributes.keys() ) if diff_keys: msgs.append( @@ -404,16 +400,14 @@ def _defn_msgs(self, other_defn): + ", ".join(repr(key) for key in diff_keys) ) else: - attrs_1, attrs_2 = [ - convert_splitattrs_to_pairedkeys_dict(dic) - for dic in (attrs_1, attrs_2) - ] diff_attrs = [ - repr(key[1]) - for key in attrs_1 - if np.all(attrs_1[key] != attrs_2[key]) + repr(key) + for key in self_defn.attributes + if np.all( + self_defn.attributes[key] != other_defn.attributes[key] + ) ] - diff_attrs = ", ".join(sorted(diff_attrs)) + diff_attrs = ", ".join(diff_attrs) msgs.append( "cube.attributes values differ for keys: {}".format( diff_attrs diff --git a/lib/iris/_representation/__init__.py b/lib/iris/_representation/__init__.py index aec46ec927..f6c7fdf9b4 100644 --- a/lib/iris/_representation/__init__.py +++ b/lib/iris/_representation/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Code to make printouts and other representations (e.g. html) of Iris objects. diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index 9239c96949..ea32fc5126 100644 --- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides text printouts of Iris cubes. diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 1094588fa6..4e0fcfb1ea 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides objects describing cube summaries. """ diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 76dd52de6e..f00c3dd850 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A package providing :class:`iris.cube.Cube` analysis support. diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index bd2ad90a3a..edead3948a 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -1,13 +1,13 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. import functools import cf_units import numpy as np import numpy.ma as ma -from scipy.sparse import csr_array from iris._lazy_data import map_complete_blocks from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid @@ -76,7 +76,8 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): self.grid_y, self.meshgrid_x, self.meshgrid_y, - self.weights, + self.weights_info, + self.index_info, ) = _regrid_info def __call__(self, cube): @@ -125,7 +126,8 @@ def __call__(self, cube): self.grid_y, self.meshgrid_x, self.meshgrid_y, - self.weights, + self.weights_info, + self.index_info, ) return _regrid_area_weighted_rectilinear_src_and_grid__perform( cube, _regrid_info, mdtol=self._mdtol @@ -223,17 +225,468 @@ def _get_xy_coords(cube): return x_coord, y_coord -def _get_bounds_in_units(coord, units, dtype): +def _within_bounds(src_bounds, tgt_bounds, orderswap=False): + """ + Determine which target bounds lie within the extremes of the source bounds. + + Args: + + * src_bounds (ndarray): + An (n, 2) shaped array of monotonic contiguous source bounds. + * tgt_bounds (ndarray): + An (n, 2) shaped array corresponding to the target bounds. + + Kwargs: + + * orderswap (bool): + A Boolean indicating whether the target bounds are in descending order + (True). Defaults to False. + + Returns: + Boolean ndarray, indicating whether each target bound is within the + extremes of the source bounds. 
+ + """ + min_bound = np.min(src_bounds) - 1e-14 + max_bound = np.max(src_bounds) + 1e-14 + + # Swap upper-lower is necessary. + if orderswap is True: + upper, lower = tgt_bounds.T + else: + lower, upper = tgt_bounds.T + + return ((lower <= max_bound) * (lower >= min_bound)) * ( + (upper <= max_bound) * (upper >= min_bound) + ) + + +def _cropped_bounds(bounds, lower, upper): """ - Return a copy of coord's bounds in the specified units and dtype. + Return a new bounds array and corresponding slice object (or indices) of + the original data array, resulting from cropping the provided bounds + between the specified lower and upper values. The bounds at the + extremities will be truncated so that they start and end with lower and + upper. + + This function will return an empty NumPy array and slice if there is no + overlap between the region covered by bounds and the region from lower to + upper. + + If lower > upper the resulting bounds may not be contiguous and the + indices object will be a tuple of indices rather than a slice object. + + Args: + + * bounds: + An (n, 2) shaped array of monotonic contiguous bounds. + * lower: + Lower bound at which to crop the bounds array. + * upper: + Upper bound at which to crop the bounds array. + + Returns: + A tuple of the new bounds array and the corresponding slice object or + indices from the zeroth axis of the original array. + + """ + reversed_flag = False + # Ensure order is increasing. + if bounds[0, 0] > bounds[-1, 0]: + # Reverse bounds + bounds = bounds[::-1, ::-1] + reversed_flag = True + + # Number of bounds. + n = bounds.shape[0] + + if lower <= upper: + if lower > bounds[-1, 1] or upper < bounds[0, 0]: + new_bounds = bounds[0:0] + indices = slice(0, 0) + else: + # A single region lower->upper. + if lower < bounds[0, 0]: + # Region extends below bounds so use first lower bound. + lindex = 0 + lower = bounds[0, 0] + else: + # Index of last lower bound less than or equal to lower. + lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] + if upper > bounds[-1, 1]: + # Region extends above bounds so use last upper bound. + uindex = n - 1 + upper = bounds[-1, 1] + else: + # Index of first upper bound greater than or equal to + # upper. + uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] + # Extract the bounds in our region defined by lower->upper. + new_bounds = np.copy(bounds[lindex : (uindex + 1), :]) + # Replace first and last values with specified bounds. + new_bounds[0, 0] = lower + new_bounds[-1, 1] = upper + if reversed_flag: + indices = slice(n - (uindex + 1), n - lindex) + else: + indices = slice(lindex, uindex + 1) + else: + # Two regions [0]->upper, lower->[-1] + # [0]->upper + if upper < bounds[0, 0]: + # Region outside src bounds. + new_bounds_left = bounds[0:0] + indices_left = tuple() + slice_left = slice(0, 0) + else: + if upper > bounds[-1, 1]: + # Whole of bounds. + uindex = n - 1 + upper = bounds[-1, 1] + else: + # Index of first upper bound greater than or equal to upper. + uindex = np.nonzero(bounds[:, 1] >= upper)[0][0] + # Extract the bounds in our region defined by [0]->upper. + new_bounds_left = np.copy(bounds[0 : (uindex + 1), :]) + # Replace last value with specified bound. + new_bounds_left[-1, 1] = upper + if reversed_flag: + indices_left = tuple(range(n - (uindex + 1), n)) + slice_left = slice(n - (uindex + 1), n) + else: + indices_left = tuple(range(0, uindex + 1)) + slice_left = slice(0, uindex + 1) + # lower->[-1] + if lower > bounds[-1, 1]: + # Region is outside src bounds. 
+ new_bounds_right = bounds[0:0] + indices_right = tuple() + slice_right = slice(0, 0) + else: + if lower < bounds[0, 0]: + # Whole of bounds. + lindex = 0 + lower = bounds[0, 0] + else: + # Index of last lower bound less than or equal to lower. + lindex = np.nonzero(bounds[:, 0] <= lower)[0][-1] + # Extract the bounds in our region defined by lower->[-1]. + new_bounds_right = np.copy(bounds[lindex:, :]) + # Replace first value with specified bound. + new_bounds_right[0, 0] = lower + if reversed_flag: + indices_right = tuple(range(0, n - lindex)) + slice_right = slice(0, n - lindex) + else: + indices_right = tuple(range(lindex, n)) + slice_right = slice(lindex, None) + + if reversed_flag: + # Flip everything around. + indices_left, indices_right = indices_right, indices_left + slice_left, slice_right = slice_right, slice_left + + # Combine regions. + new_bounds = np.concatenate((new_bounds_left, new_bounds_right)) + # Use slices if possible, but if we have two regions use indices. + if indices_left and indices_right: + indices = indices_left + indices_right + elif indices_left: + indices = slice_left + elif indices_right: + indices = slice_right + else: + indices = slice(0, 0) + + if reversed_flag: + new_bounds = new_bounds[::-1, ::-1] + + return new_bounds, indices + + +def _cartesian_area(y_bounds, x_bounds): + """ + Return an array of the areas of each cell given two arrays + of cartesian bounds. + + Args: + + * y_bounds: + An (n, 2) shaped NumPy array. + * x_bounds: + An (m, 2) shaped NumPy array. + + Returns: + An (n, m) shaped Numpy array of areas. + + """ + heights = y_bounds[:, 1] - y_bounds[:, 0] + widths = x_bounds[:, 1] - x_bounds[:, 0] + return np.abs(np.outer(heights, widths)) + + +def _spherical_area(y_bounds, x_bounds, radius=1.0): + """ + Return an array of the areas of each cell on a sphere + given two arrays of latitude and longitude bounds in radians. + + Args: + + * y_bounds: + An (n, 2) shaped NumPy array of latitude bounds in radians. + * x_bounds: + An (m, 2) shaped NumPy array of longitude bounds in radians. + * radius: + Radius of the sphere. Default is 1.0. + + Returns: + An (n, m) shaped Numpy array of areas. - Return as contiguous bounds. """ + return iris.analysis.cartography._quadrant_area(y_bounds, x_bounds, radius) + + +def _get_bounds_in_units(coord, units, dtype): + """Return a copy of coord's bounds in the specified units and dtype.""" # The bounds are cast to dtype before conversion to prevent issues when # mixing float32 and float64 types. - return coord.units.convert( - coord.contiguous_bounds().astype(dtype), units - ).astype(dtype) + return coord.units.convert(coord.bounds.astype(dtype), units).astype(dtype) + + +def _weighted_mean_with_mdtol(data, weights, axis=None, mdtol=0): + """ + Return the weighted mean of an array over the specified axis + using the provided weights (if any) and a permitted fraction of + masked data. + + Args: + + * data (array-like): + Data to be averaged. + + * weights (array-like): + An array of the same shape as the data that specifies the contribution + of each corresponding data element to the calculated mean. + + Kwargs: + + * axis (int or tuple of ints): + Axis along which the mean is computed. The default is to compute + the mean of the flattened array. + + * mdtol (float): + Tolerance of missing data. The value returned in each element of the + returned array will be masked if the fraction of masked data exceeds + mdtol. This fraction is weighted by the `weights` array if one is + provided. 
mdtol=0 means no missing data is tolerated + while mdtol=1 will mean the resulting element will be masked if and + only if all the contributing elements of data are masked. + Defaults to 0. + + Returns: + Numpy array (possibly masked) or scalar. + + """ + if ma.is_masked(data): + res, unmasked_weights_sum = ma.average( + data, weights=weights, axis=axis, returned=True + ) + if mdtol < 1: + weights_sum = weights.sum(axis=axis) + frac_masked = 1 - np.true_divide(unmasked_weights_sum, weights_sum) + mask_pt = frac_masked > mdtol + if np.any(mask_pt) and not isinstance(res, ma.core.MaskedConstant): + if np.isscalar(res): + res = ma.masked + elif ma.isMaskedArray(res): + res.mask |= mask_pt + else: + res = ma.masked_array(res, mask=mask_pt) + else: + res = np.average(data, weights=weights, axis=axis) + return res + + +def _regrid_area_weighted_array( + src_data, x_dim, y_dim, weights_info, index_info, mdtol=0 +): + """ + Regrid the given data from its source grid to a new grid using + an area weighted mean to determine the resulting data values. + + .. note:: + + Elements in the returned array that lie either partially + or entirely outside of the extent of the source grid will + be masked irrespective of the value of mdtol. + + Args: + + * src_data: + An N-dimensional NumPy array. + * x_dim: + The X dimension within `src_data`. + * y_dim: + The Y dimension within `src_data`. + * weights_info: + The area weights information to be used for area-weighted + regridding. + + Kwargs: + + * mdtol: + Tolerance of missing data. The value returned in each element of the + returned array will be masked if the fraction of missing data exceeds + mdtol. This fraction is calculated based on the area of masked cells + within each target cell. mdtol=0 means no missing data is tolerated + while mdtol=1 will mean the resulting element will be masked if and + only if all the overlapping elements of the source grid are masked. + Defaults to 0. + + Returns: + The regridded data as an N-dimensional NumPy array. The lengths + of the X and Y dimensions will now match those of the target + grid. + + """ + ( + blank_weights, + src_area_weights, + new_data_mask_basis, + ) = weights_info + + ( + result_x_extent, + result_y_extent, + square_data_indices_y, + square_data_indices_x, + src_area_datas_required, + ) = index_info + + # Ensure we have x_dim and y_dim. + x_dim_orig = x_dim + y_dim_orig = y_dim + if y_dim is None: + src_data = np.expand_dims(src_data, axis=src_data.ndim) + y_dim = src_data.ndim - 1 + if x_dim is None: + src_data = np.expand_dims(src_data, axis=src_data.ndim) + x_dim = src_data.ndim - 1 + # Move y_dim and x_dim to last dimensions + if not x_dim == src_data.ndim - 1: + src_data = np.moveaxis(src_data, x_dim, -1) + if not y_dim == src_data.ndim - 2: + if x_dim < y_dim: + # note: y_dim was shifted along by one position when + # x_dim was moved to the last dimension + src_data = np.moveaxis(src_data, y_dim - 1, -2) + elif x_dim > y_dim: + src_data = np.moveaxis(src_data, y_dim, -2) + x_dim = src_data.ndim - 1 + y_dim = src_data.ndim - 2 + + # Create empty "pre-averaging" data array that will enable the + # src_data data corresponding to a given target grid point, + # to be stacked per point. + # Note that dtype is not preserved and that the array mask + # allows for regions that do not overlap. + new_shape = list(src_data.shape) + new_shape[x_dim] = result_x_extent + new_shape[y_dim] = result_y_extent + + # Use input cube dtype or convert values to the smallest possible float + # dtype when necessary. 
+ dtype = np.promote_types(src_data.dtype, np.float16) + + # Axes of data over which the weighted mean is calculated. + axis = (y_dim, x_dim) + + # Use previously established indices + + src_area_datas_square = src_data[ + ..., square_data_indices_y, square_data_indices_x + ] + + _, src_area_datas_required = np.broadcast_arrays( + src_area_datas_square, src_area_datas_required + ) + + src_area_datas = np.where( + src_area_datas_required, src_area_datas_square, 0 + ) + + # Flag to indicate whether the original data was a masked array. + src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False + if src_masked: + src_area_masks_square = src_data.mask[ + ..., square_data_indices_y, square_data_indices_x + ] + src_area_masks = np.where( + src_area_datas_required, src_area_masks_square, True + ) + + else: + # If the weights were originally blank, set the weights to all 1 to + # avoid divide by 0 error and set the new data mask for making the + # values 0 + src_area_weights = np.where(blank_weights, 1, src_area_weights) + + new_data_mask = np.broadcast_to(new_data_mask_basis, new_shape) + + # Broadcast the weights array to allow numpy's ma.average + # to be called. + # Assign new shape to raise error on copy. + src_area_weights.shape = src_area_datas.shape[-3:] + # Broadcast weights to match shape of data. + _, src_area_weights = np.broadcast_arrays(src_area_datas, src_area_weights) + + # Mask the data points + if src_masked: + src_area_datas = np.ma.array(src_area_datas, mask=src_area_masks) + + # Calculate weighted mean taking into account missing data. + new_data = _weighted_mean_with_mdtol( + src_area_datas, weights=src_area_weights, axis=axis, mdtol=mdtol + ) + new_data = new_data.reshape(new_shape) + if src_masked: + new_data_mask = new_data.mask + + # Mask the data if originally masked or if the result has masked points + if ma.isMaskedArray(src_data): + new_data = ma.array( + new_data, + mask=new_data_mask, + fill_value=src_data.fill_value, + dtype=dtype, + ) + elif new_data_mask.any(): + new_data = ma.array(new_data, mask=new_data_mask, dtype=dtype) + else: + new_data = new_data.astype(dtype) + + # Restore data to original form + if x_dim_orig is None and y_dim_orig is None: + new_data = np.squeeze(new_data, axis=x_dim) + new_data = np.squeeze(new_data, axis=y_dim) + elif y_dim_orig is None: + new_data = np.squeeze(new_data, axis=y_dim) + new_data = np.moveaxis(new_data, -1, x_dim_orig) + elif x_dim_orig is None: + new_data = np.squeeze(new_data, axis=x_dim) + new_data = np.moveaxis(new_data, -1, y_dim_orig) + elif x_dim_orig < y_dim_orig: + # move the x_dim back first, so that the y_dim will + # then be moved to its original position + new_data = np.moveaxis(new_data, -1, x_dim_orig) + new_data = np.moveaxis(new_data, -1, y_dim_orig) + else: + # move the y_dim back first, so that the x_dim will + # then be moved to its original position + new_data = np.moveaxis(new_data, -2, y_dim_orig) + new_data = np.moveaxis(new_data, -1, x_dim_orig) + + return new_data def _regrid_area_weighted_rectilinear_src_and_grid__prepare( @@ -323,51 +776,290 @@ def _regrid_area_weighted_rectilinear_src_and_grid__prepare( # Create 2d meshgrids as required by _create_cube func. meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points) + # Determine whether target grid bounds are decreasing. This must + # be determined prior to wrap_lons being called. 
+ grid_x_decreasing = grid_x_bounds[-1, 0] < grid_x_bounds[0, 0] + grid_y_decreasing = grid_y_bounds[-1, 0] < grid_y_bounds[0, 0] + # Wrapping of longitudes. if spherical: + base = np.min(src_x_bounds) modulus = x_units.modulus + # Only wrap if necessary to avoid introducing floating + # point errors. + if np.min(grid_x_bounds) < base or np.max(grid_x_bounds) > ( + base + modulus + ): + grid_x_bounds = iris.analysis.cartography.wrap_lons( + grid_x_bounds, base, modulus + ) + + # Determine whether the src_x coord has periodic boundary conditions. + circular = getattr(src_x, "circular", False) + + # Use simple cartesian area function or one that takes into + # account the curved surface if coord system is spherical. + if spherical: + area_func = _spherical_area else: - modulus = None + area_func = _cartesian_area def _calculate_regrid_area_weighted_weights( src_x_bounds, src_y_bounds, grid_x_bounds, grid_y_bounds, - spherical, - modulus=None, + grid_x_decreasing, + grid_y_decreasing, + area_func, + circular=False, ): - """Return weights matrix to be used in regridding.""" - src_shape = (len(src_x_bounds) - 1, len(src_y_bounds) - 1) - tgt_shape = (len(grid_x_bounds) - 1, len(grid_y_bounds) - 1) - - if spherical: - # Changing the dtype here replicates old regridding behaviour. - dtype = np.float64 - src_x_bounds = src_x_bounds.astype(dtype) - src_y_bounds = src_y_bounds.astype(dtype) - grid_x_bounds = grid_x_bounds.astype(dtype) - grid_y_bounds = grid_y_bounds.astype(dtype) - - src_y_bounds = np.sin(src_y_bounds) - grid_y_bounds = np.sin(grid_y_bounds) - x_info = _get_coord_to_coord_matrix_info( - src_x_bounds, grid_x_bounds, circular=spherical, mod=modulus + """ + Compute the area weights used for area-weighted regridding. + Args: + * src_x_bounds: + A NumPy array of bounds along the X axis defining the source grid. + * src_y_bounds: + A NumPy array of bounds along the Y axis defining the source grid. + * grid_x_bounds: + A NumPy array of bounds along the X axis defining the new grid. + * grid_y_bounds: + A NumPy array of bounds along the Y axis defining the new grid. + * grid_x_decreasing: + Boolean indicating whether the X coordinate of the new grid is + in descending order. + * grid_y_decreasing: + Boolean indicating whether the Y coordinate of the new grid is + in descending order. + * area_func: + A function that returns an (p, q) array of weights given an (p, 2) + shaped array of Y bounds and an (q, 2) shaped array of X bounds. + Kwargs: + * circular: + A boolean indicating whether the `src_x_bounds` are periodic. + Default is False. + Returns: + The area weights to be used for area-weighted regridding. + """ + # Determine which grid bounds are within src extent. 
+ y_within_bounds = _within_bounds( + src_y_bounds, grid_y_bounds, grid_y_decreasing + ) + x_within_bounds = _within_bounds( + src_x_bounds, grid_x_bounds, grid_x_decreasing ) - y_info = _get_coord_to_coord_matrix_info(src_y_bounds, grid_y_bounds) - weights_matrix = _combine_xy_weights( - x_info, y_info, src_shape, tgt_shape + + # Cache which src_bounds are within grid bounds + cached_x_bounds = [] + cached_x_indices = [] + max_x_indices = 0 + for x_0, x_1 in grid_x_bounds: + if grid_x_decreasing: + x_0, x_1 = x_1, x_0 + x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1) + cached_x_bounds.append(x_bounds) + cached_x_indices.append(x_indices) + # Keep record of the largest slice + if isinstance(x_indices, slice): + x_indices_size = np.sum(x_indices.stop - x_indices.start) + else: # is tuple of indices + x_indices_size = len(x_indices) + if x_indices_size > max_x_indices: + max_x_indices = x_indices_size + + # Cache which y src_bounds areas and weights are within grid bounds + cached_y_indices = [] + cached_weights = [] + max_y_indices = 0 + for j, (y_0, y_1) in enumerate(grid_y_bounds): + # Reverse lower and upper if dest grid is decreasing. + if grid_y_decreasing: + y_0, y_1 = y_1, y_0 + y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1) + cached_y_indices.append(y_indices) + # Keep record of the largest slice + if isinstance(y_indices, slice): + y_indices_size = np.sum(y_indices.stop - y_indices.start) + else: # is tuple of indices + y_indices_size = len(y_indices) + if y_indices_size > max_y_indices: + max_y_indices = y_indices_size + + weights_i = [] + for i, (x_0, x_1) in enumerate(grid_x_bounds): + # Reverse lower and upper if dest grid is decreasing. + if grid_x_decreasing: + x_0, x_1 = x_1, x_0 + x_bounds = cached_x_bounds[i] + x_indices = cached_x_indices[i] + + # Determine whether element i, j overlaps with src and hence + # an area weight should be computed. + # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in the case + # of wrapped longitudes. However if the src grid is not global + # (i.e. circular) this new cell would include a region outside of + # the extent of the src grid and thus the weight is therefore + # invalid. + outside_extent = x_0 > x_1 and not circular + if ( + outside_extent + or not y_within_bounds[j] + or not x_within_bounds[i] + ): + weights = False + else: + # Calculate weights based on areas of cropped bounds. + if isinstance(x_indices, tuple) and isinstance( + y_indices, tuple + ): + raise RuntimeError( + "Cannot handle split bounds " "in both x and y." 
+ ) + weights = area_func(y_bounds, x_bounds) + weights_i.append(weights) + cached_weights.append(weights_i) + return ( + tuple(cached_x_indices), + tuple(cached_y_indices), + max_x_indices, + max_y_indices, + tuple(cached_weights), ) - return weights_matrix - weights = _calculate_regrid_area_weighted_weights( + ( + cached_x_indices, + cached_y_indices, + max_x_indices, + max_y_indices, + cached_weights, + ) = _calculate_regrid_area_weighted_weights( src_x_bounds, src_y_bounds, grid_x_bounds, grid_y_bounds, - spherical, - modulus, + grid_x_decreasing, + grid_y_decreasing, + area_func, + circular, + ) + + # Go further, calculating the full weights array that we'll need in the + # perform step and the indices we'll need to extract from the cube we're + # regridding (src_data) + + result_y_extent = len(grid_y_bounds) + result_x_extent = len(grid_x_bounds) + + # Total number of points + num_target_pts = result_y_extent * result_x_extent + + # Create empty array to hold weights + src_area_weights = np.zeros( + list((max_y_indices, max_x_indices, num_target_pts)) ) + + # Built for the case where the source cube isn't masked + blank_weights = np.zeros((num_target_pts,)) + new_data_mask_basis = np.full( + (len(cached_y_indices), len(cached_x_indices)), False, dtype=np.bool_ + ) + + # To permit fancy indexing, we need to store our data in an array whose + # first two dimensions represent the indices needed for the target cell. + # Since target cells can require a different number of indices, the size of + # these dimensions should be the maximum of this number. + # This means we need to track whether the data in + # that array is actually required and build those squared-off arrays + # TODO: Consider if a proper mask would be better + src_area_datas_required = np.full( + (max_y_indices, max_x_indices, num_target_pts), False + ) + square_data_indices_y = np.zeros( + (max_y_indices, max_x_indices, num_target_pts), dtype=int + ) + square_data_indices_x = np.zeros( + (max_y_indices, max_x_indices, num_target_pts), dtype=int + ) + + # Stack the weights for each target point and build the indices we'll need + # to extract the src_area_data + target_pt_ji = -1 + for j, y_indices in enumerate(cached_y_indices): + for i, x_indices in enumerate(cached_x_indices): + target_pt_ji += 1 + # Determine whether to mask element i, j based on whether + # there are valid weights. + weights = cached_weights[j][i] + if weights is False: + # Prepare for the src_data not being masked by storing the + # information that will let us fill the data with zeros and + # weights as one. The weighted average result will be the same, + # but we avoid dividing by zero. 
+ blank_weights[target_pt_ji] = True + new_data_mask_basis[j, i] = True + else: + # Establish which indices are actually in y_indices and x_indices + if isinstance(y_indices, slice): + y_indices = list( + range( + y_indices.start, + y_indices.stop, + y_indices.step or 1, + ) + ) + else: + y_indices = list(y_indices) + + if isinstance(x_indices, slice): + x_indices = list( + range( + x_indices.start, + x_indices.stop, + x_indices.step or 1, + ) + ) + else: + x_indices = list(x_indices) + + # For the weights, we just need the lengths of these as we're + # dropping them into a pre-made array + + len_y = len(y_indices) + len_x = len(x_indices) + + src_area_weights[0:len_y, 0:len_x, target_pt_ji] = weights + + # To build the indices for the source cube, we need equal + # shaped array so we pad with 0s and record the need to mask + # them in src_area_datas_required + padded_y_indices = y_indices + [0] * (max_y_indices - len_y) + padded_x_indices = x_indices + [0] * (max_x_indices - len_x) + + square_data_indices_y[..., target_pt_ji] = np.array( + padded_y_indices + )[:, np.newaxis] + square_data_indices_x[..., target_pt_ji] = padded_x_indices + + src_area_datas_required[0:len_y, 0:len_x, target_pt_ji] = True + + # Package up the return data + + weights_info = ( + blank_weights, + src_area_weights, + new_data_mask_basis, + ) + + index_info = ( + result_x_extent, + result_y_extent, + square_data_indices_y, + square_data_indices_x, + src_area_datas_required, + ) + + # Now return it + return ( src_x, src_y, @@ -377,7 +1069,8 @@ def _calculate_regrid_area_weighted_weights( grid_y, meshgrid_x, meshgrid_y, - weights, + weights_info, + index_info, ) @@ -399,18 +1092,17 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( grid_y, meshgrid_x, meshgrid_y, - weights, + weights_info, + index_info, ) = regrid_info - tgt_shape = (len(grid_y.points), len(grid_x.points)) - # Calculate new data array for regridded cube. regrid = functools.partial( - _regrid_along_dims, + _regrid_area_weighted_array, x_dim=src_x_dim, y_dim=src_y_dim, - weights=weights, - tgt_shape=tgt_shape, + weights_info=weights_info, + index_info=index_info, mdtol=mdtol, ) @@ -429,9 +1121,9 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( ) # TODO: investigate if an area weighted callback would be more appropriate. # _regrid_callback = functools.partial( - # _regrid_along_dims, - # weights=weights, - # tgt_shape=tgt_shape, + # _regrid_area_weighted_array, + # weights_info=weights_info, + # index_info=index_info, # mdtol=mdtol, # ) @@ -458,263 +1150,3 @@ def regrid_callback(*args, **kwargs): new_cube = new_cube[tuple(indices)] return new_cube - - -def _get_coord_to_coord_matrix_info( - src_bounds, tgt_bounds, circular=False, mod=None -): - """ - First part of weight calculation. - - Calculate the weights contribution from a single pair of - coordinate bounds. Search for pairs of overlapping source and - target bounds and associate weights with them. - - Note: this assumes that the bounds are monotonic. - """ - # Calculate the number of cells represented by the bounds. - m = len(tgt_bounds) - 1 - n = len(src_bounds) - 1 - - # Ensure bounds are strictly increasing. - src_decreasing = src_bounds[0] > src_bounds[1] - tgt_decreasing = tgt_bounds[0] > tgt_bounds[1] - if src_decreasing: - src_bounds = src_bounds[::-1] - if tgt_decreasing: - tgt_bounds = tgt_bounds[::-1] - - if circular: - # For circular coordinates (e.g. longitude) account for source and - # target bounds which span different ranges (e.g. 
(-180, 180) vs - # (0, 360)). We ensure that all possible overlaps between source and - # target bounds are accounted for by including two copies of the - # source bounds, shifted appropriately by the modulus. - adjust = (tgt_bounds.min() - src_bounds.min()) // mod - src_bounds = src_bounds + (mod * adjust) - src_bounds = np.append(src_bounds, src_bounds + mod) - nn = (2 * n) + 1 - else: - nn = n - - # Before iterating through pairs of overlapping bounds, find an - # appropriate place to start iteration. Note that this assumes that - # the bounds are increasing. - i = max(np.searchsorted(tgt_bounds, src_bounds[0], side="right") - 1, 0) - j = max(np.searchsorted(src_bounds, tgt_bounds[0], side="right") - 1, 0) - - data = [] - rows = [] - cols = [] - - # Iterate through overlapping cells in the source and target bounds. - # For the sake of calculations, we keep track of the minimum value of - # the intersection of each cell. - floor = max(tgt_bounds[i], src_bounds[j]) - while i < m and j < nn: - # Record the current indices. - rows.append(i) - cols.append(j) - - # Determine the next indices and floor. - if tgt_bounds[i + 1] < src_bounds[j + 1]: - next_floor = tgt_bounds[i + 1] - next_i = i + 1 - elif tgt_bounds[i + 1] == src_bounds[j + 1]: - next_floor = tgt_bounds[i + 1] - next_i = i + 1 - j += 1 - else: - next_floor = src_bounds[j + 1] - next_i = i - j += 1 - - # Calculate and record the weight for the current overlapping cells. - weight = (next_floor - floor) / (tgt_bounds[i + 1] - tgt_bounds[i]) - data.append(weight) - - # Update indices and floor - i = next_i - floor = next_floor - - data = np.array(data) - rows = np.array(rows) - cols = np.array(cols) - - if circular: - # Remove out of bounds points. When the source bounds were duplicated - # an "out of bounds" cell was introduced between the two copies. - oob = np.where(cols == n) - data = np.delete(data, oob) - rows = np.delete(rows, oob) - cols = np.delete(cols, oob) - - # Wrap indices. Since we duplicated the source bounds there may be - # indices which are greater than n which will need to be corrected. - cols = cols % (n + 1) - - # Correct indices which were flipped due to reversing decreasing bounds. - if src_decreasing: - cols = n - cols - 1 - if tgt_decreasing: - rows = m - rows - 1 - - return data, rows, cols - - -def _combine_xy_weights(x_info, y_info, src_shape, tgt_shape): - """ - Second part of weight calculation. - - Combine the weights contributions from both pairs of coordinate - bounds (i.e. the source/target pairs for the x and y coords). - Return the result as a sparse array. - """ - x_src, y_src = src_shape - x_tgt, y_tgt = tgt_shape - src_size = x_src * y_src - tgt_size = x_tgt * y_tgt - x_weight, x_rows, x_cols = x_info - y_weight, y_rows, y_cols = y_info - - # Regridding weights will be applied to a flattened (y, x) array. - # Weights and indices are constructed in a way to account for this. - # Weights of the combined matrix are constructed by broadcasting - # the x_weights and y_weights. The resulting array contains every - # combination of x weight and y weight. Then we flatten this array. - xy_weight = y_weight[:, np.newaxis] * x_weight[np.newaxis, :] - xy_weight = xy_weight.flatten() - - # Given the x index and y index associated with a weight, calculate - # the equivalent index in the flattened (y, x) array. 
- xy_rows = (y_rows[:, np.newaxis] * x_tgt) + x_rows[np.newaxis, :] - xy_rows = xy_rows.flatten() - xy_cols = (y_cols[:, np.newaxis] * x_src) + x_cols[np.newaxis, :] - xy_cols = xy_cols.flatten() - - # Create a sparse matrix for efficient weight application. - combined_weights = csr_array( - (xy_weight, (xy_rows, xy_cols)), shape=(tgt_size, src_size) - ) - return combined_weights - - -def _standard_regrid_no_masks(data, weights, tgt_shape): - """ - Regrid unmasked data to an unmasked result. - - Assumes that the first two dimensions are the x-y grid. - """ - # Reshape data to a form suitable for matrix multiplication. - extra_shape = data.shape[:-2] - data = data.reshape(-1, np.prod(data.shape[-2:])) - - # Apply regridding weights. - # The order of matrix multiplication is chosen to be consistent - # with existing regridding code. - result = data @ weights.T - - # Reshape result to a suitable form. - result = result.reshape(*(extra_shape + tgt_shape)) - return result - - -def _standard_regrid(data, weights, tgt_shape, mdtol): - """ - Regrid data and handle masks. - - Assumes that the first two dimensions are the x-y grid. - """ - # This is set to keep consistent with legacy behaviour. - # This is likely to become switchable in the future, see: - # https://github.com/SciTools/iris/issues/5461 - oob_invalid = True - - data_shape = data.shape - if ma.is_masked(data): - unmasked = ~ma.getmaskarray(data) - # Calculate contribution from unmasked sources to each target point. - weight_sums = _standard_regrid_no_masks(unmasked, weights, tgt_shape) - else: - # If there are no masked points then all contributions will be - # from unmasked sources, so we can skip this calculation - weight_sums = np.ones(data_shape[:-2] + tgt_shape) - mdtol = max(mdtol, 1e-8) - tgt_mask = weight_sums > 1 - mdtol - # If out of bounds sources are treated the same as masked sources this - # will already have been calculated above, so we can skip this calculation. - if oob_invalid or not ma.is_masked(data): - # Calculate the proportion of each target cell which is covered by the - # source. For the sake of efficiency, this is calculated for a 2D slice - # which is then broadcast. - inbound_sums = _standard_regrid_no_masks( - np.ones(data_shape[-2:]), weights, tgt_shape - ) - if oob_invalid: - # Legacy behaviour, if the full area of a target cell does not lie - # in bounds it will be masked. - oob_mask = inbound_sums > 1 - 1e-8 - else: - # Note: this code is currently inaccessible. This code exists to lay - # the groundwork for future work which will make out of bounds - # behaviour switchable. - oob_mask = inbound_sums > 1 - mdtol - # Broadcast the mask to the shape of the full array - oob_slice = ((np.newaxis,) * len(data.shape[:-2])) + np.s_[:, :] - tgt_mask = tgt_mask * oob_mask[oob_slice] - - # Calculate normalisations. - normalisations = tgt_mask.astype(weight_sums.dtype) - normalisations[tgt_mask] /= weight_sums[tgt_mask] - - # Mask points in the result. - if ma.isMaskedArray(data): - # If the source is masked, the result should have a similar mask. - fill_value = data.fill_value - normalisations = ma.array( - normalisations, mask=~tgt_mask, fill_value=fill_value - ) - elif np.any(~tgt_mask): - normalisations = ma.array(normalisations, mask=~tgt_mask) - - # Use input cube dtype or convert values to the smallest possible float - # dtype when necessary. - dtype = np.promote_types(data.dtype, np.float16) - - # Perform regridding on unmasked data. 
- result = _standard_regrid_no_masks( - ma.filled(data, 0.0), weights, tgt_shape - ) - # Apply normalisations and masks to the regridded data. - result = result * normalisations - result = result.astype(dtype) - return result - - -def _regrid_along_dims(data, x_dim, y_dim, weights, tgt_shape, mdtol): - """Regrid data, handling masks and dimensions.""" - # Handle scalar coordinates. - # Note: scalar source coordinates are only handled when their - # corresponding target coordinate is also scalar. - num_scalar_dims = 0 - if x_dim is None: - num_scalar_dims += 1 - data = np.expand_dims(data, -1) - x_dim = -1 - if y_dim is None: - num_scalar_dims += 1 - data = np.expand_dims(data, -1) - y_dim = -1 - if num_scalar_dims == 2: - y_dim = -2 - - # Standard regridding expects the last two dimensions to belong - # to the y and x coordinate and will output as such. - # Axes are moved to account for an arbitrary dimension ordering. - data = np.moveaxis(data, [y_dim, x_dim], [-2, -1]) - result = _standard_regrid(data, weights, tgt_shape, mdtol) - result = np.moveaxis(result, [-2, -1], [y_dim, x_dim]) - - for _ in range(num_scalar_dims): - result = np.squeeze(result, axis=-1) - return result diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 86a0c38086..4cb449ae51 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Code to implement vector rotation by angles, and inferring gridcell angles from coordinate points and bounds. diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 091d29d7e2..34dcae3026 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """A collection of helpers for interpolation.""" from collections import namedtuple diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 113c21e6e3..4592a0ede7 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
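The block removed above is the sparse-matrix implementation of area-weighted regridding: the x and y overlap weights are combined into a single ``(n_target, n_source)`` matrix and applied to source data flattened over its ``(y, x)`` grid dimensions. A minimal sketch of that scheme, using invented array sizes and uniform placeholder weights rather than real overlap fractions::

    import numpy as np
    from scipy.sparse import csr_array

    # Hypothetical sizes: a 3 x 4 source grid regridded onto a 2 x 2 target.
    src_shape, tgt_shape = (3, 4), (2, 2)
    src_size = src_shape[0] * src_shape[1]
    tgt_size = tgt_shape[0] * tgt_shape[1]

    # Illustrative weights only: every target cell takes an equal share of
    # each source cell; the real matrix holds the x/y overlap fractions.
    weights = csr_array(np.full((tgt_size, src_size), 1.0 / src_size))

    # Flatten the (y, x) grid, apply the weights, then restore the target
    # shape, mirroring the removed _standard_regrid_no_masks above.
    data = np.arange(src_size, dtype=float).reshape(src_shape)
    result = (data.reshape(1, src_size) @ weights.T).reshape(tgt_shape)

Both this implementation and the one restored by the revert sit behind the same public entry point: an :class:`iris.analysis.AreaWeighted` scheme passed to :meth:`iris.cube.Cube.regrid`.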
import copy import functools @@ -19,7 +20,6 @@ snapshot_grid, ) from iris.analysis._scipy_interpolate import _RegularGridInterpolator -from iris.exceptions import IrisImpossibleUpdateWarning from iris.util import _meshgrid, guess_coord_axis @@ -1136,6 +1136,6 @@ def regrid_reference_surface( "Cannot update aux_factory {!r} because of dropped" " coordinates.".format(factory.name()) ) - warnings.warn(msg, category=IrisImpossibleUpdateWarning) + warnings.warn(msg) return result diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index f312aa02a0..75b7d86406 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Calculus operations on :class:`iris.cube.Cube` instances. @@ -23,7 +24,6 @@ import iris.analysis.maths import iris.coord_systems import iris.coords -from iris.exceptions import IrisUserWarning from iris.util import delta __all__ = ["cube_delta", "curl", "differentiate"] @@ -85,10 +85,7 @@ def _construct_midpoint_coord(coord, circular=None): "Construction coordinate midpoints for the '{}' coordinate, " "though it has the attribute 'circular'={}." ) - warnings.warn( - msg.format(circular, coord.circular, coord.name()), - category=IrisUserWarning, - ) + warnings.warn(msg.format(circular, coord.circular, coord.name())) if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(coord) diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index a760f5ab50..0d17f0b38a 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Various utilities and numeric transformations relevant to cartography. 
@@ -400,25 +401,16 @@ def area_weights(cube, normalize=False): cs = cube.coord_system("CoordSystem") if isinstance(cs, iris.coord_systems.GeogCS): if cs.inverse_flattening != 0.0: - warnings.warn( - "Assuming spherical earth from ellipsoid.", - category=iris.exceptions.IrisDefaultingWarning, - ) + warnings.warn("Assuming spherical earth from ellipsoid.") radius_of_earth = cs.semi_major_axis elif isinstance(cs, iris.coord_systems.RotatedGeogCS) and ( cs.ellipsoid is not None ): if cs.ellipsoid.inverse_flattening != 0.0: - warnings.warn( - "Assuming spherical earth from ellipsoid.", - category=iris.exceptions.IrisDefaultingWarning, - ) + warnings.warn("Assuming spherical earth from ellipsoid.") radius_of_earth = cs.ellipsoid.semi_major_axis else: - warnings.warn( - "Using DEFAULT_SPHERICAL_EARTH_RADIUS.", - category=iris.exceptions.IrisDefaultingWarning, - ) + warnings.warn("Using DEFAULT_SPHERICAL_EARTH_RADIUS.") radius_of_earth = DEFAULT_SPHERICAL_EARTH_RADIUS # Get the lon and lat coords and axes @@ -559,7 +551,7 @@ def cosine_latitude_weights(cube): warnings.warn( "Out of range latitude values will be " "clipped to the valid range.", - category=iris.exceptions.IrisDefaultingWarning, + UserWarning, ) points = lat.points l_weights = np.cos(points).clip(0.0, 1.0) @@ -673,8 +665,7 @@ def project(cube, target_proj, nx=None, ny=None): # Assume WGS84 latlon if unspecified warnings.warn( "Coordinate system of latitude and longitude " - "coordinates is not specified. Assuming WGS84 Geodetic.", - category=iris.exceptions.IrisDefaultingWarning, + "coordinates is not specified. Assuming WGS84 Geodetic." ) orig_cs = iris.coord_systems.GeogCS( semi_major_axis=6378137.0, inverse_flattening=298.257223563 @@ -866,8 +857,7 @@ def project(cube, target_proj, nx=None, ny=None): lat_coord.name(), lon_coord.name(), [coord.name() for coord in discarded_coords], - ), - category=iris.exceptions.IrisIgnoringWarning, + ) ) # TODO handle derived coords/aux_factories diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index d7ed7f8840..b246b518d4 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Various utilities related to geometric operations. 
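With the dedicated warning classes reverted, the messages in this hunk are once more plain :class:`UserWarning` messages, so suppressing them means matching on message text rather than on a category such as ``iris.exceptions.IrisDefaultingWarning``. A minimal sketch, reusing the message strings shown above::

    import warnings

    # Before this revert these could be silenced by category; afterwards the
    # message text is the only specific handle.
    for msg in (
        "Using DEFAULT_SPHERICAL_EARTH_RADIUS.",
        "Assuming spherical earth from ellipsoid.",
    ):
        warnings.filterwarnings("ignore", message=msg, category=UserWarning)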
@@ -73,7 +74,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's x dimension at the " "lower end.", - category=iris.exceptions.IrisGeometryExceedWarning, + UserWarning, ) x_min_ix = 0 if x_ascending else x_coord.points.size - 1 @@ -83,7 +84,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's x dimension at the " "upper end.", - category=iris.exceptions.IrisGeometryExceedWarning, + UserWarning, ) x_max_ix = x_coord.points.size - 1 if x_ascending else 0 @@ -93,7 +94,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's y dimension at the " "lower end.", - category=iris.exceptions.IrisGeometryExceedWarning, + UserWarning, ) y_min_ix = 0 if y_ascending else y_coord.points.size - 1 @@ -103,7 +104,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's y dimension at the " "upper end.", - category=iris.exceptions.IrisGeometryExceedWarning, + UserWarning, ) y_max_ix = y_coord.points.size - 1 if y_ascending else 0 diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index a24203ba2a..b77c6cd80f 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Basic mathematical and statistical operations. @@ -987,8 +988,7 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): if other.has_bounds(): warnings.warn( "Using {!r} with a bounded coordinate is not well " - "defined; ignoring bounds.".format(operation_name), - category=iris.exceptions.IrisIgnoringBoundsWarning, + "defined; ignoring bounds.".format(operation_name) ) points = other.points diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 121d862adb..711e3c5bfb 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Statistical operations between cubes. diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 99c8add123..84ce89ab6f 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory. 
@@ -733,7 +734,7 @@ class UnstructuredNearestNeigbourRegridder: """ - # TODO: cache the necessary bits of the operation so reuse can actually + # TODO: cache the necessary bits of the operation so re-use can actually # be more efficient. def __init__(self, src_cube, target_grid_cube): """ @@ -872,7 +873,7 @@ def __init__(self, src_cube, target_grid_cube): def __call__(self, src_cube): # Check the source cube X and Y coords match the original. # Note: for now, this is sufficient to ensure a valid trajectory - # interpolation, but if in future we save and reuse the cache context + # interpolation, but if in future we save + re-use the cache context # for the 'interpolate' call, we may need more checks here. # Check the given cube against the original. diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index 61855f1188..f49de62b3f 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Definitions of derived coordinates. @@ -20,7 +21,6 @@ metadata_manager_factory, ) import iris.coords -from iris.exceptions import IrisIgnoringBoundsWarning class AuxCoordFactory(CFVariableMixin, metaclass=ABCMeta): @@ -441,9 +441,7 @@ def _check_dependencies(pressure_at_top, sigma, surface_air_pressure): f"Coordinate '{coord.name()}' has bounds. These will " "be disregarded" ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Check units if sigma.units.is_unknown(): @@ -524,8 +522,7 @@ def make_coord(self, coord_dims_func): if pressure_at_top.shape[-1:] not in [(), (1,)]: warnings.warn( "Pressure at top coordinate has bounds. These are being " - "disregarded", - category=IrisIgnoringBoundsWarning, + "disregarded" ) pressure_at_top_pts = nd_points_by_key["pressure_at_top"] bds_shape = list(pressure_at_top_pts.shape) + [1] @@ -533,8 +530,7 @@ def make_coord(self, coord_dims_func): if surface_air_pressure.shape[-1:] not in [(), (1,)]: warnings.warn( "Surface pressure coordinate has bounds. These are being " - "disregarded", - category=IrisIgnoringBoundsWarning, + "disregarded" ) surface_air_pressure_pts = nd_points_by_key[ "surface_air_pressure" @@ -599,9 +595,7 @@ def __init__(self, delta=None, sigma=None, orography=None): "Orography coordinate {!r} has bounds." " These will be disregarded.".format(orography.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) self.delta = delta self.sigma = sigma @@ -690,7 +684,7 @@ def make_coord(self, coord_dims_func): warnings.warn( "Orography coordinate has bounds. " "These are being disregarded.", - category=IrisIgnoringBoundsWarning, + UserWarning, stacklevel=2, ) orography_pts = nd_points_by_key["orography"] @@ -745,9 +739,7 @@ def update(self, old_coord, new_coord=None): "Orography coordinate {!r} has bounds." " These will be disregarded.".format(new_coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) self.orography = new_coord @@ -814,9 +806,7 @@ def _check_dependencies(delta, sigma, surface_air_pressure): "Surface pressure coordinate {!r} has bounds. 
These will" " be disregarded.".format(surface_air_pressure.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Check units. if sigma is not None and sigma.units.is_unknown(): @@ -908,8 +898,7 @@ def make_coord(self, coord_dims_func): if surface_air_pressure.shape[-1:] not in [(), (1,)]: warnings.warn( "Surface pressure coordinate has bounds. " - "These are being disregarded.", - category=IrisIgnoringBoundsWarning, + "These are being disregarded." ) surface_air_pressure_pts = nd_points_by_key[ "surface_air_pressure" @@ -1023,9 +1012,7 @@ def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) for coord, term in ((depth_c, "depth_c"), (nsigma, "nsigma")): if coord is not None and coord.shape != (1,): @@ -1200,9 +1187,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1283,9 +1268,7 @@ def _check_dependencies(sigma, eta, depth): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Check units. if sigma is not None and sigma.units.is_unknown(): @@ -1366,9 +1349,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1463,9 +1444,7 @@ def _check_dependencies(s, c, eta, depth, depth_c): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) if depth_c is not None and depth_c.shape != (1,): msg = ( @@ -1564,9 +1543,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1660,9 +1637,7 @@ def _check_dependencies(s, eta, depth, a, b, depth_c): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) coords = ((a, "a"), (b, "b"), (depth_c, "depth_c")) for coord, term in coords: @@ -1765,9 +1740,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. 
" "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) @@ -1866,9 +1839,7 @@ def _check_dependencies(s, c, eta, depth, depth_c): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(term, coord.name()) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) if depth_c is not None and depth_c.shape != (1,): msg = ( @@ -1967,9 +1938,7 @@ def make_coord(self, coord_dims_func): "The {} coordinate {!r} has bounds. " "These are being disregarded.".format(key, name) ) - warnings.warn( - msg, category=IrisIgnoringBoundsWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) # Swap bounds with points. bds_shape = list(nd_points_by_key[key].shape) + [1] bounds = nd_points_by_key[key].reshape(bds_shape) diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py index 8526c549c3..d8e8ba80ef 100644 --- a/lib/iris/common/__init__.py +++ b/lib/iris/common/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A package for provisioning common Iris infrastructure. diff --git a/lib/iris/common/_split_attribute_dicts.py b/lib/iris/common/_split_attribute_dicts.py deleted file mode 100644 index 3927974053..0000000000 --- a/lib/iris/common/_split_attribute_dicts.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -""" -Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute -dictionaries. - -The idea here is to convert a split-dictionary into a "plain" one for calculations, -whose keys are all pairs of the form ('global', ) or ('local', ). -And to convert back again after the operation, if the result is a dictionary. - -For "strict" operations this clearly does all that is needed. For lenient ones, -we _might_ want for local+global attributes of the same name to interact. -However, on careful consideration, it seems that this is not actually desirable for -any of the common-metadata operations. -So, we simply treat "global" and "local" attributes of the same name as entirely -independent. Which happily is also the easiest to code, and to explain. -""" -from collections.abc import Mapping, Sequence -from functools import wraps - - -def _convert_splitattrs_to_pairedkeys_dict(dic): - """ - Convert a split-attributes dictionary to a "normal" dict. - - Transform a :class:`~iris.cube.CubeAttributesDict` "split" attributes dictionary - into a 'normal' :class:`dict`, with paired keys of the form ('global', name) or - ('local', name). - - If the input is *not* a split-attrs dict, it is converted to one before - transforming it. This will assign its keys to global/local depending on a standard - set of choices (see :class:`~iris.cube.CubeAttributesDict`). 
- """ - from iris.cube import CubeAttrsDict - - # Convert input to CubeAttrsDict - if not hasattr(dic, "globals") or not hasattr(dic, "locals"): - dic = CubeAttrsDict(dic) - - def _global_then_local_items(dic): - # Routine to produce global, then local 'items' in order, and with all keys - # "labelled" as local or global type, to ensure they are all unique. - for key, value in dic.globals.items(): - yield ("global", key), value - for key, value in dic.locals.items(): - yield ("local", key), value - - return dict(_global_then_local_items(dic)) - - -def _convert_pairedkeys_dict_to_splitattrs(dic): - """ - Convert an input with global/local paired keys back into a split-attrs dict. - - For now, this is always and only a :class:`iris.cube.CubeAttrsDict`. - """ - from iris.cube import CubeAttrsDict - - result = CubeAttrsDict() - for key, value in dic.items(): - keytype, keyname = key - if keytype == "global": - result.globals[keyname] = value - else: - assert keytype == "local" - result.locals[keyname] = value - return result - - -def adjust_for_split_attribute_dictionaries(operation): - """ - Decorator to make a function of attribute-dictionaries work with split attributes. - - The wrapped function of attribute-dictionaries is currently always one of "equals", - "combine" or "difference", with signatures like : - equals(left: dict, right: dict) -> bool - combine(left: dict, right: dict) -> dict - difference(left: dict, right: dict) -> None | (dict, dict) - - The results of the wrapped operation are either : - * for "equals" (or "__eq__") : a boolean - * for "combine" : a (converted) attributes-dictionary - * for "difference" : a list of (None or "pair"), where a pair contains two - dictionaries - - Before calling the wrapped operation, its inputs (left, right) are modified by - converting any "split" dictionaries to a form where the keys are pairs - of the form ("global", name) or ("local", name). - - After calling the wrapped operation, for "combine" or "difference", the result can - contain a dictionary or dictionaries. These are then transformed back from the - 'converted' form to split-attribute dictionaries, before returning. - - "Split" dictionaries are all of class :class:`~iris.cube.CubeAttrsDict`, since - the only usage of 'split' attribute dictionaries is in Cubes (i.e. they are not - used for cube components). - """ - - @wraps(operation) - def _inner_function(*args, **kwargs): - # Convert all inputs into 'pairedkeys' type dicts - args = [_convert_splitattrs_to_pairedkeys_dict(arg) for arg in args] - - result = operation(*args, **kwargs) - - # Convert known specific cases of 'pairedkeys' dicts in the result, and convert - # those back into split-attribute dictionaries. - if isinstance(result, Mapping): - # Fix a result which is a single dictionary -- for "combine" - result = _convert_pairedkeys_dict_to_splitattrs(result) - elif isinstance(result, Sequence) and len(result) == 2: - # Fix a result which is a pair of dictionaries -- for "difference" - left, right = result - left, right = ( - _convert_pairedkeys_dict_to_splitattrs(left), - _convert_pairedkeys_dict_to_splitattrs(right), - ) - result = result.__class__([left, right]) - # ELSE: leave other types of result unchanged. E.G. 
None, bool - - return result - - return _inner_function diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index 43dc09d5db..7c530087af 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides the infrastructure to support lenient client/service behaviour. diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index f88a2e57b5..7def79f51e 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides the infrastructure to support the common metadata API. @@ -20,7 +21,6 @@ from xxhash import xxh64_hexdigest from ..config import get_logger -from ._split_attribute_dicts import adjust_for_split_attribute_dictionaries from .lenient import _LENIENT from .lenient import _lenient_service as lenient_service from .lenient import _qualname as qualname @@ -242,11 +242,7 @@ def __str__(self): field_strings = [] for field in self._fields: value = getattr(self, field) - if ( - value is None - or isinstance(value, (str, Mapping)) - and not value - ): + if value is None or isinstance(value, (str, dict)) and not value: continue field_strings.append(f"{field}={value}") @@ -1255,46 +1251,6 @@ def _check(item): return result - # - # Override each of the attribute-dict operations in BaseMetadata, to enable - # them to deal with split-attribute dictionaries correctly. - # There are 6 of these, for (equals/combine/difference) * (lenient/strict). - # Each is overridden with a *wrapped* version of the parent method, using the - # "@adjust_for_split_attribute_dictionaries" decorator, which converts any - # split-attribute dictionaries in the inputs to ordinary dicts, and likewise - # re-converts any dictionaries in the return value. 
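The deleted helpers above work by flattening a "split" attributes dictionary into one plain dict whose keys record their global/local origin, then converting back afterwards. A hypothetical before/after pair (attribute names and values invented) to make the paired-key form concrete::

    # Split form: global and local attributes held separately.
    split = {
        "globals": {"Conventions": "CF-1.7"},
        "locals": {"STASH": "m01s00i004"},
    }

    # Paired-key form used while the metadata operation runs.
    paired = {
        ("global", "Conventions"): "CF-1.7",
        ("local", "STASH"): "m01s00i004",
    }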
- # - - @staticmethod - @adjust_for_split_attribute_dictionaries - def _combine_lenient_attributes(left, right): - return BaseMetadata._combine_lenient_attributes(left, right) - - @staticmethod - @adjust_for_split_attribute_dictionaries - def _combine_strict_attributes(left, right): - return BaseMetadata._combine_strict_attributes(left, right) - - @staticmethod - @adjust_for_split_attribute_dictionaries - def _compare_lenient_attributes(left, right): - return BaseMetadata._compare_lenient_attributes(left, right) - - @staticmethod - @adjust_for_split_attribute_dictionaries - def _compare_strict_attributes(left, right): - return BaseMetadata._compare_strict_attributes(left, right) - - @staticmethod - @adjust_for_split_attribute_dictionaries - def _difference_lenient_attributes(left, right): - return BaseMetadata._difference_lenient_attributes(left, right) - - @staticmethod - @adjust_for_split_attribute_dictionaries - def _difference_strict_attributes(left, right): - return BaseMetadata._difference_strict_attributes(left, right) - class DimCoordMetadata(CoordMetadata): """ diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index a1b1e4647b..4c19dd756b 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides common metadata mixin behaviour. @@ -16,7 +17,7 @@ from .metadata import BaseMetadata -__all__ = ["CFVariableMixin", "LimitedAttributeDict"] +__all__ = ["CFVariableMixin"] def _get_valid_standard_name(name): @@ -52,29 +53,7 @@ def _get_valid_standard_name(name): class LimitedAttributeDict(dict): - """ - A specialised 'dict' subclass, which forbids (errors) certain attribute names. - - Used for the attribute dictionaries of all Iris data objects (that is, - :class:`CFVariableMixin` and its subclasses). - - The "excluded" attributes are those which either :mod:`netCDF4` or Iris intpret and - control with special meaning, which therefore should *not* be defined as custom - 'user' attributes on Iris data objects such as cubes. - - For example : "coordinates", "grid_mapping", "scale_factor". - - The 'forbidden' attributes are those listed in - :data:`iris.common.mixin.LimitedAttributeDict.CF_ATTRS_FORBIDDEN` . - - All the forbidden attributes are amongst those listed in - `Appendix A of the CF Conventions: `_ - -- however, not *all* of them, since not all are interpreted by Iris. - - """ - - #: Attributes with special CF meaning, forbidden in Iris attribute dictionaries. 
- CF_ATTRS_FORBIDDEN = ( + _forbidden_keys = ( "standard_name", "long_name", "units", @@ -99,7 +78,7 @@ def __init__(self, *args, **kwargs): dict.__init__(self, *args, **kwargs) # Check validity of keys for key in self.keys(): - if key in self.CF_ATTRS_FORBIDDEN: + if key in self._forbidden_keys: raise ValueError(f"{key!r} is not a permitted attribute") def __eq__(self, other): @@ -120,12 +99,11 @@ def __ne__(self, other): return not self == other def __setitem__(self, key, value): - if key in self.CF_ATTRS_FORBIDDEN: + if key in self._forbidden_keys: raise ValueError(f"{key!r} is not a permitted attribute") dict.__setitem__(self, key, value) def update(self, other, **kwargs): - """Standard ``dict.update()`` operation.""" # Gather incoming keys keys = [] if hasattr(other, "keys"): @@ -137,7 +115,7 @@ def update(self, other, **kwargs): # Check validity of keys for key in keys: - if key in self.CF_ATTRS_FORBIDDEN: + if key in self._forbidden_keys: raise ValueError(f"{key!r} is not a permitted attribute") dict.update(self, other, **kwargs) diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 83ca630353..8d5d57d4a4 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides the infrastructure to support the analysis, identification and combination of metadata common between two :class:`~iris.cube.Cube` diff --git a/lib/iris/config.py b/lib/iris/config.py index 22fb93a06a..79d141e53f 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides access to Iris-specific configuration values. @@ -27,7 +28,6 @@ The [optional] name of the logger to notify when first imported. ---------- - """ import configparser @@ -36,44 +36,46 @@ import os.path import warnings -import iris.exceptions - def get_logger( name, datefmt=None, fmt=None, level=None, propagate=None, handler=True ): """ - Create a custom class for logging. - Create a :class:`logging.Logger` with a :class:`logging.StreamHandler` and custom :class:`logging.Formatter`. - Parameters - ---------- - name + Args: + + * name: The name of the logger. Typically this is the module filename that owns the logger. - datefmt: optional + + Kwargs: + + * datefmt: The date format string of the :class:`logging.Formatter`. Defaults to ``%d-%m-%Y %H:%M:%S``. - fmt: optional + + * fmt: The additional format string of the :class:`logging.Formatter`. This is appended to the default format string ``%(asctime)s %(name)s %(levelname)s - %(message)s``. - level: optional + + * level: The threshold level of the logger. Defaults to ``INFO``. - propagate: optional + + * propagate: Sets the ``propagate`` attribute of the :class:`logging.Logger`, which determines whether events logged to this logger will be passed to the handlers of higher level loggers. Defaults to ``False``. 
- handler: optional + + * handler: Create and attach a :class:`logging.StreamHandler` to the logger. Defaults to ``True``. - Returns - ------- - :class:`logging.Logger`. + Returns: + A :class:`logging.Logger`. """ if level is None: @@ -115,8 +117,6 @@ def get_logger( # Returns simple string options def get_option(section, option, default=None): """ - Return the option value for the given section. - Returns the option value for the given section, or the default value if the section/option is not present. @@ -130,8 +130,6 @@ def get_option(section, option, default=None): # Returns directory path options def get_dir_option(section, option, default=None): """ - Return the directory path from the given option and section. - Returns the directory path from the given option and section, or returns the given default value if the section/option is not present or does not represent a valid directory. @@ -147,10 +145,7 @@ def get_dir_option(section, option, default=None): "Ignoring config item {!r}:{!r} (section:option) as {!r}" " is not a valid directory path." ) - warnings.warn( - msg.format(section, option, c_path), - category=iris.exceptions.IrisIgnoringWarning, - ) + warnings.warn(msg.format(section, option, c_path)) return path @@ -197,19 +192,20 @@ def __init__(self, conventions_override=None): """ Set up NetCDF processing options for Iris. - Parameters - ---------- - conventions_override : bool, optional + Currently accepted kwargs: + + * conventions_override (bool): Define whether the CF Conventions version (e.g. `CF-1.6`) set when saving a cube to a NetCDF file should be defined by - Iris (the default) or the cube being saved. If `False` - (the default), specifies that Iris should set the + Iris (the default) or the cube being saved. + + If `False` (the default), specifies that Iris should set the CF Conventions version when saving cubes as NetCDF files. If `True`, specifies that the cubes being saved to NetCDF should set the CF Conventions version for the saved NetCDF files. - Examples - -------- + Example usages: + * Specify, for the lifetime of the session, that we want all cubes written to NetCDF to define their own CF Conventions versions:: @@ -255,10 +251,7 @@ def __setattr__(self, name, value): "Attempting to set invalid value {!r} for " "attribute {!r}. Defaulting to {!r}." ) - warnings.warn( - wmsg.format(value, name, good_value), - category=iris.exceptions.IrisDefaultingWarning, - ) + warnings.warn(wmsg.format(value, name, good_value)) value = good_value self.__dict__[name] = value @@ -276,7 +269,6 @@ def _defaults_dict(self): def context(self, **kwargs): """ Allow temporary modification of the options via a context manager. - Accepted kwargs are the same as can be supplied to the Option. """ diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index 87103bf6f1..698b4828f1 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Cube functions for coordinate categorisation. @@ -270,33 +271,20 @@ def _validate_seasons(seasons): return -def _month_year_adjusts(seasons, use_year_at_season_start=False): - """ - Compute the year adjustments required for each month. 
+def _month_year_adjusts(seasons): + """Compute the year adjustments required for each month. - These adjustments ensure that no season spans two years by assigning months - to the **next** year (use_year_at_season_start is False) or the - **previous** year (use_year_at_season_start is True). E.g. Winter - djf: - either assign Dec to the next year, or Jan and Feb to the previous year. + These determine whether the month belongs to a season in the same + year or is in the start of a season that counts towards the next + year. """ - # 1 'slot' for each month, with an extra leading 'slot' because months - # are 1-indexed - January is 1, therefore corresponding to the 2nd - # array index. - month_year_adjusts = np.zeros(13, dtype=int) - + month_year_adjusts = [None, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] for season in seasons: - months = np.array(_months_in_season(season)) - if use_year_at_season_start: - months_to_shift = months < months[0] - year_shift = -1 - else: - # Sending forwards. - months_to_shift = months > months[-1] - year_shift = 1 - indices_to_shift = months[np.flatnonzero(months_to_shift)] - month_year_adjusts[indices_to_shift] = year_shift - + months = _months_in_season(season) + for month in months: + if month > months[-1]: + month_year_adjusts[month] = 1 return month_year_adjusts @@ -395,40 +383,34 @@ def _season_number(coord, value): def add_season_year( - cube, - coord, - name="season_year", - seasons=("djf", "mam", "jja", "son"), - use_year_at_season_start=False, + cube, coord, name="season_year", seasons=("djf", "mam", "jja", "son") ): """ - Add a categorical year-of-season coordinate, with user specified seasons. - - Parameters - ---------- - cube : :class:`iris.cube.Cube` - The cube containing `coord`. The new coord will be added into it. - coord : :class:`iris.coords.Coord` or str - Coordinate in `cube`, or its name, representing time. - name : str, default="season_year" - Name of the created coordinate. - seasons : tuple of str, default=("djf", "mam", "jja", "son") + Add a categorical year-of-season coordinate, with user specified + seasons. + + Args: + + * cube (:class:`iris.cube.Cube`): + The cube containing 'coord'. The new coord will be added into + it. + * coord (:class:`iris.coords.Coord` or string): + Coordinate in 'cube', or its name, representing time. + + Kwargs: + + * name (string): + Name of the created coordinate. Defaults to "season_year". + * seasons (:class:`list` of strings): List of seasons defined by month abbreviations. Each month must appear once and only once. Defaults to standard meteorological - seasons (``djf``, ``mam``, ``jja``, ``son``). - use_year_at_season_start: bool, default=False - Seasons spanning the year boundary (e.g. Winter ``djf``) will belong - fully to the following year by default (e.g. the year of Jan and Feb). - Set to ``True`` for spanning seasons to belong to the preceding - year (e.g. the year of Dec) instead. + seasons ('djf', 'mam', 'jja', 'son'). """ # Check that the seasons are valid. _validate_seasons(seasons) # Define the adjustments to be made to the year. - month_year_adjusts = _month_year_adjusts( - seasons, use_year_at_season_start=use_year_at_season_start - ) + month_year_adjusts = _month_year_adjusts(seasons) # Define a categorisation function. 
def _season_year(coord, value): diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index e62f3fbf0e..edf0c1871b 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Definitions of coordinate systems. @@ -9,15 +10,11 @@ from abc import ABCMeta, abstractmethod from functools import cached_property -import re import warnings import cartopy.crs as ccrs import numpy as np -from iris._deprecation import warn_deprecated -import iris.exceptions - def _arg_default(value, default, cast_as=float): """Apply a default value and type for an optional kwarg.""" @@ -452,7 +449,7 @@ def inverse_flattening(self, value): "the GeogCS object. To change other properties set them explicitly" " or create a new GeogCS instance." ) - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + warnings.warn(wmsg, UserWarning) value = float(value) self._inverse_flattening = value @@ -821,8 +818,7 @@ def as_cartopy_crs(self): warnings.warn( "Discarding false_easting and false_northing that are " - "not used by Cartopy.", - category=iris.exceptions.IrisDefaultingWarning, + "not used by Cartopy." ) return ccrs.Orthographic( @@ -1635,197 +1631,3 @@ def as_cartopy_crs(self): def as_cartopy_projection(self): return self.as_cartopy_crs() - - -class ObliqueMercator(CoordSystem): - """ - A cylindrical map projection, with XY coordinates measured in metres. - - Designed for regions not well suited to :class:`Mercator` or - :class:`TransverseMercator`, as the positioning of the cylinder is more - customisable. - - See Also - -------- - :class:`RotatedMercator` - - """ - - grid_mapping_name = "oblique_mercator" - - def __init__( - self, - azimuth_of_central_line, - latitude_of_projection_origin, - longitude_of_projection_origin, - false_easting=None, - false_northing=None, - scale_factor_at_projection_origin=None, - ellipsoid=None, - ): - """ - Constructs an ObliqueMercator object. - - Parameters - ---------- - azimuth_of_central_line : float - Azimuth of centerline clockwise from north at the center point of - the centre line. - latitude_of_projection_origin : float - The true longitude of the central meridian in degrees. - longitude_of_projection_origin: float - The true latitude of the planar origin in degrees. - false_easting: float, optional - X offset from the planar origin in metres. - Defaults to 0.0 . - false_northing: float, optional - Y offset from the planar origin in metres. - Defaults to 0.0 . - scale_factor_at_projection_origin: float, optional - Scale factor at the central meridian. - Defaults to 1.0 . - ellipsoid: :class:`GeogCS`, optional - If given, defines the ellipsoid. - - Examples - -------- - >>> from iris.coord_systems import GeogCS, ObliqueMercator - >>> my_ellipsoid = GeogCS(6371229.0, None, 0.0) - >>> ObliqueMercator(90.0, -22.0, -59.0, -25000.0, -25000.0, 1., my_ellipsoid) - ObliqueMercator(azimuth_of_central_line=90.0, latitude_of_projection_origin=-22.0, longitude_of_projection_origin=-59.0, false_easting=-25000.0, false_northing=-25000.0, scale_factor_at_projection_origin=1.0, ellipsoid=GeogCS(6371229.0)) - - """ - #: Azimuth of centerline clockwise from north. 
- self.azimuth_of_central_line = float(azimuth_of_central_line) - - #: True latitude of planar origin in degrees. - self.latitude_of_projection_origin = float( - latitude_of_projection_origin - ) - - #: True longitude of planar origin in degrees. - self.longitude_of_projection_origin = float( - longitude_of_projection_origin - ) - - #: X offset from planar origin in metres. - self.false_easting = _arg_default(false_easting, 0) - - #: Y offset from planar origin in metres. - self.false_northing = _arg_default(false_northing, 0) - - #: Scale factor at the central meridian. - self.scale_factor_at_projection_origin = _arg_default( - scale_factor_at_projection_origin, 1.0 - ) - - #: Ellipsoid definition (:class:`GeogCS` or None). - self.ellipsoid = ellipsoid - - def __repr__(self): - return ( - "{!s}(azimuth_of_central_line={!r}, " - "latitude_of_projection_origin={!r}, " - "longitude_of_projection_origin={!r}, false_easting={!r}, " - "false_northing={!r}, scale_factor_at_projection_origin={!r}, " - "ellipsoid={!r})".format( - self.__class__.__name__, - self.azimuth_of_central_line, - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.false_easting, - self.false_northing, - self.scale_factor_at_projection_origin, - self.ellipsoid, - ) - ) - - def as_cartopy_crs(self): - globe = self._ellipsoid_to_globe(self.ellipsoid, None) - - return ccrs.ObliqueMercator( - central_longitude=self.longitude_of_projection_origin, - central_latitude=self.latitude_of_projection_origin, - false_easting=self.false_easting, - false_northing=self.false_northing, - scale_factor=self.scale_factor_at_projection_origin, - azimuth=self.azimuth_of_central_line, - globe=globe, - ) - - def as_cartopy_projection(self): - return self.as_cartopy_crs() - - -class RotatedMercator(ObliqueMercator): - """ - :class:`ObliqueMercator` with ``azimuth_of_central_line=90``. - - As noted in CF versions 1.10 and earlier: - - The Rotated Mercator projection is an Oblique Mercator projection - with azimuth = +90. - - .. deprecated:: 3.8.0 - This coordinate system was introduced as already scheduled for removal - in a future release, since CF version 1.11 onwards now requires use of - :class:`ObliqueMercator` with ``azimuth_of_central_line=90.`` . - Any :class:`RotatedMercator` instances will always be saved to NetCDF - as the ``oblique_mercator`` grid mapping. - - """ - - def __init__( - self, - latitude_of_projection_origin, - longitude_of_projection_origin, - false_easting=None, - false_northing=None, - scale_factor_at_projection_origin=None, - ellipsoid=None, - ): - """ - Constructs a RotatedMercator object. - - Parameters - ---------- - latitude_of_projection_origin : float - The true longitude of the central meridian in degrees. - longitude_of_projection_origin: float - The true latitude of the planar origin in degrees. - false_easting: float, optional - X offset from the planar origin in metres. - Defaults to 0.0 . - false_northing: float, optional - Y offset from the planar origin in metres. - Defaults to 0.0 . - scale_factor_at_projection_origin: float, optional - Scale factor at the central meridian. - Defaults to 1.0 . - ellipsoid: :class:`GeogCS`, optional - If given, defines the ellipsoid. - - """ - message = ( - "iris.coord_systems.RotatedMercator is deprecated, and will be " - "removed in a future release. Instead please use " - "iris.coord_systems.ObliqueMercator with " - "azimuth_of_central_line=90 ." 
- ) - warn_deprecated(message) - - super().__init__( - 90.0, - latitude_of_projection_origin, - longitude_of_projection_origin, - false_easting, - false_northing, - scale_factor_at_projection_origin, - ellipsoid, - ) - - def __repr__(self): - # Remove the azimuth argument from the parent repr. - result = super().__repr__() - result = re.sub(r"azimuth_of_central_line=\d*\.?\d*, ", "", result) - return result diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 8af7ee0c8a..1a6e8d4e6a 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Definitions of coordinates and other dimensional metadata. @@ -36,9 +37,6 @@ import iris.time import iris.util -#: The default value for ignore_axis which controls guess_coord_axis' behaviour -DEFAULT_IGNORE_AXIS = False - class _DimensionalMetadata(CFVariableMixin, metaclass=ABCMeta): """ @@ -863,6 +861,7 @@ def xml_element(self, doc): element.setAttribute( "climatological", str(self.climatological) ) + if self.attributes: attributes_element = doc.createElement("attributes") for name in sorted(self.attributes.keys()): @@ -1595,8 +1594,6 @@ def __init__( self.bounds = bounds self.climatological = climatological - self._ignore_axis = DEFAULT_IGNORE_AXIS - def copy(self, points=None, bounds=None): """ Returns a copy of this coordinate. @@ -1629,10 +1626,6 @@ def copy(self, points=None, bounds=None): # self. new_coord.bounds = bounds - # The state of ignore_axis is controlled by the coordinate rather than - # the metadata manager - new_coord.ignore_axis = self.ignore_axis - return new_coord @classmethod @@ -1652,14 +1645,7 @@ def from_coord(cls, coord): if issubclass(cls, DimCoord): # DimCoord introduces an extra constructor keyword. kwargs["circular"] = getattr(coord, "circular", False) - - new_coord = cls(**kwargs) - - # The state of ignore_axis is controlled by the coordinate rather than - # the metadata manager - new_coord.ignore_axis = coord.ignore_axis - - return new_coord + return cls(**kwargs) @property def points(self): @@ -1751,24 +1737,6 @@ def climatological(self, value): self._metadata_manager.climatological = value - @property - def ignore_axis(self): - """ - A boolean that controls whether guess_coord_axis acts on this - coordinate. - - Defaults to False, and when set to True it will be skipped by - guess_coord_axis. - """ - return self._ignore_axis - - @ignore_axis.setter - def ignore_axis(self, value): - if not isinstance(value, bool): - emsg = "'ignore_axis' can only be set to 'True' or 'False'" - raise ValueError(emsg) - self._ignore_axis = value - def lazy_points(self): """ Return a lazy array representing the coord points. @@ -2089,8 +2057,7 @@ def contiguous_bounds(self): if self.ndim == 1: warnings.warn( "Coordinate {!r} is not bounded, guessing " - "contiguous bounds.".format(self.name()), - category=iris.exceptions.IrisGuessBoundsWarning, + "contiguous bounds.".format(self.name()) ) bounds = self._guess_bounds() elif self.ndim == 2: @@ -2257,10 +2224,7 @@ def serialize(x): "Collapsing a multi-dimensional coordinate. " "Metadata may not be fully descriptive for {!r}." 
) - warnings.warn( - msg.format(self.name()), - category=iris.exceptions.IrisVagueMetadataWarning, - ) + warnings.warn(msg.format(self.name())) else: try: self._sanity_check_bounds() @@ -2270,10 +2234,7 @@ def serialize(x): "Metadata may not be fully descriptive for {!r}. " "Ignoring bounds." ) - warnings.warn( - msg.format(str(exc), self.name()), - category=iris.exceptions.IrisVagueMetadataWarning, - ) + warnings.warn(msg.format(str(exc), self.name())) self.bounds = None else: if not self.is_contiguous(): @@ -2281,10 +2242,7 @@ def serialize(x): "Collapsing a non-contiguous coordinate. " "Metadata may not be fully descriptive for {!r}." ) - warnings.warn( - msg.format(self.name()), - category=iris.exceptions.IrisVagueMetadataWarning, - ) + warnings.warn(msg.format(self.name())) if self.has_bounds(): item = self.core_bounds() @@ -2727,6 +2685,7 @@ def __init__( Will set to True when a climatological time axis is loaded from NetCDF. Always False if no bounds exist. + """ # Configure the metadata manager. self._metadata_manager = metadata_manager_factory(DimCoordMetadata) @@ -3150,7 +3109,7 @@ def __str__(self): def __add__(self, other): # Disable the default tuple behaviour of tuple concatenation - return NotImplemented + raise NotImplementedError() def xml_element(self, doc): """ diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 8aa0b452d5..35e3a903c6 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Classes for representing multi-dimensional data with metadata. @@ -9,20 +10,11 @@ """ from collections import OrderedDict +from collections.abc import Container, Iterable, Iterator, MutableMapping import copy from copy import deepcopy from functools import partial, reduce -import itertools import operator -from typing import ( - Container, - Iterable, - Iterator, - Mapping, - MutableMapping, - Optional, - Union, -) import warnings from xml.dom.minidom import Document import zlib @@ -43,13 +35,12 @@ import iris.aux_factory from iris.common import CFVariableMixin, CubeMetadata, metadata_manager_factory from iris.common.metadata import metadata_filter -from iris.common.mixin import LimitedAttributeDict import iris.coord_systems import iris.coords import iris.exceptions import iris.util -__all__ = ["Cube", "CubeAttrsDict", "CubeList"] +__all__ = ["Cube", "CubeList"] # The XML namespace to use for CubeML documents @@ -799,352 +790,6 @@ def _is_single_item(testee): return isinstance(testee, str) or not isinstance(testee, Iterable) -class CubeAttrsDict(MutableMapping): - """ - A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, - providing unified user access to combined cube "local" and "global" attributes - dictionaries, with the access behaviour of an ordinary (single) dictionary. - - Properties :attr:`globals` and :attr:`locals` are regular - :class:`~iris.common.mixin.LimitedAttributeDict`\\s, which can be accessed and - modified separately. The :class:`CubeAttrsDict` itself contains *no* additional - state, but simply provides a 'combined' view of both global + local attributes. 
- - All the read- and write-type methods, such as ``get()``, ``update()``, ``values()``, - behave according to the logic documented for : :meth:`__getitem__`, - :meth:`__setitem__` and :meth:`__iter__`. - - Notes - ----- - For type testing, ``issubclass(CubeAttrsDict, Mapping)`` is ``True``, but - ``issubclass(CubeAttrsDict, dict)`` is ``False``. - - Examples - -------- - - >>> from iris.cube import Cube - >>> cube = Cube([0]) - >>> # CF defines 'history' as global by default. - >>> cube.attributes.update({"history": "from test-123", "mycode": 3}) - >>> print(cube.attributes) - {'history': 'from test-123', 'mycode': 3} - >>> print(repr(cube.attributes)) - CubeAttrsDict(globals={'history': 'from test-123'}, locals={'mycode': 3}) - - >>> cube.attributes['history'] += ' +added' - >>> print(repr(cube.attributes)) - CubeAttrsDict(globals={'history': 'from test-123 +added'}, locals={'mycode': 3}) - - >>> cube.attributes.locals['history'] = 'per-variable' - >>> print(cube.attributes) - {'history': 'per-variable', 'mycode': 3} - >>> print(repr(cube.attributes)) - CubeAttrsDict(globals={'history': 'from test-123 +added'}, locals={'mycode': 3, 'history': 'per-variable'}) - - """ - - # TODO: Create a 'further topic' / 'tech paper' on NetCDF I/O, including - # discussion of attribute handling. - - def __init__( - self, - combined: Optional[Union[Mapping, str]] = "__unspecified", - locals: Optional[Mapping] = None, - globals: Optional[Mapping] = None, - ): - """ - Create a cube attributes dictionary. - - We support initialisation from a single generic mapping input, using the default - global/local assignment rules explained at :meth:`__setattr__`, or from - two separate mappings. Two separate dicts can be passed in the ``locals`` - and ``globals`` args, **or** via a ``combined`` arg which has its own - ``.globals`` and ``.locals`` properties -- so this allows passing an existing - :class:`CubeAttrsDict`, which will be copied. - - Parameters - ---------- - combined : dict - values to init both 'self.globals' and 'self.locals'. If 'combined' itself - has attributes named 'locals' and 'globals', these are used to update the - respective content (after initially setting the individual ones). - Otherwise, 'combined' is treated as a generic mapping, applied as - ``self.update(combined)``, - i.e. it will set locals and/or globals with the same logic as - :meth:`~iris.cube.CubeAttrsDict.__setitem__` . - locals : dict - initial content for 'self.locals' - globals : dict - initial content for 'self.globals' - - Examples - -------- - - >>> from iris.cube import CubeAttrsDict - >>> # CF defines 'history' as global by default. - >>> CubeAttrsDict({'history': 'data-story', 'comment': 'this-cube'}) - CubeAttrsDict(globals={'history': 'data-story'}, locals={'comment': 'this-cube'}) - - >>> CubeAttrsDict(locals={'history': 'local-history'}) - CubeAttrsDict(globals={}, locals={'history': 'local-history'}) - - >>> CubeAttrsDict(globals={'x': 'global'}, locals={'x': 'local'}) - CubeAttrsDict(globals={'x': 'global'}, locals={'x': 'local'}) - - >>> x1 = CubeAttrsDict(globals={'x': 1}, locals={'y': 2}) - >>> x2 = CubeAttrsDict(x1) - >>> x2 - CubeAttrsDict(globals={'x': 1}, locals={'y': 2}) - - """ - # First initialise locals + globals, defaulting to empty. - self.locals = locals - self.globals = globals - # Update with combined, if present. 
- if not isinstance(combined, str) or combined != "__unspecified": - # Treat a single input with 'locals' and 'globals' properties as an - # existing CubeAttrsDict, and update from its content. - # N.B. enforce deep copying, consistent with general Iris usage. - if hasattr(combined, "globals") and hasattr(combined, "locals"): - # Copy a mapping with globals/locals, like another 'CubeAttrsDict' - self.globals.update(deepcopy(combined.globals)) - self.locals.update(deepcopy(combined.locals)) - else: - # Treat any arbitrary single input value as a mapping (dict), and - # update from it. - self.update(dict(deepcopy(combined))) - - # - # Ensure that the stored local/global dictionaries are "LimitedAttributeDicts". - # - @staticmethod - def _normalise_attrs( - attributes: Optional[Mapping], - ) -> LimitedAttributeDict: - # Convert an input attributes arg into a standard form. - # N.B. content is always a LimitedAttributeDict, and a deep copy of input. - # Allow arg of None, etc. - if not attributes: - attributes = {} - else: - attributes = deepcopy(attributes) - - # Ensure the expected mapping type. - attributes = LimitedAttributeDict(attributes) - return attributes - - @property - def locals(self) -> LimitedAttributeDict: - return self._locals - - @locals.setter - def locals(self, attributes: Optional[Mapping]): - self._locals = self._normalise_attrs(attributes) - - @property - def globals(self) -> LimitedAttributeDict: - return self._globals - - @globals.setter - def globals(self, attributes: Optional[Mapping]): - self._globals = self._normalise_attrs(attributes) - - # - # Provide a serialisation interface - # - def __getstate__(self): - return (self.locals, self.globals) - - def __setstate__(self, state): - self.locals, self.globals = state - - # - # Support comparison -- required because default operation only compares a single - # value at each key. - # - def __eq__(self, other): - # For equality, require both globals + locals to match exactly. - # NOTE: array content works correctly, since 'locals' and 'globals' are always - # iris.common.mixin.LimitedAttributeDict, which gets this right. - other = CubeAttrsDict(other) - result = self.locals == other.locals and self.globals == other.globals - return result - - # - # Provide methods duplicating those for a 'dict', but which are *not* provided by - # MutableMapping, for compatibility with code which expected a cube.attributes to be - # a :class:`~iris.common.mixin.LimitedAttributeDict`. - # The extra required methods are : - # 'copy', 'update', '__ior__', '__or__', '__ror__' and 'fromkeys'. - # - def copy(self): - """ - Return a copy. - - Implemented with deep copying, consistent with general Iris usage. - - """ - return CubeAttrsDict(self) - - def update(self, *args, **kwargs): - """ - Update by adding items from a mapping arg, or keyword-values. - - If the argument is a split dictionary, preserve the local/global nature of its - keys. - """ - if args and hasattr(args[0], "globals") and hasattr(args[0], "locals"): - dic = args[0] - self.globals.update(dic.globals) - self.locals.update(dic.locals) - else: - super().update(*args) - super().update(**kwargs) - - def __or__(self, arg): - """Implement 'or' via 'update'.""" - if not isinstance(arg, Mapping): - return NotImplemented - new_dict = self.copy() - new_dict.update(arg) - return new_dict - - def __ior__(self, arg): - """Implement 'ior' via 'update'.""" - self.update(arg) - return self - - def __ror__(self, arg): - """ - Implement 'ror' via 'update'. 
- - This needs to promote, such that the result is a CubeAttrsDict. - """ - if not isinstance(arg, Mapping): - return NotImplemented - result = CubeAttrsDict(arg) - result.update(self) - return result - - @classmethod - def fromkeys(cls, iterable, value=None): - """ - Create a new object with keys taken from an argument, all set to one value. - - If the argument is a split dictionary, preserve the local/global nature of its - keys. - """ - if hasattr(iterable, "globals") and hasattr(iterable, "locals"): - # When main input is a split-attrs dict, create global/local parts from its - # global/local keys - result = cls( - globals=dict.fromkeys(iterable.globals, value), - locals=dict.fromkeys(iterable.locals, value), - ) - else: - # Create from a dict.fromkeys, using default classification of the keys. - result = cls(dict.fromkeys(iterable, value)) - return result - - # - # The remaining methods are sufficient to generate a complete standard Mapping - # API. See - - # https://docs.python.org/3/reference/datamodel.html#emulating-container-types. - # - - def __iter__(self): - """ - Define the combined iteration order. - - Result is: all global keys, then all local ones, but omitting duplicates. - - """ - # NOTE: this means that in the "summary" view, attributes present in both - # locals+globals are listed first, amongst the globals, even though they appear - # with the *value* from locals. - # Otherwise follows order of insertion, as is normal for dicts. - return itertools.chain( - self.globals.keys(), - (x for x in self.locals.keys() if x not in self.globals), - ) - - def __len__(self): - # Return the number of keys in the 'combined' view. - return len(list(iter(self))) - - def __getitem__(self, key): - """ - Fetch an item from the "combined attributes". - - If the name is present in *both* ``self.locals`` and ``self.globals``, then - the local value is returned. - - """ - if key in self.locals: - store = self.locals - else: - store = self.globals - return store[key] - - def __setitem__(self, key, value): - """ - Assign an attribute value. - - This may be assigned in either ``self.locals`` or ``self.globals``, chosen as - follows: - - * If there is an existing setting in either ``.locals`` or ``.globals``, then - that is updated (i.e. overwritten). - - * If it is present in *both*, only - ``.locals`` is updated. - - * If there is *no* existing attribute, it is usually created in ``.locals``. - **However** a handful of "known normally global" cases, as defined by CF, - go into ``.globals`` instead. - At present these are : ('conventions', 'featureType', 'history', 'title'). - See `CF Conventions, Appendix A: `_ . - - """ - # If an attribute of this name is already present, update that - # (the local one having priority). - if key in self.locals: - store = self.locals - elif key in self.globals: - store = self.globals - else: - # If NO existing attribute, create local unless it is a "known global" one. - from iris.fileformats.netcdf.saver import _CF_GLOBAL_ATTRS - - if key in _CF_GLOBAL_ATTRS: - store = self.globals - else: - store = self.locals - - store[key] = value - - def __delitem__(self, key): - """ - Remove an attribute. - - Delete from both local + global. - - """ - if key in self.locals: - del self.locals[key] - if key in self.globals: - del self.globals[key] - - def __str__(self): - # Print it just like a "normal" dictionary. - # Convert to a normal dict to do that. - return str(dict(self)) - - def __repr__(self): - # Special repr form, showing "real" contents. 
- return f"CubeAttrsDict(globals={self.globals}, locals={self.locals})" - - class Cube(CFVariableMixin): """ A single Iris cube of data and metadata. @@ -1341,8 +986,8 @@ def __init__( self.cell_methods = cell_methods - #: A dictionary for arbitrary Cube metadata. - #: A few keys are restricted - see :class:`CubeAttrsDict`. + #: A dictionary, with a few restricted keys, for arbitrary + #: Cube metadata. self.attributes = attributes # Coords @@ -1400,22 +1045,6 @@ def _names(self): """ return self._metadata_manager._names - # - # Ensure that .attributes is always a :class:`CubeAttrsDict`. - # - @property - def attributes(self) -> CubeAttrsDict: - return super().attributes - - @attributes.setter - def attributes(self, attributes: Optional[Mapping]): - """ - An override to CfVariableMixin.attributes.setter, which ensures that Cube - attributes are stored in a way which distinguishes global + local ones. - - """ - self._metadata_manager.attributes = CubeAttrsDict(attributes or {}) - def _dimensional_metadata(self, name_or_dimensional_metadata): """ Return a single _DimensionalMetadata instance that matches the given @@ -4093,7 +3722,6 @@ def __idiv__(self, other): def __rdiv__(self, other): data = 1 / self.core_data() reciprocal = self.copy(data=data) - reciprocal.units = reciprocal.units**-1 return iris.analysis.maths.multiply(reciprocal, other) __truediv__ = __div__ @@ -4228,10 +3856,7 @@ def collapsed(self, coords, aggregator, **kwargs): ] if lat_match: for coord in lat_match: - warnings.warn( - msg.format(coord.name()), - category=iris.exceptions.IrisUserWarning, - ) + warnings.warn(msg.format(coord.name())) # Determine the dimensions we need to collapse (and those we don't) if aggregator.cell_method == "peak": @@ -4818,8 +4443,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): if coord_.has_bounds(): warnings.warn( "The bounds of coordinate %r were ignored in " - "the rolling window operation." % coord_.name(), - category=iris.exceptions.IrisIgnoringBoundsWarning, + "the rolling window operation." % coord_.name() ) if coord_.ndim != 1: diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index c3e6b6193f..5d3da3349e 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Exceptions specific to the Iris package. @@ -179,212 +180,3 @@ class CannotAddError(ValueError): """Raised when an object (e.g. coord) cannot be added to a :class:`~iris.cube.Cube`.""" pass - - -############################################################################### -# WARNINGS -# Please namespace all warning objects (i.e. prefix with Iris...). - - -class IrisUserWarning(UserWarning): - """ - Base class for :class:`UserWarning`\\ s generated by Iris. - """ - - pass - - -class IrisLoadWarning(IrisUserWarning): - """Any warning relating to loading.""" - - pass - - -class IrisSaveWarning(IrisUserWarning): - """Any warning relating to saving.""" - - pass - - -class IrisCfWarning(IrisUserWarning): - """Any warning relating to :term:`CF Conventions` .""" - - pass - - -class IrisIgnoringWarning(IrisUserWarning): - """ - Any warning that involves an Iris operation not using some information. - - E.g. 
:class:`~iris.aux_factory.AuxCoordFactory` generation disregarding - bounds. - """ - - pass - - -class IrisDefaultingWarning(IrisUserWarning): - """ - Any warning that involves Iris changing invalid/missing information. - - E.g. creating a :class:`~iris.coords.AuxCoord` from an invalid - :class:`~iris.coords.DimCoord` definition. - """ - - pass - - -class IrisVagueMetadataWarning(IrisUserWarning): - """Warnings where object metadata may not be fully descriptive.""" - - pass - - -class IrisUnsupportedPlottingWarning(IrisUserWarning): - """Warnings where support for a plotting module/function is not guaranteed.""" - - pass - - -class IrisImpossibleUpdateWarning(IrisUserWarning): - """ - Warnings where it is not possible to update an object. - - Mainly generated during regridding where the necessary information for - updating an :class:`~iris.aux_factory.AuxCoordFactory` is no longer - present. - """ - - pass - - -class IrisGeometryExceedWarning(IrisUserWarning): - """:mod:`iris.analysis.geometry` warnings about geometry exceeding dimensions.""" - - pass - - -class IrisMaskValueMatchWarning(IrisUserWarning): - """Warnings where the value representing masked data is actually present in data.""" - - pass - - -######## - - -class IrisCfLoadWarning(IrisCfWarning, IrisLoadWarning): - """Any warning relating to both loading and :term:`CF Conventions` .""" - - pass - - -class IrisCfSaveWarning(IrisCfWarning, IrisSaveWarning): - """Any warning relating to both saving and :term:`CF Conventions` .""" - - pass - - -class IrisCfInvalidCoordParamWarning(IrisCfLoadWarning): - """ - Warnings where incorrect information for CF coord construction is in a file. - """ - - pass - - -class IrisCfMissingVarWarning(IrisCfLoadWarning): - """ - Warnings where a CF variable references another variable that is not in the file. - """ - - pass - - -class IrisCfLabelVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): - """ - Warnings where a CF string/label variable is being used inappropriately. - """ - - pass - - -class IrisCfNonSpanningVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): - """ - Warnings where a CF variable is ignored because it does not span the required dimension. - """ - - pass - - -######## - - -class IrisIgnoringBoundsWarning(IrisIgnoringWarning): - """ - Warnings where bounds information has not been used by an Iris operation. - """ - - pass - - -class IrisCannotAddWarning(IrisIgnoringWarning): - """ - Warnings where a member object cannot be added to a :class:`~iris.cube.Cube` . - """ - - pass - - -class IrisGuessBoundsWarning(IrisDefaultingWarning): - """ - Warnings where Iris has filled absent bounds information with a best estimate. - """ - - pass - - -class IrisPpClimModifiedWarning(IrisSaveWarning, IrisDefaultingWarning): - """ - Warnings where a climatology has been modified while saving :term:`Post Processing (PP) Format` . - """ - - pass - - -class IrisFactoryCoordNotFoundWarning(IrisLoadWarning): - """ - Warnings where a referenced factory coord can not be found when loading a variable in :term:`NetCDF Format`. - """ - - pass - - -class IrisNimrodTranslationWarning(IrisLoadWarning): - """ - For unsupported vertical coord types in :mod:`iris.file_formats.nimrod_load_rules`. - - (Pre-dates the full categorisation of Iris UserWarnings). - """ - - pass - - -class IrisUnknownCellMethodWarning(IrisCfLoadWarning): - """ - If a loaded :class:`~iris.coords.CellMethod` is not one the method names known to Iris. - - (Pre-dates the full categorisation of Iris UserWarnings). 
- """ - - pass - - -class IrisSaverFillValueWarning(IrisMaskValueMatchWarning, IrisSaveWarning): - """ - For fill value complications during Iris file saving :term:`NetCDF Format`. - - (Pre-dates the full categorisation of Iris UserWarnings). - """ - - pass diff --git a/lib/iris/experimental/__init__.py b/lib/iris/experimental/__init__.py index 4c7c62b4f5..40ba7fdb66 100644 --- a/lib/iris/experimental/__init__.py +++ b/lib/iris/experimental/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Experimental code can be introduced to Iris through this package. diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index ba4e9e6050..1b6c2d46be 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Wrapper for animating iris cubes using iris or matplotlib plotting functions diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 6fe12ea82a..7c95293469 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index f35a483b01..76c6002d2b 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Regridding functions. @@ -42,7 +43,6 @@ import iris.analysis.cartography import iris.coord_systems import iris.cube -from iris.exceptions import IrisImpossibleUpdateWarning from iris.util import _meshgrid wmsg = ( @@ -538,7 +538,7 @@ def regrid_reference_surface( "Cannot update aux_factory {!r} because of dropped" " coordinates.".format(factory.name()) ) - warnings.warn(msg, category=IrisImpossibleUpdateWarning) + warnings.warn(msg) return result def __call__(self, src_cube): diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index 83e65f89af..fdc23c7bc4 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Support for conservative regridding via ESMPy. diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 785bf43e63..116b340592 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Definitions of how Iris objects should be represented. diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index 604fda38a3..a7cfbf6876 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Routines for putting data on new strata (aka. isosurfaces), often in the Z direction. diff --git a/lib/iris/experimental/ugrid/__init__.py b/lib/iris/experimental/ugrid/__init__.py index 30a934dfba..a3603a5355 100644 --- a/lib/iris/experimental/ugrid/__init__.py +++ b/lib/iris/experimental/ugrid/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Infra-structure for unstructured mesh support, based on diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index ba365aeb1f..2d8d6cc448 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Extensions to Iris' CF variable representation to represent CF UGrid variables. @@ -9,12 +10,15 @@ Eventual destination: :mod:`iris.fileformats.cf`. """ -import warnings +import logging -from ...exceptions import IrisCfLabelVarWarning, IrisCfMissingVarWarning +from ...config import get_logger from ...fileformats import cf from .mesh import Connectivity +# Configure the logger. +logger = get_logger(__name__, propagate=True, handler=False) + class CFUGridConnectivityVariable(cf.CFVariable): """ @@ -46,6 +50,8 @@ class CFUGridConnectivityVariable(cf.CFVariable): def identify(cls, variables, ignore=None, target=None, warn=True): result = {} ignore, target = cls._identify_common(variables, ignore, target) + # TODO: reconsider logging level when we have consistent practice. 
+ log_level = logging.WARNING if warn else logging.DEBUG # Identify all CF-UGRID connectivity variables. for nc_var_name, nc_var in target.items(): @@ -64,10 +70,11 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"{name}, referenced by netCDF variable " f"{nc_var_name}" ) - if warn: - warnings.warn( - message, category=IrisCfMissingVarWarning - ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -81,10 +88,11 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"as a CF-UGRID connectivity - is a " f"CF-netCDF label variable." ) - if warn: - warnings.warn( - message, category=IrisCfLabelVarWarning - ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) return result @@ -123,6 +131,8 @@ class CFUGridAuxiliaryCoordinateVariable(cf.CFVariable): def identify(cls, variables, ignore=None, target=None, warn=True): result = {} ignore, target = cls._identify_common(variables, ignore, target) + # TODO: reconsider logging level when we have consistent practice. + log_level = logging.WARNING if warn else logging.DEBUG # Identify any CF-UGRID-relevant auxiliary coordinate variables. for nc_var_name, nc_var in target.items(): @@ -139,11 +149,11 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"variable {name}, referenced by netCDF " f"variable {nc_var_name}" ) - if warn: - warnings.warn( - message, - category=IrisCfMissingVarWarning, - ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -160,11 +170,11 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"auxiliary coordinate - is a " f"CF-netCDF label variable." ) - if warn: - warnings.warn( - message, - category=IrisCfLabelVarWarning, - ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) return result @@ -195,6 +205,8 @@ class CFUGridMeshVariable(cf.CFVariable): def identify(cls, variables, ignore=None, target=None, warn=True): result = {} ignore, target = cls._identify_common(variables, ignore, target) + # TODO: reconsider logging level when we have consistent practice. + log_level = logging.WARNING if warn else logging.DEBUG # Identify all CF-UGRID mesh variables. all_vars = target == variables @@ -220,10 +232,11 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"Missing CF-UGRID mesh variable {name}, " f"referenced by netCDF variable {nc_var_name}" ) - if warn: - warnings.warn( - message, category=IrisCfMissingVarWarning - ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) else: # Restrict to non-string type i.e. not a # CFLabelVariable. @@ -237,10 +250,11 @@ def identify(cls, variables, ignore=None, target=None, warn=True): f"CF-UGRID mesh - is a CF-netCDF label " f"variable." ) - if warn: - warnings.warn( - message, category=IrisCfLabelVarWarning - ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) return result diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index c2a4b0c563..cfa3935991 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Extensions to Iris' NetCDF loading to allow the construction of @@ -14,15 +15,9 @@ from itertools import groupby from pathlib import Path import threading -import warnings from ...config import get_logger from ...coords import AuxCoord -from ...exceptions import ( - IrisCfWarning, - IrisDefaultingWarning, - IrisIgnoringWarning, -) from ...fileformats._nc_load_rules.helpers import get_attr_units, get_names from ...fileformats.netcdf import loader as nc_loader from ...io import decode_uri, expand_filespecs @@ -39,20 +34,6 @@ logger = get_logger(__name__, propagate=True, handler=False) -class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - -class _WarnComboCfDefaultingIgnoring( - _WarnComboCfDefaulting, IrisIgnoringWarning -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - class ParseUGridOnLoad(threading.local): def __init__(self): """ @@ -369,10 +350,8 @@ def _build_mesh(cf, mesh_var, file_path): ) if cf_role_message: cf_role_message += " Correcting to 'mesh_topology'." - warnings.warn( - cf_role_message, - category=_WarnComboCfDefaulting, - ) + # TODO: reconsider logging level when we have consistent practice. + logger.warning(cf_role_message, extra=dict(cls=None)) if hasattr(mesh_var, "volume_node_connectivity"): topology_dimension = 3 @@ -390,7 +369,8 @@ def _build_mesh(cf, mesh_var, file_path): f" : *Assuming* topology_dimension={topology_dimension}" ", consistent with the attached connectivities." ) - warnings.warn(msg, category=_WarnComboCfDefaulting) + # TODO: reconsider logging level when we have consistent practice. + logger.warning(msg, extra=dict(cls=None)) else: quoted_topology_dimension = mesh_var.topology_dimension if quoted_topology_dimension != topology_dimension: @@ -402,10 +382,8 @@ def _build_mesh(cf, mesh_var, file_path): f"{quoted_topology_dimension}" " -- ignoring this as it is inconsistent." ) - warnings.warn( - msg, - category=_WarnComboCfDefaultingIgnoring, - ) + # TODO: reconsider logging level when we have consistent practice. + logger.warning(msg=msg, extra=dict(cls=None)) node_dimension = None edge_dimension = getattr(mesh_var, "edge_dimension", None) diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 68d208d867..af557c345c 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Iris' data model representation of CF UGrid's Mesh and its constituent parts. diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index bfdcc7e114..44bbe04fe9 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ The common metadata API classes for :mod:`iris.experimental.ugrid.mesh`. diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py index f09740d98c..3c42137905 100644 --- a/lib/iris/experimental/ugrid/save.py +++ b/lib/iris/experimental/ugrid/save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Extensions to Iris' NetCDF saving to allow diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py index 05e60c3ce7..a13a43d3fd 100644 --- a/lib/iris/experimental/ugrid/utils.py +++ b/lib/iris/experimental/ugrid/utils.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Utility operations specific to unstructured data. diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index b74b420802..86b304b82c 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A package for converting cubes to and from specific file formats. diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 76df5d5718..2545bc39ae 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides UK Met Office Fields File (FF) format specific capabilities. @@ -12,11 +13,7 @@ import numpy as np -from iris.exceptions import ( - IrisDefaultingWarning, - IrisLoadWarning, - NotYetImplementedError, -) +from iris.exceptions import NotYetImplementedError from iris.fileformats._ff_cross_references import STASH_TRANS from . 
import pp @@ -121,12 +118,6 @@ REAL_POLE_LON = 5 -class _WarnComboLoadingDefaulting(IrisDefaultingWarning, IrisLoadWarning): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - class Grid: """ An abstract class representing the default/file-level grid @@ -440,8 +431,7 @@ def grid(self): grid_class = NewDynamics warnings.warn( "Staggered grid type: {} not currently interpreted, assuming " - "standard C-grid".format(self.grid_staggering), - category=_WarnComboLoadingDefaulting, + "standard C-grid".format(self.grid_staggering) ) grid = grid_class( self.column_dependent_constants, @@ -564,7 +554,7 @@ def range_order(range1, range2, resolution): "may be incorrect, not having taken into account the " "boundary size." ) - warnings.warn(msg, category=IrisLoadWarning) + warnings.warn(msg) else: range2 = field_dim[0] - res_low range1 = field_dim[0] - halo_dim * res_low @@ -638,8 +628,7 @@ def _adjust_field_for_lbc(self, field): "The LBC has a bdy less than 0. No " "case has previously been seen of " "this, and the decompression may be " - "erroneous.", - category=IrisLoadWarning, + "erroneous." ) field.bzx -= field.bdx * boundary_packing.x_halo field.bzy -= field.bdy * boundary_packing.y_halo @@ -752,8 +741,7 @@ def _extract_field(self): "which has not been explicitly " "handled by the fieldsfile loader." " Assuming the data is on a P grid" - ".".format(stash, subgrid), - category=_WarnComboLoadingDefaulting, + ".".format(stash, subgrid) ) field.x, field.y = grid.vectors(subgrid) @@ -769,18 +757,14 @@ def _extract_field(self): "STASH to grid type mapping. Picking the P " "position as the cell type".format(stash) ) - warnings.warn( - msg, - category=_WarnComboLoadingDefaulting, - ) + warnings.warn(msg) field.bzx, field.bdx = grid.regular_x(subgrid) field.bzy, field.bdy = grid.regular_y(subgrid) field.bplat = grid.pole_lat field.bplon = grid.pole_lon elif no_x or no_y: warnings.warn( - "Partially missing X or Y coordinate values.", - category=IrisLoadWarning, + "Partially missing X or Y coordinate values." ) # Check for LBC fields. @@ -826,9 +810,7 @@ def _extract_field(self): "Input field skipped as PPField creation failed :" " error = {!r}" ) - warnings.warn( - msg.format(str(valerr)), category=IrisLoadWarning - ) + warnings.warn(msg.format(str(valerr))) def __iter__(self): return pp._interpret_fields(self._extract_field()) diff --git a/lib/iris/fileformats/_ff_cross_references.py b/lib/iris/fileformats/_ff_cross_references.py index b060ed42e9..ca41f5257f 100644 --- a/lib/iris/fileformats/_ff_cross_references.py +++ b/lib/iris/fileformats/_ff_cross_references.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Table providing UM grid-code, field-code and pseudolevel-type for (some) stash codes. Used in UM file i/o. diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py index ca2f341249..b102a082df 100644 --- a/lib/iris/fileformats/_nc_load_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Support for cube-specific CF-to-Iris translation operations. diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 7db15d21ac..09237d3f11 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Replacement code for the Pyke rules. @@ -43,7 +44,6 @@ import warnings from iris.config import get_logger -import iris.exceptions import iris.fileformats.cf import iris.fileformats.pp as pp @@ -53,24 +53,6 @@ logger = get_logger(__name__, fmt="[%(funcName)s]") -class _WarnComboCfLoadIgnoring( - iris.exceptions.IrisCfLoadWarning, - iris.exceptions.IrisIgnoringWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - -class _WarnComboLoadIgnoring( - iris.exceptions.IrisLoadWarning, - iris.exceptions.IrisIgnoringWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - def _default_rulenamesfunc(func_name): # A simple default function to deduce the rules-name from an action-name. funcname_prefix = "action_" @@ -155,14 +137,6 @@ def action_default(engine): None, hh.build_geostationary_coordinate_system, ), - hh.CF_GRID_MAPPING_OBLIQUE: ( - None, - hh.build_oblique_mercator_coordinate_system, - ), - hh.CF_GRID_MAPPING_ROTATED_MERCATOR: ( - None, - hh.build_oblique_mercator_coordinate_system, - ), } @@ -497,10 +471,7 @@ def action_formula_type(engine, formula_root_fact): succeed = False rule_name += f"(FAILED - unrecognised formula type = {formula_type!r})" msg = f"Ignored formula of unrecognised type: {formula_type!r}." - warnings.warn( - msg, - category=_WarnComboCfLoadIgnoring, - ) + warnings.warn(msg) if succeed: # Check we don't already have one. existing_type = engine.requires.get("formula_type") @@ -515,10 +486,7 @@ def action_formula_type(engine, formula_root_fact): f"Formula of type ={formula_type!r} " f"overrides another of type ={existing_type!r}.)" ) - warnings.warn( - msg, - category=_WarnComboLoadIgnoring, - ) + warnings.warn(msg) rule_name += f"_{formula_type}" # Set 'requires' info for iris.fileformats.netcdf._load_aux_factory. engine.requires["formula_type"] = formula_type diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index ec7a28777b..497c2a12c9 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. 
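The _WarnCombo... classes removed in the hunks above each inherit from two Iris warning categories at once, so a single emitted warning can be matched by a warnings filter aimed at either parent ("enhances user filtering"). A minimal sketch of that behaviour follows, using stand-in classes rather than the real iris.exceptions hierarchy; all names below are illustrative only and not part of this patch.

import warnings

# Stand-ins for the Iris warning categories referenced in the hunks above;
# the real classes live in iris.exceptions.
class IrisCfWarning(UserWarning):
    pass

class IrisDefaultingWarning(UserWarning):
    pass

class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning):
    """One-off combination of warning classes - enhances user filtering."""

# A filter on either parent category also matches the combined class, because
# the warnings machinery matches categories with issubclass().
warnings.filterwarnings("error", category=IrisCfWarning)
try:
    warnings.warn(
        "cf_role was not 'mesh_topology'", category=_WarnComboCfDefaulting
    )
except IrisCfWarning as err:
    print("caught via parent category:", err)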
diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 7044b3a993..bbf9c660c5 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ All the pure-Python 'helper' functions which were previously included in the Pyke rules database 'fc_rules_cf.krb'. @@ -22,7 +23,6 @@ import pyproj import iris -from iris._deprecation import warn_deprecated import iris.aux_factory from iris.common.mixin import _get_valid_standard_name import iris.coord_systems @@ -124,8 +124,6 @@ CF_GRID_MAPPING_TRANSVERSE = "transverse_mercator" CF_GRID_MAPPING_VERTICAL = "vertical_perspective" CF_GRID_MAPPING_GEOSTATIONARY = "geostationary" -CF_GRID_MAPPING_OBLIQUE = "oblique_mercator" -CF_GRID_MAPPING_ROTATED_MERCATOR = "rotated_mercator" # # CF Attribute Names. @@ -156,7 +154,6 @@ CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" CF_ATTR_GRID_PERSPECTIVE_HEIGHT = "perspective_point_height" CF_ATTR_GRID_SWEEP_ANGLE_AXIS = "sweep_angle_axis" -CF_ATTR_GRID_AZIMUTH_CENT_LINE = "azimuth_of_central_line" CF_ATTR_POSITIVE = "positive" CF_ATTR_STD_NAME = "standard_name" CF_ATTR_LONG_NAME = "long_name" @@ -222,42 +219,6 @@ ] -class _WarnComboIgnoringLoad( - iris.exceptions.IrisIgnoringWarning, - iris.exceptions.IrisLoadWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - -class _WarnComboDefaultingLoad( - iris.exceptions.IrisDefaultingWarning, - iris.exceptions.IrisLoadWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - -class _WarnComboDefaultingCfLoad( - iris.exceptions.IrisCfLoadWarning, - iris.exceptions.IrisDefaultingWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - -class _WarnComboIgnoringCfLoad( - iris.exceptions.IrisIgnoringWarning, - iris.exceptions.IrisCfLoadWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: """ Split a CF cell_methods attribute string into a list of zero or more cell @@ -295,11 +256,7 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: "Cell methods may be incorrectly parsed due to mismatched " "brackets" ) - warnings.warn( - msg, - category=iris.exceptions.IrisCfLoadWarning, - stacklevel=2, - ) + warnings.warn(msg, UserWarning, stacklevel=2) if bracket_depth > 0 and ind in name_start_inds: name_start_inds.remove(ind) @@ -318,21 +275,14 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: msg = ( f"Failed to fully parse cell method string: {nc_cell_methods}" ) - warnings.warn( - msg, category=iris.exceptions.IrisCfLoadWarning, stacklevel=2 - ) + warnings.warn(msg, UserWarning, stacklevel=2) continue nc_cell_methods_matches.append(nc_cell_method_match) return nc_cell_methods_matches -class UnknownCellMethodWarning(iris.exceptions.IrisUnknownCellMethodWarning): - """ - Backwards compatible form of :class:`iris.exceptions.IrisUnknownCellMethodWarning`. - """ - - # TODO: remove at the next major release. 
+class UnknownCellMethodWarning(Warning): pass @@ -370,7 +320,7 @@ def parse_cell_methods(nc_cell_methods): msg = "NetCDF variable contains unknown cell method {!r}" warnings.warn( msg.format("{}".format(method_words[0])), - category=UnknownCellMethodWarning, + UnknownCellMethodWarning, ) d[_CM_METHOD] = method name = d[_CM_NAME] @@ -439,6 +389,7 @@ def parse_cell_methods(nc_cell_methods): ################################################################################ def build_cube_metadata(engine): """Add the standard meta data to the cube.""" + cf_var = engine.cf_var cube = engine.cube @@ -482,13 +433,10 @@ def build_cube_metadata(engine): # Set the cube global attributes. for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): try: - cube.attributes.globals[str(attr_name)] = attr_value + cube.attributes[str(attr_name)] = attr_value except ValueError as e: - msg = "Skipping disallowed global attribute {!r}: {}" - warnings.warn( - msg.format(attr_name, str(e)), - category=_WarnComboIgnoringLoad, - ) + msg = "Skipping global attribute {!r}: {}" + warnings.warn(msg.format(attr_name, str(e))) ################################################################################ @@ -531,7 +479,7 @@ def _get_ellipsoid(cf_grid_var): "applied. To apply the datum when loading, use the " "iris.FUTURE.datum_support flag." ) - warnings.warn(wmsg, category=FutureWarning, stacklevel=14) + warnings.warn(wmsg, FutureWarning, stacklevel=14) datum = None if datum is not None: @@ -564,10 +512,7 @@ def build_rotated_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0 ) if north_pole_latitude is None or north_pole_longitude is None: - warnings.warn( - "Rotated pole position is not fully specified", - category=iris.exceptions.IrisCfLoadWarning, - ) + warnings.warn("Rotated pole position is not fully specified") north_pole_grid_lon = getattr( cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 @@ -896,58 +841,6 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): return cs -################################################################################ -def build_oblique_mercator_coordinate_system(engine, cf_grid_var): - """ - Create an oblique mercator coordinate system from the CF-netCDF - grid mapping variable. - - """ - ellipsoid = _get_ellipsoid(cf_grid_var) - - azimuth_of_central_line = getattr( - cf_grid_var, CF_ATTR_GRID_AZIMUTH_CENT_LINE, None - ) - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None - ) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None - ) - scale_factor_at_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None - ) - false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - kwargs = dict( - azimuth_of_central_line=azimuth_of_central_line, - latitude_of_projection_origin=latitude_of_projection_origin, - longitude_of_projection_origin=longitude_of_projection_origin, - scale_factor_at_projection_origin=scale_factor_at_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=ellipsoid, - ) - - # Handle the alternative form noted in CF: rotated mercator. 
- grid_mapping_name = getattr(cf_grid_var, CF_ATTR_GRID_MAPPING_NAME) - candidate_systems = dict( - oblique_mercator=iris.coord_systems.ObliqueMercator, - rotated_mercator=iris.coord_systems.RotatedMercator, - ) - if grid_mapping_name == "rotated_mercator": - message = ( - "Iris will stop loading the rotated_mercator grid mapping name in " - "a future release, in accordance with CF version 1.11 . Instead " - "please use oblique_mercator with azimuth_of_central_line = 90 ." - ) - warn_deprecated(message) - del kwargs[CF_ATTR_GRID_AZIMUTH_CENT_LINE] - - cs = candidate_systems[grid_mapping_name](**kwargs) - return cs - - ################################################################################ def get_attr_units(cf_var, attributes): attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) @@ -966,10 +859,7 @@ def get_attr_units(cf_var, attributes): msg = "Ignoring netCDF variable {!r} invalid units {!r}".format( cf_var.cf_name, attr_units ) - warnings.warn( - msg, - category=_WarnComboIgnoringCfLoad, - ) + warnings.warn(msg) attributes["invalid_units"] = attr_units attr_units = UNKNOWN_UNIT_STRING @@ -1058,8 +948,7 @@ def get_cf_bounds_var(cf_coord_var): if attr_bounds is not None and attr_climatology is not None: warnings.warn( "Ignoring climatology in favour of bounds attribute " - "on NetCDF variable {!r}.".format(cf_coord_var.cf_name), - category=_WarnComboIgnoringCfLoad, + "on NetCDF variable {!r}.".format(cf_coord_var.cf_name) ) return cf_bounds_var, climatological @@ -1118,10 +1007,7 @@ def build_dimension_coordinate( if ma.is_masked(points_data): points_data = ma.filled(points_data) msg = "Gracefully filling {!r} dimension coordinate masked points" - warnings.warn( - msg.format(str(cf_coord_var.cf_name)), - category=_WarnComboDefaultingLoad, - ) + warnings.warn(msg.format(str(cf_coord_var.cf_name))) # Get any coordinate bounds. cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) @@ -1131,10 +1017,7 @@ def build_dimension_coordinate( if ma.is_masked(bounds_data): bounds_data = ma.filled(bounds_data) msg = "Gracefully filling {!r} dimension coordinate masked bounds" - warnings.warn( - msg.format(str(cf_coord_var.cf_name)), - category=_WarnComboDefaultingLoad, - ) + warnings.warn(msg.format(str(cf_coord_var.cf_name))) # Handle transposed bounds where the vertex dimension is not # the last one. Test based on shape to support different # dimension names. @@ -1199,10 +1082,7 @@ def build_dimension_coordinate( "Failed to create {name!r} dimension coordinate: {error}\n" "Gracefully creating {name!r} auxiliary coordinate instead." ) - warnings.warn( - msg.format(name=str(cf_coord_var.cf_name), error=e_msg), - category=_WarnComboDefaultingCfLoad, - ) + warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) coord = iris.coords.AuxCoord( points_data, standard_name=standard_name, @@ -1217,10 +1097,7 @@ def build_dimension_coordinate( try: cube.add_aux_coord(coord, data_dims) except iris.exceptions.CannotAddError as e_msg: - warnings.warn( - coord_skipped_msg.format(error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, - ) + warnings.warn(coord_skipped_msg.format(error=e_msg)) coord_skipped = True else: # Add the dimension coordinate to the cube. @@ -1231,10 +1108,7 @@ def build_dimension_coordinate( # Scalar coords are placed in the aux_coords container. 
cube.add_aux_coord(coord, data_dims) except iris.exceptions.CannotAddError as e_msg: - warnings.warn( - coord_skipped_msg.format(error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, - ) + warnings.warn(coord_skipped_msg.format(error=e_msg)) coord_skipped = True if not coord_skipped: @@ -1312,10 +1186,7 @@ def build_auxiliary_coordinate( cube.add_aux_coord(coord, data_dims) except iris.exceptions.CannotAddError as e_msg: msg = "{name!r} coordinate not added to Cube: {error}" - warnings.warn( - msg.format(name=str(cf_coord_var.cf_name), error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, - ) + warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) else: # Make a list with names, stored on the engine, so we can find them all later. engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) @@ -1366,10 +1237,7 @@ def build_cell_measures(engine, cf_cm_var): cube.add_cell_measure(cell_measure, data_dims) except iris.exceptions.CannotAddError as e_msg: msg = "{name!r} cell measure not added to Cube: {error}" - warnings.warn( - msg.format(name=str(cf_cm_var.cf_name), error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, - ) + warnings.warn(msg.format(name=str(cf_cm_var.cf_name), error=e_msg)) else: # Make a list with names, stored on the engine, so we can find them all later. engine.cube_parts["cell_measures"].append( @@ -1418,10 +1286,7 @@ def build_ancil_var(engine, cf_av_var): cube.add_ancillary_variable(av, data_dims) except iris.exceptions.CannotAddError as e_msg: msg = "{name!r} ancillary variable not added to Cube: {error}" - warnings.warn( - msg.format(name=str(cf_av_var.cf_name), error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, - ) + warnings.warn(msg.format(name=str(cf_av_var.cf_name), error=e_msg)) else: # Make a list with names, stored on the engine, so we can find them all later. engine.cube_parts["ancillary_variables"].append( @@ -1638,8 +1503,7 @@ def has_supported_mercator_parameters(engine, cf_name): ): warnings.warn( "It does not make sense to provide both " - '"scale_factor_at_projection_origin" and "standard_parallel".', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, + '"scale_factor_at_projection_origin" and "standard_parallel".' ) is_valid = False @@ -1669,10 +1533,7 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): latitude_of_projection_origin != 90 and latitude_of_projection_origin != -90 ): - warnings.warn( - '"latitude_of_projection_origin" must be +90 or -90.', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, - ) + warnings.warn('"latitude_of_projection_origin" must be +90 or -90.') is_valid = False if ( @@ -1681,16 +1542,14 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): ): warnings.warn( "It does not make sense to provide both " - '"scale_factor_at_projection_origin" and "standard_parallel".', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, + '"scale_factor_at_projection_origin" and "standard_parallel".' ) is_valid = False if scale_factor_at_projection_origin is None and standard_parallel is None: warnings.warn( 'One of "scale_factor_at_projection_origin" and ' - '"standard_parallel" is required.', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, + '"standard_parallel" is required.' 
) is_valid = False diff --git a/lib/iris/fileformats/_pp_lbproc_pairs.py b/lib/iris/fileformats/_pp_lbproc_pairs.py index 86a5f9381d..7ad6f21848 100644 --- a/lib/iris/fileformats/_pp_lbproc_pairs.py +++ b/lib/iris/fileformats/_pp_lbproc_pairs.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. import itertools diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index 031a5e7483..b313500de7 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. r""" The purpose of this module is to provide utilities for the identification of multi-dimensional structure in a flat sequence of homogeneous objects. @@ -416,7 +417,7 @@ def filter_strides_of_length(length): # If we are to build another dimension on top of this possible # structure, we need to compute the stride that would be # needed for that dimension. - next_stride = np.prod( + next_stride = np.product( [struct.size for (_, struct) in potential] ) diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 26a1f307b2..4dcd5ce6aa 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides ABF (and ABL) file format capabilities. diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 86960003db..2ed01846bd 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides the capability to load netCDF files and interpret them according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. 
@@ -22,7 +23,6 @@ import numpy as np import numpy.ma as ma -import iris.exceptions from iris.fileformats.netcdf import _thread_safe_nc import iris.util @@ -280,10 +280,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF ancillary data variable %r, referenced by netCDF variable %r" - warnings.warn( - message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, - ) + warnings.warn(message % (name, nc_var_name)) else: result[name] = CFAncillaryDataVariable( name, variables[name] @@ -326,10 +323,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF auxiliary coordinate variable %r, referenced by netCDF variable %r" - warnings.warn( - message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, - ) + warnings.warn(message % (name, nc_var_name)) else: # Restrict to non-string type i.e. not a CFLabelVariable. if not _is_str_dtype(variables[name]): @@ -375,10 +369,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF boundary variable %r, referenced by netCDF variable %r" - warnings.warn( - message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, - ) + warnings.warn(message % (name, nc_var_name)) else: result[name] = CFBoundaryVariable( name, variables[name] @@ -450,10 +441,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF climatology variable %r, referenced by netCDF variable %r" - warnings.warn( - message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, - ) + warnings.warn(message % (name, nc_var_name)) else: result[name] = CFClimatologyVariable( name, variables[name] @@ -594,8 +582,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if warn: message = "Missing CF-netCDF formula term variable %r, referenced by netCDF variable %r" warnings.warn( - message % (variable_name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + message % (variable_name, nc_var_name) ) else: if variable_name not in result: @@ -659,10 +646,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF grid mapping variable %r, referenced by netCDF variable %r" - warnings.warn( - message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, - ) + warnings.warn(message % (name, nc_var_name)) else: result[name] = CFGridMappingVariable( name, variables[name] @@ -701,10 +685,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if name not in variables: if warn: message = "Missing CF-netCDF label variable %r, referenced by netCDF variable %r" - warnings.warn( - message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, - ) + warnings.warn(message % (name, nc_var_name)) else: # Register variable, but only allow string type. 
var = variables[name] @@ -876,8 +857,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): if warn: message = "Missing CF-netCDF measure variable %r, referenced by netCDF variable %r" warnings.warn( - message % (variable_name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + message % (variable_name, nc_var_name) ) else: result[variable_name] = CFMeasureVariable( @@ -1089,8 +1069,7 @@ def __init__(self, file_source, warn=False, monotonic=False): ]: warnings.warn( "Optimise CF-netCDF loading by converting data from NetCDF3 " - 'to NetCDF4 file format using the "nccopy" command.', - category=iris.exceptions.IrisLoadWarning, + 'to NetCDF4 file format using the "nccopy" command.' ) self._check_monotonic = monotonic @@ -1231,10 +1210,7 @@ def _build(cf_variable): cf_variable.dimensions, ) ) - warnings.warn( - msg, - category=iris.exceptions.IrisCfNonSpanningVarWarning, - ) + warnings.warn(msg) # Build CF data variable relationships. if isinstance(cf_variable, CFDataVariable): @@ -1285,10 +1261,7 @@ def _build(cf_variable): cf_variable.dimensions, ) ) - warnings.warn( - msg, - category=iris.exceptions.IrisCfNonSpanningVarWarning, - ) + warnings.warn(msg) # Add the CF group to the variable. cf_variable.cf_group = cf_group diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index e3a4493fe8..50c02e4d04 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`. diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index 16f71a940f..9a779cc92d 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Provides NAME file format loading capabilities.""" diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 7cc7c61d81..0189a8806f 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""NAME file format loading functions.""" import collections @@ -16,7 +17,7 @@ import iris.coord_systems from iris.coords import AuxCoord, CellMethod, DimCoord import iris.cube -from iris.exceptions import IrisLoadWarning, TranslationError +from iris.exceptions import TranslationError import iris.util EARTH_RADIUS = 6371229.0 @@ -272,9 +273,7 @@ def _parse_units(units): try: units = cf_units.Unit(units) except ValueError: - warnings.warn( - "Unknown units: {!r}".format(units), category=IrisLoadWarning - ) + warnings.warn("Unknown units: {!r}".format(units)) units = cf_units.Unit(None) return units @@ -612,9 +611,7 @@ def _build_cell_methods(av_or_ints, coord): else: cell_method = None msg = "Unknown {} statistic: {!r}. Unable to create cell method." - warnings.warn( - msg.format(coord, av_or_int), category=IrisLoadWarning - ) + warnings.warn(msg.format(coord, av_or_int)) cell_methods.append(cell_method) # NOTE: this can be a None return cell_methods diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index cf550fbb57..b696b200ff 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -1,9 +1,11 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ -Support loading and saving NetCDF files using CF conventions for metadata interpretation. +Module to support the loading and saving of NetCDF files, also using the CF conventions +for metadata interpretation. See : `NetCDF User's Guide `_ and `netCDF4 python module `_. diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py index 82edbf202e..15ac117a8b 100644 --- a/lib/iris/fileformats/netcdf/_dask_locks.py +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -1,53 +1,50 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Module containing code to create locks enabling dask workers to co-operate. -This matter is complicated by needing different solutions for different dask -scheduler types, i.e. local 'threads' scheduler, local 'processes' or -distributed. +This matter is complicated by needing different solutions for different dask scheduler +types, i.e. local 'threads' scheduler, local 'processes' or distributed. -In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a -netCDF4.Dataset targeting an output file, and creates a Saver.file_write_lock -object to serialise write-accesses to the file from dask tasks : All dask-task -file writes go via a "iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, -which also contains a link to the Saver.file_write_lock, and uses it to prevent -workers from fouling each other. 
+In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a netCDF4.Dataset +targeting an output file, and creates a Saver.file_write_lock object to serialise +write-accesses to the file from dask tasks : All dask-task file writes go via a +"iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, which also contains a link +to the Saver.file_write_lock, and uses it to prevent workers from fouling each other. For each chunk written, the NetCDFWriteProxy acquires the common per-file lock; -opens a Dataset on the file; performs a write to the relevant variable; closes -the Dataset and then releases the lock. This process is obviously very similar -to what the NetCDFDataProxy does for reading lazy chunks. +opens a Dataset on the file; performs a write to the relevant variable; closes the +Dataset and then releases the lock. This process is obviously very similar to what the +NetCDFDataProxy does for reading lazy chunks. -For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The -workers (threads) execute tasks which contain a NetCDFWriteProxy, as above. -All of those contain the common lock, and this is simply **the same object** -for all workers, since they share an address space. +For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The workers +(threads) execute tasks which contain a NetCDFWriteProxy, as above. All of those +contain the common lock, and this is simply **the same object** for all workers, since +they share an address space. For a distributed scheduler, the Saver.lock is a `distributed.Lock()` which is identified with the output filepath. This is distributed to the workers by -serialising the task function arguments, which will include the -NetCDFWriteProxy. A worker behaves like a process, though it may execute on a -remote machine. When a distributed.Lock is deserialised to reconstruct the -worker task, this creates an object that communicates with the scheduler. -These objects behave as a single common lock, as they all have the same string -'identity', so the scheduler implements inter-process communication so that -they can mutually exclude each other. +serialising the task function arguments, which will include the NetCDFWriteProxy. +A worker behaves like a process, though it may execute on a remote machine. When a +distributed.Lock is deserialised to reconstruct the worker task, this creates an object +that communicates with the scheduler. These objects behave as a single common lock, +as they all have the same string 'identity', so the scheduler implements inter-process +communication so that they can mutually exclude each other. It is also *conceivable* that multiple processes could write to the same file in -parallel, if the operating system supports it. However, this also requires -that the libnetcdf C library is built with parallel access option, which is -not common. With the "ordinary" libnetcdf build, a process which attempts to -open for writing a file which is _already_ open for writing simply raises an -access error. In any case, Iris netcdf saver will not support this mode of -operation, at present. +parallel, if the operating system supports it. However, this also requires that the +libnetcdf C library is built with parallel access option, which is not common. +With the "ordinary" libnetcdf build, a process which attempts to open for writing a file +which is _already_ open for writing simply raises an access error. +In any case, Iris netcdf saver will not support this mode of operation, at present. 
We don't currently support a local "processes" type scheduler. If we did, the -behaviour should be very similar to a distributed scheduler. It would need to -use some other serialisable shared-lock solution in place of -'distributed.Lock', which requires a distributed scheduler to function. +behaviour should be very similar to a distributed scheduler. It would need to use some +other serialisable shared-lock solution in place of 'distributed.Lock', which requires +a distributed scheduler to function. """ import threading @@ -59,7 +56,7 @@ # A dedicated error class, allowing filtering and testing of errors raised here. -class DaskSchedulerTypeError(ValueError): # noqa: D101 +class DaskSchedulerTypeError(ValueError): pass @@ -86,13 +83,11 @@ def get_dask_array_scheduler_type(): Returns one of 'distributed', 'threads' or 'processes'. The return value is a valid argument for dask.config.set(scheduler=). - This cannot distinguish between distributed local and remote clusters -- - both of those simply return 'distributed'. + This cannot distinguish between distributed local and remote clusters -- both of + those simply return 'distributed'. - Notes - ----- - This takes account of how dask is *currently* configured. It will - be wrong if the config changes before the compute actually occurs. + NOTE: this takes account of how dask is *currently* configured. It will be wrong + if the config changes before the compute actually occurs. """ if dask_scheduler_is_distributed(): @@ -120,12 +115,8 @@ def get_worker_lock(identity: str): """ Return a mutex Lock which can be shared by multiple Dask workers. - The type of Lock generated depends on the dask scheduler type, which must - therefore be set up before this is called. - - Parameters - ---------- - identity : str + The type of Lock generated depends on the dask scheduler type, which must therefore + be set up before this is called. """ scheduler_type = get_dask_array_scheduler_type() diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 5abffb896f..21c697acab 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Module to ensure all calls to the netCDF4 library are thread-safe. @@ -24,10 +25,7 @@ class _ThreadSafeWrapper(ABC): """ - Contains a netCDF4 class instance, ensuring wrapping all API calls. - - Contains a netCDF4 class instance, ensuring wrapping all API calls within - _GLOBAL_NETCDF4_LOCK. + Contains a netCDF4 class instance, ensuring wrapping all API calls within _GLOBAL_NETCDF4_LOCK. Designed to 'gate keep' all the instance's API calls, but allowing the same API as if working directly with the instance itself. @@ -120,7 +118,7 @@ class VariableWrapper(_ThreadSafeWrapper): def setncattr(self, *args, **kwargs) -> None: """ - Call netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. + Calls netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. Only defined explicitly in order to get some mocks to work. 
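The _dask_locks module docstring rewrapped above describes one shared lock per output file: every task that writes a chunk acquires the lock, re-opens the dataset, writes its slice, closes the file and releases the lock. The sketch below shows that acquire/open/write/close cycle for the local-threads case only; write_chunk, file_write_lock and PATH are made-up names for illustration, not the Saver or NetCDFWriteProxy API.

import threading

import netCDF4
import numpy as np

# One lock per output file, shared by every writing task (threads-scheduler case).
file_write_lock = threading.Lock()
PATH = "example_out.nc"

# Prepare a target file and variable up front, analogous to what the saver does.
with netCDF4.Dataset(PATH, "w") as ds:
    ds.createDimension("x", 8)
    ds.createVariable("data", "f8", ("x",))

def write_chunk(start, values):
    # Acquire the shared lock, re-open the dataset, write one slice, then
    # close and release, so concurrent writers never touch the file together.
    with file_write_lock:
        ds = netCDF4.Dataset(PATH, "r+")
        try:
            ds.variables["data"][start : start + len(values)] = values
        finally:
            ds.close()

threads = [
    threading.Thread(target=write_chunk, args=(i * 4, np.arange(4) + i * 4))
    for i in range(2)
]
for t in threads:
    t.start()
for t in threads:
    t.join()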
""" @@ -144,12 +142,11 @@ def dimensions(self) -> typing.List[str]: def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]: """ - Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK. + Calls netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. - Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, - returning DimensionWrappers. The original returned netCDF4.Dimensions - are simply replaced with their respective DimensionWrappers, ensuring - that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. + The original returned netCDF4.Dimensions are simply replaced with their + respective DimensionWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: dimensions_ = list( @@ -175,12 +172,11 @@ class GroupWrapper(_ThreadSafeWrapper): @property def dimensions(self) -> typing.Dict[str, DimensionWrapper]: """ - Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. - Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, - returning DimensionWrappers. The original returned netCDF4.Dimensions - are simply replaced with their respective DimensionWrappers, ensuring - that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. + The original returned netCDF4.Dimensions are simply replaced with their + respective DimensionWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: dimensions_ = self._contained_instance.dimensions @@ -191,13 +187,11 @@ def dimensions(self) -> typing.Dict[str, DimensionWrapper]: def createDimension(self, *args, **kwargs) -> DimensionWrapper: """ - Call createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + Calls createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. - Call createDimension() from netCDF4.Group/Dataset within - _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. The original returned - netCDF4.Dimension is simply replaced with its respective - DimensionWrapper, ensuring that downstream calls are also performed - within _GLOBAL_NETCDF4_LOCK. + The original returned netCDF4.Dimension is simply replaced with its + respective DimensionWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: new_dimension = self._contained_instance.createDimension( @@ -211,12 +205,11 @@ def createDimension(self, *args, **kwargs) -> DimensionWrapper: @property def variables(self) -> typing.Dict[str, VariableWrapper]: """ - Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. - Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, - returning VariableWrappers. The original returned netCDF4.Variables - are simply replaced with their respective VariableWrappers, ensuring - that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. + The original returned netCDF4.Variables are simply replaced with their + respective VariableWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. 
""" with _GLOBAL_NETCDF4_LOCK: variables_ = self._contained_instance.variables @@ -226,13 +219,11 @@ def variables(self) -> typing.Dict[str, VariableWrapper]: def createVariable(self, *args, **kwargs) -> VariableWrapper: """ - Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + Calls createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. - Call createVariable() from netCDF4.Group/Dataset within - _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. The original - returned netCDF4.Variable is simply replaced with its respective - VariableWrapper, ensuring that downstream calls are also performed - within _GLOBAL_NETCDF4_LOCK. + The original returned netCDF4.Variable is simply replaced with its + respective VariableWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: new_variable = self._contained_instance.createVariable( @@ -244,10 +235,7 @@ def get_variables_by_attributes( self, *args, **kwargs ) -> typing.List[VariableWrapper]: """ - Call get_variables_by_attributes() from netCDF4.Group/Dataset. - - Call get_variables_by_attributes() from netCDF4.Group/Dataset - within_GLOBAL_NETCDF4_LOCK, returning VariableWrappers. + Calls get_variables_by_attributes() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. The original returned netCDF4.Variables are simply replaced with their respective VariableWrappers, ensuring that downstream calls are @@ -267,10 +255,7 @@ def get_variables_by_attributes( @property def groups(self): """ - Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. - - Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, - returning GroupWrappers. + Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers. The original returned netCDF4.Groups are simply replaced with their respective GroupWrappers, ensuring that downstream calls are @@ -283,10 +268,7 @@ def groups(self): @property def parent(self): """ - Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. - - Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, - returning a GroupWrapper. + Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper. The original returned netCDF4.Group is simply replaced with its respective GroupWrapper, ensuring that downstream calls are @@ -298,13 +280,11 @@ def parent(self): def createGroup(self, *args, **kwargs): """ - Call createGroup() from netCDF4.Group/Dataset. + Calls createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. - Call createGroup() from netCDF4.Group/Dataset within - _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. The original returned - netCDF4.Group is simply replaced with its respective GroupWrapper, - ensuring that downstream calls are also performed within - _GLOBAL_NETCDF4_LOCK. + The original returned netCDF4.Group is simply replaced with its + respective GroupWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: new_group = self._contained_instance.createGroup(*args, **kwargs) @@ -325,12 +305,11 @@ class DatasetWrapper(GroupWrapper): @classmethod def fromcdl(cls, *args, **kwargs): """ - Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK. + Calls netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper. 
- Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, - returning a DatasetWrapper. The original returned netCDF4.Dataset is - simply replaced with its respective DatasetWrapper, ensuring that - downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. + The original returned netCDF4.Dataset is simply replaced with its + respective DatasetWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. """ with _GLOBAL_NETCDF4_LOCK: instance = cls.CONTAINED_CLASS.fromcdl(*args, **kwargs) @@ -351,13 +330,12 @@ def __init__(self, shape, dtype, path, variable_name, fill_value): @property def ndim(self): - # noqa: D102 return len(self.shape) def __getitem__(self, keys): # Using a DatasetWrapper causes problems with invalid ID's and the - # netCDF4 library, presumably because __getitem__ gets called so many - # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. + # netCDF4 library, presumably because __getitem__ gets called so many + # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. with _GLOBAL_NETCDF4_LOCK: dataset = netCDF4.Dataset(self.path) try: @@ -386,14 +364,11 @@ def __setstate__(self, state): class NetCDFWriteProxy: """ - An object mimicking the data access of a netCDF4.Variable. - - The "opposite" of a NetCDFDataProxy : An object mimicking the data access - of a netCDF4.Variable, but where the data is to be ***written to***. + The "opposite" of a NetCDFDataProxy : An object mimicking the data access of a + netCDF4.Variable, but where the data is to be ***written to***. - It encapsulates the netcdf file and variable which are actually to be - written to. This opens the file each time, to enable writing the data - chunk, then closes it. + It encapsulates the netcdf file and variable which are actually to be written to. + This opens the file each time, to enable writing the data chunk, then closes it. TODO: could be improved with a caching scheme, but this just about works. """ diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 84e04c1589..20d255ea44 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -1,9 +1,11 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ -Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. +Module to support the loading of Iris cubes from NetCDF files, also using the CF +conventions for metadata interpretation. See : `NetCDF User's Guide `_ and `netCDF4 python module `_. @@ -11,12 +13,7 @@ Also : `CF Conventions `_. """ -from collections.abc import Iterable, Mapping -from contextlib import contextmanager -from copy import deepcopy -from enum import Enum, auto -import threading -from typing import Union +from collections.abc import Iterable import warnings import numpy as np @@ -53,15 +50,6 @@ NetCDFDataProxy = _thread_safe_nc.NetCDFDataProxy -class _WarnComboIgnoringBoundsLoad( - iris.exceptions.IrisIgnoringBoundsWarning, - iris.exceptions.IrisLoadWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - def _actions_engine(): # Return an 'actions engine', which provides a pyke-rules-like interface to # the core cf translation code. 
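The _thread_safe_nc docstrings reworded in the hunks above all describe one pattern: every call into the netCDF4 library runs inside a single module-level lock, and any object returned is re-wrapped so that downstream calls stay inside the lock as well. A simplified sketch of that gate-keeping idea follows; LockedProxy and _GLOBAL_LOCK are hypothetical stand-ins, not the real _ThreadSafeWrapper implementation.

import threading

_GLOBAL_LOCK = threading.Lock()  # stand-in for _GLOBAL_NETCDF4_LOCK

class LockedProxy:
    """Delegate attribute access to a contained object under a global lock."""

    def __init__(self, contained):
        # Store the wrapped instance; all other access goes via __getattr__.
        self._contained = contained

    def __getattr__(self, name):
        with _GLOBAL_LOCK:
            value = getattr(self._contained, name)
        if callable(value):
            # Wrap method access so the call itself also runs under the lock.
            def locked_call(*args, **kwargs):
                with _GLOBAL_LOCK:
                    return value(*args, **kwargs)
            return locked_call
        return value

# Usage sketch: wrap any non-thread-safe object, e.g. a netCDF4.Dataset.
class NotThreadSafe:
    def greet(self, who):
        return f"hello {who}"

proxy = LockedProxy(NotThreadSafe())
print(proxy.greet("iris"))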
@@ -149,6 +137,7 @@ def _actions_activation_stats(engine, cf_name): def _set_attributes(attributes, key, value): """Set attributes dictionary, converting unicode strings appropriately.""" + if isinstance(value, str): try: attributes[str(key)] = str(value) @@ -160,8 +149,6 @@ def _set_attributes(attributes, key, value): def _add_unused_attributes(iris_object, cf_var): """ - Populate the attributes of a cf element with the "unused" attributes. - Populate the attributes of a cf element with the "unused" attributes from the associated CF-netCDF variable. That is, all those that aren't CF reserved terms. @@ -172,13 +159,8 @@ def attribute_predicate(item): return item[0] not in _CF_ATTRS tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) - attrs_dict = iris_object.attributes - if hasattr(attrs_dict, "locals"): - # Treat cube attributes (i.e. a CubeAttrsDict) as a special case. - # These attrs are "local" (i.e. on the variable), so record them as such. - attrs_dict = attrs_dict.locals for attr_name, attr_value in tmpvar: - _set_attributes(attrs_dict, attr_name, attr_value) + _set_attributes(iris_object.attributes, attr_name, attr_value) def _get_actual_dtype(cf_var): @@ -209,7 +191,6 @@ def _get_cf_var_data(cf_var, filename): unnecessarily slow + wasteful of memory. """ - global CHUNK_CONTROL if hasattr(cf_var, "_data_array"): # The variable is not an actual netCDF4 file variable, but an emulating # object with an attached data array (either numpy or dask), which can be @@ -226,8 +207,6 @@ def _get_cf_var_data(cf_var, filename): else: # Get lazy chunked data out of a cf variable. - # Creates Dask wrappers around data arrays for any cube components which - # can have lazy values, e.g. Cube, Coord, CellMeasure, AuxiliaryVariable. dtype = _get_actual_dtype(cf_var) # Make a data-proxy that mimics array access and can fetch from the file. @@ -241,59 +220,21 @@ def _get_cf_var_data(cf_var, filename): ) # Get the chunking specified for the variable : this is either a shape, or # maybe the string "contiguous". - if CHUNK_CONTROL.mode is ChunkControl.Modes.AS_DASK: - result = as_lazy_data(proxy, chunks=None, dask_chunking=True) - else: - chunks = cf_var.cf_data.chunking() - # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. - if chunks == "contiguous": - if ( - CHUNK_CONTROL.mode is ChunkControl.Modes.FROM_FILE - and isinstance( - cf_var, iris.fileformats.cf.CFDataVariable - ) - ): - raise KeyError( - f"{cf_var.cf_name} does not contain pre-existing chunk specifications." - f" Instead, you might wish to use CHUNK_CONTROL.set(), or just use default" - f" behaviour outside of a context manager. " - ) - # Equivalent to chunks=None, but value required by chunking control - chunks = list(cf_var.shape) - - # Modify the chunking in the context of an active chunking control. - # N.B. settings specific to this named var override global ('*') ones. - dim_chunks = CHUNK_CONTROL.var_dim_chunksizes.get( - cf_var.cf_name - ) or CHUNK_CONTROL.var_dim_chunksizes.get("*") - dims = cf_var.cf_data.dimensions - if CHUNK_CONTROL.mode is ChunkControl.Modes.FROM_FILE: - dims_fixed = np.ones(len(dims), dtype=bool) - elif not dim_chunks: - dims_fixed = None - else: - # Modify the chunks argument, and pass in a list of 'fixed' dims, for - # any of our dims which are controlled. 
- dims_fixed = np.zeros(len(dims), dtype=bool) - for i_dim, dim_name in enumerate(dims): - dim_chunksize = dim_chunks.get(dim_name) - if dim_chunksize: - if dim_chunksize == -1: - chunks[i_dim] = cf_var.shape[i_dim] - else: - chunks[i_dim] = dim_chunksize - dims_fixed[i_dim] = True - if dims_fixed is None: - dims_fixed = [dims_fixed] - result = as_lazy_data( - proxy, chunks=chunks, dims_fixed=tuple(dims_fixed) - ) + chunks = cf_var.cf_data.chunking() + # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. + if chunks == "contiguous": + chunks = None + + # Return a dask array providing deferred access. + result = as_lazy_data(proxy, chunks=chunks) + return result class _OrderedAddableList(list): """ A custom container object for actions recording. + Used purely in actions debugging, to accumulate a record of which actions were activated. @@ -316,18 +257,6 @@ def add(self, msg): def _load_cube(engine, cf, cf_var, filename): - global CHUNK_CONTROL - - # Translate dimension chunk-settings specific to this cube (i.e. named by - # it's data-var) into global ones, for the duration of this load. - # Thus, by default, we will create any AuxCoords, CellMeasures et al with - # any per-dimension chunksizes specified for the cube. - these_settings = CHUNK_CONTROL.var_dim_chunksizes.get(cf_var.cf_name, {}) - with CHUNK_CONTROL.set(**these_settings): - return _load_cube_inner(engine, cf, cf_var, filename) - - -def _load_cube_inner(engine, cf, cf_var, filename): from iris.cube import Cube """Create the cube associated with the CF-netCDF data variable.""" @@ -398,7 +327,10 @@ def fix_attributes_all_elements(role_name): def _load_aux_factory(engine, cube): - """Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory.""" + """ + Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory. + + """ formula_type = engine.requires.get("formula_type") if formula_type in [ "atmosphere_sigma_coordinate", @@ -420,8 +352,7 @@ def coord_from_term(term): return coord warnings.warn( "Unable to find coordinate for variable " - "{!r}".format(name), - category=iris.exceptions.IrisFactoryCoordNotFoundWarning, + "{!r}".format(name) ) if formula_type == "atmosphere_sigma_coordinate": @@ -462,10 +393,7 @@ def coord_from_term(term): coord_p0.name() ) ) - warnings.warn( - msg, - category=_WarnComboIgnoringBoundsLoad, - ) + warnings.warn(msg) coord_a = coord_from_term("a") if coord_a is not None: if coord_a.units.is_unknown(): @@ -524,10 +452,9 @@ def _translate_constraints_to_var_callback(constraints): """ Translate load constraints into a simple data-var filter function, if possible. - Returns - ------- - function : (cf_var:CFDataVariable) - bool, or None. + Returns: + * function(cf_var:CFDataVariable): --> bool, + or None. For now, ONLY handles a single NameConstraint with no 'STASH' component. @@ -567,24 +494,25 @@ def inner(cf_datavar): def load_cubes(file_sources, callback=None, constraints=None): """ - Load cubes from a list of NetCDF filenames/OPeNDAP URLs. + Loads cubes from a list of NetCDF filenames/OPeNDAP URLs. - Parameters - ---------- - file_sources : str or list + Args: + + * file_sources (string/list): One or more NetCDF filenames/OPeNDAP URLs to load from. OR open datasets. - callback : function, optional + Kwargs: + + * callback (callable function): Function which can be passed on to :func:`iris.io.run_callback`. - Returns - ------- - Generator of loaded NetCDF :class:`iris.cube.Cube`. + Returns: + Generator of loaded NetCDF :class:`iris.cube.Cube`. 
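Both sides of the _get_cf_var_data hunk above end the same way: a data proxy that knows only its shape, dtype and how to index the file is handed to a lazy-array constructor together with a chunk specification. A hedged sketch of that deferred-access idea using plain dask.array is shown below; ArrayProxy is a made-up stand-in for Iris' NetCDFDataProxy, and the chunk numbers are arbitrary.

import dask.array as da
import numpy as np

class ArrayProxy:
    """Minimal array-like proxy: shape, dtype, ndim plus __getitem__."""

    def __init__(self, data):
        self._data = data
        self.shape = data.shape
        self.dtype = data.dtype
        self.ndim = data.ndim

    def __getitem__(self, keys):
        # A real proxy would open the file here; we just index the held array.
        return self._data[keys]

proxy = ArrayProxy(np.arange(12.0).reshape(3, 4))
# Build a lazy array over the proxy with an explicit chunk shape.
lazy = da.from_array(proxy, chunks=(1, 4))
print(lazy.chunks)           # ((1, 1, 1), (4,))
print(lazy.sum().compute())  # 66.0, computed only on demand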
""" # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded - # into standard behaviour. + # into standard behaviour. # Deferred import to avoid circular imports. from iris.experimental.ugrid.cf import CFUGridReader from iris.experimental.ugrid.load import ( @@ -656,10 +584,7 @@ def load_cubes(file_sources, callback=None, constraints=None): try: _load_aux_factory(engine, cube) except ValueError as e: - warnings.warn( - "{}".format(e), - category=iris.exceptions.IrisLoadWarning, - ) + warnings.warn("{}".format(e)) # Perform any user registered callback function. cube = run_callback(callback, cube, cf_var, file_source) @@ -669,168 +594,3 @@ def load_cubes(file_sources, callback=None, constraints=None): continue yield cube - - -class ChunkControl(threading.local): - class Modes(Enum): - DEFAULT = auto() - FROM_FILE = auto() - AS_DASK = auto() - - def __init__(self, var_dim_chunksizes=None): - """ - Provide user control of Dask chunking. - - The NetCDF loader is controlled by the single instance of this: the - :data:`~iris.fileformats.netcdf.loader.CHUNK_CONTROL` object. - - A chunk size can be set for a specific (named) file dimension, when - loading specific (named) variables, or for all variables. - - When a selected variable is a CF data-variable, which loads as a - :class:`~iris.cube.Cube`, then the given dimension chunk size is *also* - fixed for all variables which are components of that :class:`~iris.cube.Cube`, - i.e. any :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, - :class:`~iris.coords.AncillaryVariable` etc. - This can be overridden, if required, by variable-specific settings. - - For this purpose, :class:`~iris.experimental.ugrid.mesh.MeshCoord` and - :class:`~iris.experimental.ugrid.mesh.Connectivity` are not - :class:`~iris.cube.Cube` components, and chunk control on a - :class:`~iris.cube.Cube` data-variable will not affect them. - - """ - self.var_dim_chunksizes = var_dim_chunksizes or {} - self.mode = self.Modes.DEFAULT - - @contextmanager - def set( - self, - var_names: Union[str, Iterable[str]] = None, - **dimension_chunksizes: Mapping[str, int], - ) -> None: - """ - Control the Dask chunk sizes applied to NetCDF variables during loading. - - Parameters - ---------- - var_names : str or list of str, default=None - apply the `dimension_chunksizes` controls only to these variables, - or when building :class:`~iris.cube.Cube`\\ s from these data variables. - If ``None``, settings apply to all loaded variables. - dimension_chunksizes : dict of {str: int} - Kwargs specifying chunksizes for dimensions of file variables. - Each key-value pair defines a chunk size for a named file - dimension, e.g. ``{'time': 10, 'model_levels':1}``. - Values of ``-1`` will lock the chunk size to the full size of that - dimension. - - Notes - ----- - This function acts as a context manager, for use in a ``with`` block. - - >>> import iris - >>> from iris.fileformats.netcdf.loader import CHUNK_CONTROL - >>> with CHUNK_CONTROL.set("air_temperature", time=180, latitude=-1): - ... cube = iris.load(iris.sample_data_path("E1_north_america.nc"))[0] - - When `var_names` is present, the chunk size adjustments are applied - only to the selected variables. However, for a CF data variable, this - extends to all components of the (raw) :class:`~iris.cube.Cube` created - from it. - - **Un**-adjusted dimensions have chunk sizes set in the 'usual' way. 
- That is, according to the normal behaviour of - :func:`iris._lazy_data.as_lazy_data`, which is: chunk size is based on - the file variable chunking, or full variable shape; this is scaled up - or down by integer factors to best match the Dask default chunk size, - i.e. the setting configured by - ``dask.config.set({'array.chunk-size': '250MiB'})``. - - """ - old_mode = self.mode - old_var_dim_chunksizes = deepcopy(self.var_dim_chunksizes) - if var_names is None: - var_names = ["*"] - elif isinstance(var_names, str): - var_names = [var_names] - try: - for var_name in var_names: - # Note: here we simply treat '*' as another name. - # A specific name match should override a '*' setting, but - # that is implemented elsewhere. - if not isinstance(var_name, str): - msg = ( - "'var_names' should be an iterable of strings, " - f"not {var_names!r}." - ) - raise ValueError(msg) - dim_chunks = self.var_dim_chunksizes.setdefault(var_name, {}) - for dim_name, chunksize in dimension_chunksizes.items(): - if not ( - isinstance(dim_name, str) - and isinstance(chunksize, int) - ): - msg = ( - "'dimension_chunksizes' kwargs should be a dict " - f"of `str: int` pairs, not {dimension_chunksizes!r}." - ) - raise ValueError(msg) - dim_chunks[dim_name] = chunksize - yield - finally: - self.var_dim_chunksizes = old_var_dim_chunksizes - self.mode = old_mode - - @contextmanager - def from_file(self) -> None: - """ - Ensures the chunk sizes are loaded in from NetCDF file variables. - - Raises - ------ - KeyError - If any NetCDF data variables - those that become - :class:`~iris.cube.Cube`\\ s - do not specify chunk sizes. - - Notes - ----- - This function acts as a context manager, for use in a ``with`` block. - """ - old_mode = self.mode - old_var_dim_chunksizes = deepcopy(self.var_dim_chunksizes) - try: - self.mode = self.Modes.FROM_FILE - yield - finally: - self.mode = old_mode - self.var_dim_chunksizes = old_var_dim_chunksizes - - @contextmanager - def as_dask(self) -> None: - """ - Relies on Dask :external+dask:doc:`array` to control chunk sizes. - - Notes - ----- - This function acts as a context manager, for use in a ``with`` block. - """ - old_mode = self.mode - old_var_dim_chunksizes = deepcopy(self.var_dim_chunksizes) - try: - self.mode = self.Modes.AS_DASK - yield - finally: - self.mode = old_mode - self.var_dim_chunksizes = old_var_dim_chunksizes - - -# Note: the CHUNK_CONTROL object controls chunk sizing in the -# :meth:`_get_cf_var_data` method. -# N.B. :meth:`_load_cube` also modifies this when loading each cube, -# introducing an additional context in which any cube-specific settings are -# 'promoted' into being global ones. - -#: The global :class:`ChunkControl` object providing user-control of Dask chunking -#: when Iris loads NetCDF files. -CHUNK_CONTROL: ChunkControl = ChunkControl() diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index b0bff313e9..c0cfd3d10b 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1,10 +1,9 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ -Module to support the saving of Iris cubes to a NetCDF file. 
- Module to support the saving of Iris cubes to a NetCDF file, also using the CF conventions for metadata interpretation. @@ -29,7 +28,6 @@ from dask.delayed import Delayed import numpy as np -from iris._deprecation import warn_deprecated from iris._lazy_data import _co_realise_lazy_arrays, is_lazy_data from iris.aux_factory import ( AtmosphereSigmaFactory, @@ -104,9 +102,6 @@ # UKMO specific attributes that should not be global. _UKMO_DATA_ATTRS = ["STASH", "um_stash_source", "ukmo__process_flags"] -# TODO: whenever we advance to CF-1.11 we should then discuss a completion date -# for the deprecation of Rotated Mercator in coord_systems.py and -# _nc_load_rules/helpers.py . CF_CONVENTIONS_VERSION = "CF-1.7" _FactoryDefn = collections.namedtuple( @@ -162,15 +157,6 @@ } -class _WarnComboMaskSave( - iris.exceptions.IrisMaskValueMatchWarning, - iris.exceptions.IrisSaveWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - class CFNameCoordMap: """Provide a simple CF name to CF coordinate mapping.""" @@ -180,19 +166,19 @@ def __init__(self): self._map = [] def append(self, name, coord): - """Append the given name and coordinate pair to the mapping. + """ + Append the given name and coordinate pair to the mapping. - Parameters - ---------- - name: + Args: + + * name: CF name of the associated coordinate. - coord: + * coord: The coordinate of the associated CF name. - Returns - ------- - None. + Returns: + None. """ self._map.append(CFNameCoordMap._Map(name, coord)) @@ -200,24 +186,26 @@ def append(self, name, coord): @property def names(self): """Return all the CF names.""" + return [pair.name for pair in self._map] @property def coords(self): """Return all the coordinates.""" + return [pair.coord for pair in self._map] def name(self, coord): - """Return the CF name, given a coordinate, or None if not recognised. + """ + Return the CF name, given a coordinate, or None if not recognised. - Parameters - ---------- - coord: + Args: + + * coord: The coordinate of the associated CF name. - Returns - ------- - Coordinate or None. + Returns: + Coordinate or None. """ result = None @@ -228,16 +216,17 @@ def name(self, coord): return result def coord(self, name): - """Return the coordinate, given a CF name, or None if not recognised. + """ + Return the coordinate, given a CF name, or None if not recognised. - Parameters - ---------- - name: + Args: + + * name: CF name of the associated coordinate, or None if not recognised. - Returns - ------- - CF name or None. + Returns: + CF name or None. + """ result = None for pair in self._map: @@ -249,8 +238,6 @@ def coord(self, name): def _bytes_if_ascii(string): """ - Convert string to a byte string (str in py2k, bytes in py3k). - Convert the given string to a byte string (str in py2k, bytes in py3k) if the given string can be encoded to ascii, else maintain the type of the inputted string. @@ -269,8 +256,6 @@ def _bytes_if_ascii(string): def _setncattr(variable, name, attribute): """ - Put the given attribute on the given netCDF4 Data type. - Put the given attribute on the given netCDF4 Data type, casting attributes as we go to bytes rather than unicode. @@ -323,17 +308,12 @@ def _data_fillvalue_check(arraylib, data, check_value): return is_masked, contains_value -class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning): - """Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`.""" - - # TODO: remove at the next major release. 
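[Editorial sketch, not part of the patch] The CFNameCoordMap helper documented in this hunk is a simple two-way name/coordinate mapping; a small illustrative round trip of its append/name/coord API (import path taken from the saver module shown above):

    from iris.coords import DimCoord
    from iris.fileformats.netcdf.saver import CFNameCoordMap

    mapping = CFNameCoordMap()
    latitude = DimCoord([0.0, 10.0], standard_name="latitude", units="degrees")
    mapping.append("latitude", latitude)

    # Look-ups work in both directions and return None for unrecognised input.
    assert mapping.name(latitude) == "latitude"
    assert mapping.coord("latitude") is latitude
    assert mapping.coord("unknown") is None
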
+class SaverFillValueWarning(UserWarning): pass def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): """ - Work out whether there was a possible or actual fill-value collision. - From the given information, work out whether there was a possible or actual fill-value collision, and if so construct a warning. @@ -345,12 +325,12 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): whether the data array was masked contains_fill_value : bool whether the data array contained the fill-value - warn : bool, optional + warn : bool if True, also issue any resulting warning immediately. Returns ------- - None or :class:`Warning` + None or :class:`Warning` If not None, indicates a known or possible problem with filling """ @@ -379,10 +359,7 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): ) if warn and result is not None: - warnings.warn( - result, - category=_WarnComboMaskSave, - ) + warnings.warn(result) return result @@ -391,15 +368,15 @@ class Saver: def __init__(self, filename, netcdf_format, compute=True): """ - Manage saving netcdf files. + A manager for saving netcdf files. Parameters ---------- - filename : str or netCDF4.Dataset + filename : string or netCDF4.Dataset Name of the netCDF file to save the cube. OR a writeable object supporting the :class:`netCF4.Dataset` api. - netcdf_format : str + netcdf_format : string Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. @@ -550,53 +527,60 @@ def write( fill_value=None, ): """ - Wrap for saving cubes to a NetCDF file. + Wrapper for saving cubes to a NetCDF file. - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (:class:`iris.cube.Cube`): A :class:`iris.cube.Cube` to be saved to a netCDF file. - local_keys : iterable of str + + Kwargs: + + * local_keys (iterable of strings): An interable of cube attribute keys. Any cube attributes with matching keys will become attributes on the data variable rather than global attributes. - .. Note:: - - Has no effect if :attr:`iris.FUTURE.save_split_attrs` is ``True``. - - unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord` + * unlimited_dimensions (iterable of strings and/or + :class:`iris.coords.Coord` objects): List of coordinate names (or coordinate objects) corresponding to coordinate dimensions of `cube` to save with the NetCDF dimension variable length 'UNLIMITED'. By default, no unlimited dimensions are saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED' dimensions. - zlib : bool + + * zlib (bool): If `True`, the data will be compressed in the netCDF file using gzip compression (default `False`). - complevel : int + + * complevel (int): An integer between 1 and 9 describing the level of compression desired (default 4). Ignored if `zlib=False`. - shuffle : bool + + * shuffle (bool): If `True`, the HDF5 shuffle filter will be applied before compressing the data (default `True`). This significantly improves compression. Ignored if `zlib=False`. - fletcher32 : bool + + * fletcher32 (bool): If `True`, the Fletcher32 HDF5 checksum algorithm is activated to detect errors. Default `False`. - contiguous : bool + + * contiguous (bool): If `True`, the variable data is stored contiguously on disk. Default `False`. Setting to `True` for a variable with an unlimited dimension will trigger an error. 
- chunksizes : tuple of int + + * chunksizes (tuple of int): Used to manually specify the HDF5 chunksizes for each dimension of the variable. A detailed discussion of HDF chunking and I/O - performance is available - `here `__. + performance is available here: + https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html. Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. - endian : str + + * endian (string): Used to control whether the data is stored in little or big endian format on disk. Possible values are 'little', 'big' or 'native' (default). The library will automatically handle endian conversions @@ -604,7 +588,8 @@ def write( on a computer with the opposite format as the one used to create the file, there may be some performance advantage to be gained by setting the endian-ness. - least_significant_digit : int + + * least_significant_digit (int): If `least_significant_digit` is specified, variable data will be truncated (quantized). In conjunction with `zlib=True` this produces 'lossy', but significantly more efficient compression. For @@ -612,16 +597,17 @@ def write( using `numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits` is determined so that a precision of 0.1 is retained (in this case `bits=4`). From - `here `__: + http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml: "least_significant_digit -- power of ten of the smallest decimal place in unpacked data that is a reliable value". Default is `None`, or no quantization, or 'lossless' compression. - packing : type or str or dict or list - A numpy integer datatype (signed or unsigned) or a string that - describes a numpy integer dtype(i.e. 'i2', 'short', 'u4') or a - dict of packing parameters as described below. This provides - support for netCDF data packing as described - `here `__. + + * packing (type or string or dict or list): A numpy integer datatype + (signed or unsigned) or a string that describes a numpy integer + dtype(i.e. 'i2', 'short', 'u4') or a dict of packing parameters as + described below. This provides support for netCDF data packing as + described in + https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values If this argument is a type (or type string), appropriate values of scale_factor and add_offset will be automatically calculated based on `cube.data` and possible masking. For more control, pass a dict @@ -631,25 +617,22 @@ def write( manually using a dict to avoid this. The default is `None`, in which case the datatype is determined from the cube and no packing will occur. - fill_value: + + * fill_value: The value to use for the `_FillValue` attribute on the netCDF variable. If `packing` is specified the value of `fill_value` should be in the domain of the packed data. - Returns - ------- - None. + Returns: + None. - Notes - ----- - The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, - `chunksizes` and `endian` keywords are silently ignored for netCDF - 3 files that do not use HDF5. + .. note:: + + The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, + `chunksizes` and `endian` keywords are silently ignored for netCDF + 3 files that do not use HDF5. 
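[Editorial sketch, not part of the patch] As the write docstring above (and the save docstring later in this patch) suggests, different per-cube settings are applied by driving the Saver context manager directly. A brief sketch with illustrative cube and file names, combining the zlib and packing keywords described above:

    import numpy as np
    from iris.cube import Cube
    from iris.fileformats.netcdf import Saver

    big = Cube(np.zeros((100, 100)), long_name="big_field")
    small = Cube(np.linspace(250.0, 310.0, 12), long_name="small_field", units="K")

    with Saver("mixed_settings.nc", "NETCDF4") as sman:
        sman.write(big, zlib=True, complevel=9)  # compress the large cube
        sman.write(small, packing="i2")          # pack the small one to int16
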
""" - # TODO: when iris.FUTURE.save_split_attrs defaults to True, we can deprecate the - # "local_keys" arg, and finally remove it when we finally remove the - # save_split_attrs switch. if unlimited_dimensions is None: unlimited_dimensions = [] @@ -726,23 +709,20 @@ def write( # aux factory in the cube. self._add_aux_factories(cube, cf_var_cube, cube_dimensions) - if not iris.FUTURE.save_split_attrs: - # In the "old" way, we update global attributes as we go. - # Add data variable-only attribute names to local_keys. - if local_keys is None: - local_keys = set() - else: - local_keys = set(local_keys) - local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS) - - # Add global attributes taking into account local_keys. - cube_attributes = cube.attributes - global_attributes = { - k: v - for k, v in cube_attributes.items() - if (k not in local_keys and k.lower() != "conventions") - } - self.update_global_attributes(global_attributes) + # Add data variable-only attribute names to local_keys. + if local_keys is None: + local_keys = set() + else: + local_keys = set(local_keys) + local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS) + + # Add global attributes taking into account local_keys. + global_attributes = { + k: v + for k, v in cube.attributes.items() + if (k not in local_keys and k.lower() != "conventions") + } + self.update_global_attributes(global_attributes) if cf_profile_available: cf_patch = iris.site_configuration.get("cf_patch") @@ -753,12 +733,10 @@ def write( msg = "cf_profile is available but no {} defined.".format( "cf_patch" ) - warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) + warnings.warn(msg) @staticmethod def check_attribute_compliance(container, data_dtype): - """Check attributte complliance.""" - def _coerce_value(val_attr, val_attr_value, data_dtype): val_attr_tmp = np.array(val_attr_value, dtype=data_dtype) if (val_attr_tmp != val_attr_value).any(): @@ -790,19 +768,16 @@ def _coerce_value(val_attr, val_attr_value, data_dtype): container.attributes[val_attr] = new_val def update_global_attributes(self, attributes=None, **kwargs): - """Update the CF global attributes. - + """ Update the CF global attributes based on the provided iterable/dictionary and/or keyword arguments. - Parameters - ---------- - attributes : dict or iterable of key, value pairs + Args: + + * attributes (dict or iterable of key, value pairs): CF global attributes to be updated. + """ - # TODO: when when iris.FUTURE.save_split_attrs is removed, this routine will - # only be called once: it can reasonably be renamed "_set_global_attributes", - # and the 'kwargs' argument can be removed. if attributes is not None: # Handle sequence e.g. [('fruit', 'apple'), ...]. if not hasattr(attributes, "keys"): @@ -817,18 +792,23 @@ def update_global_attributes(self, attributes=None, **kwargs): def _create_cf_dimensions( self, cube, dimension_names, unlimited_dimensions=None ): - """Create the CF-netCDF data dimensions. + """ + Create the CF-netCDF data dimensions. - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (:class:`iris.cube.Cube`): A :class:`iris.cube.Cube` in which to lookup coordinates. - unlimited_dimensions : iterable of strings and/or :class:`iris.coords.Coord` objects): + + Kwargs: + + * unlimited_dimensions (iterable of strings and/or + :class:`iris.coords.Coord` objects): List of coordinates to make unlimited (None by default). - Returns - ------- - None. + Returns: + None. 
+ """ unlimited_dim_names = [] if unlimited_dimensions is not None: @@ -855,8 +835,6 @@ def _create_cf_dimensions( def _add_mesh(self, cube_or_mesh): """ - Add the cube's mesh, and all related variables to the dataset. - Add the cube's mesh, and all related variables to the dataset. Includes all the mesh-element coordinate and connectivity variables. @@ -865,16 +843,17 @@ def _add_mesh(self, cube_or_mesh): Here, we do *not* add the relevant referencing attributes to the data-variable, because we want to create the data-variable later. - Parameters - ---------- - cube_or_mesh : :class:`iris.cube.Cube`or :class:`iris.experimental.ugrid.Mesh` + Args: + + * cube_or_mesh (:class:`iris.cube.Cube` + or :class:`iris.experimental.ugrid.Mesh`): The Cube or Mesh being saved to the netCDF file. - Returns - ------- - cf_mesh_name : str or None + Returns: + * cf_mesh_name (string or None): The name of the mesh variable created, or None if the cube does not have a mesh. + """ cf_mesh_name = None @@ -992,8 +971,6 @@ def _add_inner_related_vars( self, cube, cf_var_cube, dimension_names, coordlike_elements ): """ - Create a set of variables for aux-coords, ancillaries or cell-measures. - Create a set of variables for aux-coords, ancillaries or cell-measures, and attach them to the parent data variable. @@ -1015,7 +992,7 @@ def _add_inner_related_vars( for element in sorted( coordlike_elements, key=lambda element: element.name() ): - # Reuse, or create, the associated CF-netCDF variable. + # Re-use, or create, the associated CF-netCDF variable. cf_name = self._name_coord_map.name(element) if cf_name is None: # Not already present : create it @@ -1038,16 +1015,17 @@ def _add_inner_related_vars( def _add_aux_coords(self, cube, cf_var_cube, dimension_names): """ - Add aux. coordinate to the dataset and associate with the data variable. + Add aux. coordinate to the dataset and associate with the data variable - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (:class:`iris.cube.Cube`): A :class:`iris.cube.Cube` to be saved to a netCDF file. - cf_var_cube : :class:`netcdf.netcdf_variable` + * cf_var_cube (:class:`netcdf.netcdf_variable`): cf variable cube representation. - dimension_names : list + * dimension_names (list): Names associated with the dimensions of the cube. + """ from iris.experimental.ugrid.mesh import ( Mesh, @@ -1079,16 +1057,17 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names): def _add_cell_measures(self, cube, cf_var_cube, dimension_names): """ - Add cell measures to the dataset and associate with the data variable. + Add cell measures to the dataset and associate with the data variable - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (:class:`iris.cube.Cube`): A :class:`iris.cube.Cube` to be saved to a netCDF file. - cf_var_cube : :class:`netcdf.netcdf_variable` + * cf_var_cube (:class:`netcdf.netcdf_variable`): cf variable cube representation. - dimension_names : list + * dimension_names (list): Names associated with the dimensions of the cube. + """ return self._add_inner_related_vars( cube, @@ -1099,16 +1078,18 @@ def _add_cell_measures(self, cube, cf_var_cube, dimension_names): def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names): """ - Add ancillary variables measures to the dataset and associate with the data variable. 
+ Add ancillary variables measures to the dataset and associate with the + data variable - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (:class:`iris.cube.Cube`): A :class:`iris.cube.Cube` to be saved to a netCDF file. - cf_var_cube : :class:`netcdf.netcdf_variable` + * cf_var_cube (:class:`netcdf.netcdf_variable`): cf variable cube representation. - dimension_names : list + * dimension_names (list): Names associated with the dimensions of the cube. + """ return self._add_inner_related_vars( cube, @@ -1121,12 +1102,13 @@ def _add_dim_coords(self, cube, dimension_names): """ Add coordinate variables to NetCDF dataset. - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (:class:`iris.cube.Cube`): A :class:`iris.cube.Cube` to be saved to a netCDF file. - dimension_names : list + * dimension_names (list): Names associated with the dimensions of the cube. + """ # Ensure we create the netCDF coordinate variables first. for coord in cube.dim_coords: @@ -1140,20 +1122,19 @@ def _add_dim_coords(self, cube, dimension_names): def _add_aux_factories(self, cube, cf_var_cube, dimension_names): """ - Represent the presence of dimensionless vertical coordinates. - - Modify the variables of the NetCDF dataset to represent + Modifies the variables of the NetCDF dataset to represent the presence of dimensionless vertical coordinates based on the aux factories of the cube (if any). - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (:class:`iris.cube.Cube`): A :class:`iris.cube.Cube` to be saved to a netCDF file. - cf_var_cube: :class:`netcdf.netcdf_variable` + * cf_var_cube (:class:`netcdf.netcdf_variable`) CF variable cube representation. - dimension_names : list + * dimension_names (list): Names associated with the dimensions of the cube. + """ primaries = [] for factory in cube.aux_factories: @@ -1163,7 +1144,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): "Unable to determine formula terms " "for AuxFactory: {!r}".format(factory) ) - warnings.warn(msg, category=iris.exceptions.IrisSaveWarning) + warnings.warn(msg) else: # Override `standard_name`, `long_name`, and `axis` of the # primary coord that signals the presence of a dimensionless @@ -1239,23 +1220,23 @@ def _get_dim_names(self, cube_or_mesh): """ Determine suitable CF-netCDF data dimension names. - Parameters - ---------- - cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh` + Args: + + * cube_or_mesh (:class:`iris.cube.Cube` + or :class:`iris.experimental.ugrid.Mesh`): The Cube or Mesh being saved to the netCDF file. - Returns - ------- - mesh_dimensions : list of str - A list of the mesh dimensions of the attached mesh, if any. - cube_dimensions : list of str - A lists of dimension names for each dimension of the cube + Returns: + mesh_dimensions, cube_dimensions + * mesh_dimensions (list of string): + A list of the mesh dimensions of the attached mesh, if any. + * cube_dimensions (list of string): + A lists of dimension names for each dimension of the cube - Notes - ----- - The returned lists are in the preferred file creation order. - One of the mesh dimensions will typically also appear in the cube - dimensions. + ..note:: + The returned lists are in the preferred file creation order. + One of the mesh dimensions will typically also appear in the cube + dimensions. 
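[Editorial sketch, not part of the patch] The dimension handling above is steered by the unlimited_dimensions keyword documented earlier in this file; a short sketch, assuming a cube with a "time" dimension coordinate (names and values are illustrative):

    import numpy as np
    import iris
    from iris.coords import DimCoord
    from iris.cube import Cube

    time = DimCoord(np.arange(3.0), standard_name="time",
                    units="hours since 2000-01-01")
    cube = Cube(np.zeros(3), long_name="series",
                dim_coords_and_dims=[(time, 0)])

    # The "time" netCDF dimension should then be created with length UNLIMITED.
    iris.save(cube, "unlimited_time.nc", unlimited_dimensions=["time"])
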
""" @@ -1263,8 +1244,6 @@ def record_dimension( names_list, dim_name, length, matching_coords=None ): """ - Record a file dimension, its length and associated "coordinates". - Record a file dimension, its length and associated "coordinates" (which may in fact also be connectivities). @@ -1463,17 +1442,16 @@ def record_dimension( @staticmethod def cf_valid_var_name(var_name): - """Return a valid CF var_name given a potentially invalid name. + """ + Return a valid CF var_name given a potentially invalid name. - Parameters - ---------- - var_name : str + Args: + + * var_name (str): The var_name to normalise - Returns - ------- - str - var_name suitable for passing through for variable creation. + Returns: + A var_name suitable for passing through for variable creation. """ # Replace invalid characters with an underscore ("_"). @@ -1488,17 +1466,17 @@ def _cf_coord_standardised_units(coord): """ Determine a suitable units from a given coordinate. - Parameters - ---------- - coord : :class:`iris.coords.Coord` + Args: + + * coord (:class:`iris.coords.Coord`): A coordinate of a cube. - Returns - ------- - units + Returns: The (standard_name, long_name, unit) of the given :class:`iris.coords.Coord` instance. + """ + units = str(coord.units) # Set the 'units' of 'latitude' and 'longitude' coordinates specified # in 'degrees' to 'degrees_north' and 'degrees_east' respectively, @@ -1550,18 +1528,17 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): """ Create the associated CF-netCDF bounds variable. - Parameters - ---------- - coord : :class:`iris.coords.Coord` + Args: + + * coord (:class:`iris.coords.Coord`): A coordinate of a cube. - cf_var: + * cf_var: CF-netCDF variable - cf_name : str + * cf_name (string): name of the CF-NetCDF variable. - Returns - ------- - None + Returns: + None """ if hasattr(coord, "has_bounds") and coord.has_bounds(): @@ -1609,17 +1586,15 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): def _get_cube_variable_name(self, cube): """ - Return a CF-netCDF variable name for the given cube. + Returns a CF-netCDF variable name for the given cube. - Parameters - ---------- - cube : :class:`iris.cube.Cube` + Args: + + * cube (class:`iris.cube.Cube`): An instance of a cube for which a CF-netCDF variable name is required. - Returns - ------- - str + Returns: A CF-netCDF variable name as a string. """ @@ -1634,19 +1609,18 @@ def _get_cube_variable_name(self, cube): def _get_coord_variable_name(self, cube_or_mesh, coord): """ - Return a CF-netCDF variable name for a given coordinate-like element. + Returns a CF-netCDF variable name for a given coordinate-like element. - Parameters - ---------- - cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh` + Args: + + * cube_or_mesh (:class:`iris.cube.Cube` + or :class:`iris.experimental.ugrid.Mesh`): The Cube or Mesh being saved to the netCDF file. - coord : :class:`iris.coords._DimensionalMetadata` + * coord (:class:`iris.coords._DimensionalMetadata`): An instance of a coordinate (or similar), for which a CF-netCDF variable name is required. - Returns - ------- - str + Returns: A CF-netCDF variable name as a string. """ @@ -1697,17 +1671,15 @@ def _get_coord_variable_name(self, cube_or_mesh, coord): def _get_mesh_variable_name(self, mesh): """ - Return a CF-netCDF variable name for the mesh. + Returns a CF-netCDF variable name for the mesh. 
- Parameters - ---------- - mesh : :class:`iris.experimental.ugrid.mesh.Mesh` + Args: + + * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`): An instance of a Mesh for which a CF-netCDF variable name is required. - Returns - ------- - str + Returns: A CF-netCDF variable name as a string. """ @@ -1726,14 +1698,12 @@ def _create_mesh(self, mesh): """ Create a mesh variable in the netCDF dataset. - Parameters - ---------- - mesh : :class:`iris.experimental.ugrid.mesh.Mesh` + Args: + + * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`): The Mesh to be saved to CF-netCDF file. - Returns - ------- - str + Returns: The string name of the associated CF-netCDF variable saved. """ @@ -1808,8 +1778,6 @@ def _create_generic_cf_array_var( fill_value=None, ): """ - Create theCF-netCDF variable given dimensional_metadata. - Create the associated CF-netCDF variable in the netCDF dataset for the given dimensional_metadata. @@ -1817,32 +1785,33 @@ def _create_generic_cf_array_var( If the metadata element is a coord, it may also contain bounds. In which case, an additional var is created and linked to it. - Parameters - ---------- - cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh` + Args: + + * cube_or_mesh (:class:`iris.cube.Cube` + or :class:`iris.experimental.ugrid.Mesh`): The Cube or Mesh being saved to the netCDF file. - cube_dim_names : list of str + * cube_dim_names (list of string): The name of each dimension of the cube. - element : :class:`iris.coords._DimensionalMetadata` + * element: An Iris :class:`iris.coords._DimensionalMetadata`, belonging to the cube. Provides data, units and standard/long/var names. Not used if 'element_dims' is not None. - element_dims : list of str, or None + * element_dims (list of string, or None): If set, contains the variable dimension (names), otherwise these are taken from `element.cube_dims[cube]`. For Mesh components (element coordinates and connectivities), this *must* be passed in, as "element.cube_dims" does not function. - fill_value : number or None + * fill_value (number or None): If set, create the variable with this fill-value, and fill any masked data points with this value. If not set, standard netcdf4-python behaviour : the variable has no '_FillValue' property, and uses the "standard" fill-value for its type. - Returns - ------- - str - The name of the CF-netCDF variable created. + Returns: + var_name (string): + The name of the CF-netCDF variable created. + """ # Support cube or mesh save. from iris.cube import Cube @@ -1958,17 +1927,16 @@ def _create_cf_cell_methods(self, cube, dimension_names): """ Create CF-netCDF string representation of a cube cell methods. - Parameters - ---------- - cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` + Args: + + * cube (:class:`iris.cube.Cube`) or cubelist + (:class:`iris.cube.CubeList`): A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of cubes to be saved to a netCDF file. - dimension_names : list + * dimension_names (list): Names associated with the dimensions of the cube. - Returns - ------- - str + Returns: CF-netCDF string representation of a cube cell methods. """ @@ -2008,22 +1976,20 @@ def _create_cf_cell_methods(self, cube, dimension_names): def _create_cf_grid_mapping(self, cube, cf_var_cube): """ - Create CF-netCDF grid mapping and associated CF-netCDF variable. - Create CF-netCDF grid mapping variable and associated CF-netCDF data variable grid mapping attribute. 
- Parameters - ---------- - cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` + Args: + + * cube (:class:`iris.cube.Cube`) or cubelist + (:class:`iris.cube.CubeList`): A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of cubes to be saved to a netCDF file. - cf_var_cube : :class:`netcdf.netcdf_variable` + * cf_var_cube (:class:`netcdf.netcdf_variable`): cf variable cube representation. - Returns - ------- - None + Returns: + None """ cs = cube.coord_system("CoordSystem") @@ -2160,10 +2126,7 @@ def add_ellipsoid(ellipsoid): # osgb (a specific tmerc) elif isinstance(cs, iris.coord_systems.OSGB): - warnings.warn( - "OSGB coordinate system not yet handled", - category=iris.exceptions.IrisSaveWarning, - ) + warnings.warn("OSGB coordinate system not yet handled") # lambert azimuthal equal area elif isinstance( @@ -2227,41 +2190,12 @@ def add_ellipsoid(ellipsoid): ) cf_var_grid.sweep_angle_axis = cs.sweep_angle_axis - # oblique mercator (and rotated variant) - # Use duck-typing over isinstance() - subclasses (i.e. - # RotatedMercator) upset mock tests. - elif ( - getattr(cs, "grid_mapping_name", None) - == "oblique_mercator" - ): - # RotatedMercator subclasses ObliqueMercator, and RM - # instances are implicitly saved as OM due to inherited - # properties. This is correct because CF 1.11 is removing - # all mention of RM. - if cs.ellipsoid: - add_ellipsoid(cs.ellipsoid) - cf_var_grid.azimuth_of_central_line = ( - cs.azimuth_of_central_line - ) - cf_var_grid.latitude_of_projection_origin = ( - cs.latitude_of_projection_origin - ) - cf_var_grid.longitude_of_projection_origin = ( - cs.longitude_of_projection_origin - ) - cf_var_grid.false_easting = cs.false_easting - cf_var_grid.false_northing = cs.false_northing - cf_var_grid.scale_factor_at_projection_origin = ( - cs.scale_factor_at_projection_origin - ) - # other else: warnings.warn( "Unable to represent the horizontal " "coordinate system. The coordinate system " - "type %r is not yet implemented." % type(cs), - category=iris.exceptions.IrisSaveWarning, + "type %r is not yet implemented." % type(cs) ) self._coord_systems.append(cs) @@ -2279,34 +2213,32 @@ def _create_cf_data_variable( **kwargs, ): """ - Create CF-netCDF data variable for the cube and any associated grid mapping. + Create CF-netCDF data variable for the cube and any associated grid + mapping. - # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can - # be removed. + Args: - Parameters - ---------- - cube : :class:`iris.cube.Cube` + * cube (:class:`iris.cube.Cube`): The associated cube being saved to CF-netCDF file. - dimension_names : list + * dimension_names (list): String names for each dimension of the cube. - local_keys : iterable of str, optional - See :func:`iris.fileformats.netcdf.Saver.write` - packing : type or str or dict or list, optional - See :func:`iris.fileformats.netcdf.Saver.write` - fill_value : optional - See :func:`iris.fileformats.netcdf.Saver.write` + + Kwargs: + + * local_keys (iterable of strings): + * see :func:`iris.fileformats.netcdf.Saver.write` + * packing (type or string or dict or list): + * see :func:`iris.fileformats.netcdf.Saver.write` + * fill_value: + * see :func:`iris.fileformats.netcdf.Saver.write` All other keywords are passed through to the dataset's `createVariable` method. - Returns - ------- - The newly created CF-netCDF data variable. + Returns: + The newly created CF-netCDF data variable. """ - # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can - # be removed. 
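[Editorial sketch, not part of the patch] For the grid-mapping handling in this hunk, a brief sketch of the common geographic case: saving a cube whose horizontal coordinates carry a GeogCS writes a companion grid-mapping variable (grid_mapping_name "latitude_longitude") referenced from the data variable. File name and values are illustrative:

    import numpy as np
    import iris
    from iris.coord_systems import GeogCS
    from iris.coords import DimCoord
    from iris.cube import Cube

    cs = GeogCS(6371229.0)
    lat = DimCoord(np.linspace(-90.0, 90.0, 3), standard_name="latitude",
                   units="degrees", coord_system=cs)
    lon = DimCoord(np.arange(4) * 90.0, standard_name="longitude",
                   units="degrees", coord_system=cs)
    cube = Cube(np.zeros((3, 4)), standard_name="air_temperature", units="K",
                dim_coords_and_dims=[(lat, 0), (lon, 1)])

    # The saved file gains a "latitude_longitude" grid-mapping variable.
    iris.save(cube, "grid_mapping_example.nc")
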
# Get the values in a form which is valid for the file format. data = self._ensure_valid_dtype(cube.core_data(), "cube", cube) @@ -2395,20 +2327,16 @@ def set_packing_ncattrs(cfvar): if cube.units.calendar: _setncattr(cf_var, "calendar", cube.units.calendar) - if iris.FUTURE.save_split_attrs: - attr_names = cube.attributes.locals.keys() + # Add data variable-only attribute names to local_keys. + if local_keys is None: + local_keys = set() else: - # Add data variable-only attribute names to local_keys. - if local_keys is None: - local_keys = set() - else: - local_keys = set(local_keys) - local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS) - - # Add any cube attributes whose keys are in local_keys as - # CF-netCDF data variable attributes. - attr_names = set(cube.attributes).intersection(local_keys) + local_keys = set(local_keys) + local_keys.update(_CF_DATA_ATTRS, _UKMO_DATA_ATTRS) + # Add any cube attributes whose keys are in local_keys as + # CF-netCDF data variable attributes. + attr_names = set(cube.attributes).intersection(local_keys) for attr_name in sorted(attr_names): # Do not output 'conventions' attribute. if attr_name.lower() == "conventions": @@ -2431,7 +2359,7 @@ def set_packing_ncattrs(cfvar): "attribute, but {attr_name!r} should only be a CF " "global attribute.".format(attr_name=attr_name) ) - warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) + warnings.warn(msg) _setncattr(cf_var, attr_name, value) @@ -2453,14 +2381,13 @@ def _increment_name(self, varname): Avoidance of conflicts between variable names, where the name is incremented to distinguish it from others. - Parameters - ---------- - varname : str + Args: + + * varname (string): Variable name to increment. - Returns - ------- - Incremented varname. + Returns: + Incremented varname. """ num = 0 @@ -2567,19 +2494,18 @@ def store(data, cf_var, fill_info): def delayed_completion(self) -> Delayed: """ - Perform file completion for delayed saves. - - Create and return a :class:`dask.delayed.Delayed` to perform file - completion for delayed saves. + Create and return a :class:`dask.delayed.Delayed` to perform file completion + for delayed saves. - This contains all the delayed writes, which complete the file by - filling out the data of variables initially created empty, and also the - checks for potential fill-value collisions. When computed, it returns - a list of any warnings which were generated in the save operation. + This contains all the delayed writes, which complete the file by filling out + the data of variables initially created empty, and also the checks for + potential fill-value collisions. + When computed, it returns a list of any warnings which were generated in the + save operation. Returns ------- - :class:`dask.delayed.Delayed` + completion : :class:`dask.delayed.Delayed` Notes ----- @@ -2667,9 +2593,7 @@ def complete(self, issue_warnings=True) -> List[Warning]: if issue_warnings: # Issue any delayed warnings from the compute. for delayed_warning in result_warnings: - warnings.warn( - delayed_warning, category=iris.exceptions.IrisSaveWarning - ) + warnings.warn(delayed_warning) return result_warnings @@ -2692,19 +2616,13 @@ def save( fill_value=None, compute=True, ): - r""" + """ Save cube(s) to a netCDF file, given the cube and the filename. * Iris will write CF 1.7 compliant NetCDF files. - * **If split-attribute saving is disabled**, i.e. 
- :data:`iris.FUTURE`\\ ``.save_split_attrs`` is ``False``, then attributes - dictionaries on each cube in the saved cube list will be compared, and common - attributes saved as NetCDF global attributes where appropriate. - - Or, **when split-attribute saving is enabled**, then ``cube.attributes.locals`` - are always saved as attributes of data-variables, and ``cube.attributes.globals`` - are saved as global (dataset) attributes, where possible. - Since the 2 types are now distinguished : see :class:`~iris.cube.CubeAttrsDict`. + * The attributes dictionaries on each cube in the saved cube list + will be compared and common attributes saved as NetCDF global + attributes where appropriate. * Keyword arguments specifying how to save the data are applied to each cube. To use different settings for different cubes, use the NetCDF Context manager (:class:`~Saver`) directly. @@ -2713,12 +2631,13 @@ def save( status of the cube's data payload, unless the netcdf_format is explicitly specified to be 'NETCDF3' or 'NETCDF3_CLASSIC'. - Parameters - ---------- - cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` + Args: + + * cube (:class:`iris.cube.Cube` or :class:`iris.cube.CubeList`): A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or other iterable of cubes to be saved to a netCDF file. - filename : str + + * filename (string): Name of the netCDF file to save the cube(s). **Or** an open, writeable :class:`netCDF4.Dataset`, or compatible object. @@ -2726,50 +2645,56 @@ def save( When saving to a dataset, ``compute`` **must** be ``False`` : See the ``compute`` parameter. - netcdf_format : str + Kwargs: + + * netcdf_format (string): Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. - local_keys : iterable of str, optional + + * local_keys (iterable of strings): An interable of cube attribute keys. Any cube attributes with matching keys will become attributes on the data variable rather than global attributes. - .. note:: - This is *ignored* if 'split-attribute saving' is **enabled**, - i.e. when ``iris.FUTURE.save_split_attrs`` is ``True``. - - unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord` objects, optional + * unlimited_dimensions (iterable of strings and/or + :class:`iris.coords.Coord` objects): List of coordinate names (or coordinate objects) corresponding to coordinate dimensions of `cube` to save with the NetCDF dimension variable length 'UNLIMITED'. By default, no unlimited dimensions are saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED' dimensions. - zlib : bool, optional + + * zlib (bool): If `True`, the data will be compressed in the netCDF file using gzip compression (default `False`). - complevel : int + + * complevel (int): An integer between 1 and 9 describing the level of compression desired (default 4). Ignored if `zlib=False`. - shuffle : bool, optional + + * shuffle (bool): If `True`, the HDF5 shuffle filter will be applied before compressing the data (default `True`). This significantly improves compression. Ignored if `zlib=False`. - fletcher32 : bool, optional + + * fletcher32 (bool): If `True`, the Fletcher32 HDF5 checksum algorithm is activated to detect errors. Default `False`. - contiguous : bool, optional + + * contiguous (bool): If `True`, the variable data is stored contiguously on disk. Default `False`. Setting to `True` for a variable with an unlimited dimension will trigger an error. 
- chunksizes : tuple of int, optional + + * chunksizes (tuple of int): Used to manually specify the HDF5 chunksizes for each dimension of the variable. A detailed discussion of HDF chunking and I/O performance is - available - `here `__. + available here: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html. Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. - endian : str + + * endian (string): Used to control whether the data is stored in little or big endian format on disk. Possible values are 'little', 'big' or 'native' (default). The library will automatically handle endian conversions @@ -2777,7 +2702,8 @@ def save( computer with the opposite format as the one used to create the file, there may be some performance advantage to be gained by setting the endian-ness. - least_significant_digit : int, optional + + * least_significant_digit (int): If `least_significant_digit` is specified, variable data will be truncated (quantized). In conjunction with `zlib=True` this produces 'lossy', but significantly more efficient compression. For example, if @@ -2785,17 +2711,17 @@ def save( `numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits` is determined so that a precision of 0.1 is retained (in this case `bits=4`). From - + http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml: "least_significant_digit -- power of ten of the smallest decimal place in unpacked data that is a reliable value". Default is `None`, or no quantization, or 'lossless' compression. - packing : type or str or dict or list, optional - A numpy integer datatype (signed or unsigned) or a string that - describes a numpy integer dtype (i.e. 'i2', 'short', 'u4') or a dict - of packing parameters as described below or an iterable of such types, - strings, or dicts. This provides support for netCDF data packing as - described in - `here `__ + + * packing (type or string or dict or list): A numpy integer datatype + (signed or unsigned) or a string that describes a numpy integer dtype + (i.e. 'i2', 'short', 'u4') or a dict of packing parameters as described + below or an iterable of such types, strings, or dicts. + This provides support for netCDF data packing as described in + https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values If this argument is a type (or type string), appropriate values of scale_factor and add_offset will be automatically calculated based on `cube.data` and possible masking. For more control, pass a dict with @@ -2805,16 +2731,18 @@ def save( avoid this. The default is `None`, in which case the datatype is determined from the cube and no packing will occur. If this argument is a list it must have the same number of elements as `cube` if `cube` is - a :class:`iris.cube.CubeList`, or one element, and each element of + a `:class:`iris.cube.CubeList`, or one element, and each element of this argument will be applied to each cube separately. - fill_value : numeric or list, optional + + * fill_value (numeric or list): The value to use for the `_FillValue` attribute on the netCDF variable. If `packing` is specified the value of `fill_value` should be in the domain of the packed data. 
If this argument is a list it must have the same number of elements as `cube` if `cube` is a - :class:`iris.cube.CubeList`, or a single element, and each element of + `:class:`iris.cube.CubeList`, or a single element, and each element of this argument will be applied to each cube separately. - compute : bool, optional + + * compute (bool): Default is ``True``, meaning complete the file immediately, and return ``None``. When ``False``, create the output file but don't write any lazy array content to @@ -2826,7 +2754,7 @@ def save( .. Note:: when computed, the returned :class:`dask.delayed.Delayed` object returns - a list of :class:`Warning` : These are any warnings which *would* have + a list of :class:`Warning`\\s : These are any warnings which *would* have been issued in the save call, if ``compute`` had been ``True``. .. Note:: @@ -2837,18 +2765,21 @@ def save( must (re-)open the dataset for writing, which will fail if the file is still open for writing by the caller. - Returns - ------- - result: None or dask.delayed.Delayed - If `compute=True`, returns `None`. - Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed - writing to fill in the variables data. + Returns: + result (None, or dask.delayed.Delayed): + If `compute=True`, returns `None`. + Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed + writing to fill in the variables data. - Notes - ----- - The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, - `chunksizes` and `endian` keywords are silently ignored for netCDF 3 - files that do not use HDF5. + .. note:: + + The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`, + `chunksizes` and `endian` keywords are silently ignored for netCDF 3 + files that do not use HDF5. + + .. seealso:: + + NetCDF Context manager (:class:`~Saver`). """ from iris.cube import Cube, CubeList @@ -2862,127 +2793,26 @@ def save( else: cubes = cube - # Decide which cube attributes will be saved as "global" attributes - # NOTE: in 'legacy' mode, when iris.FUTURE.save_split_attrs == False, this code - # section derives a common value for 'local_keys', which is passed to 'Saver.write' - # when saving each input cube. The global attributes are then created by a call - # to "Saver.update_global_attributes" within each 'Saver.write' call (which is - # obviously a bit redundant!), plus an extra one to add 'Conventions'. - # HOWEVER, in `split_attrs` mode (iris.FUTURE.save_split_attrs == False), this code - # instead constructs a 'global_attributes' dictionary, and outputs that just once, - # after writing all the input cubes. - if iris.FUTURE.save_split_attrs: - # We don't actually use 'local_keys' in this case. - # TODO: can remove this when the iris.FUTURE.save_split_attrs is removed. + if local_keys is None: local_keys = set() - - # Find any collisions in the cube global attributes and "demote" all those to - # local attributes (where possible, else warn they are lost). - # N.B. "collision" includes when not all cubes *have* that attribute. - global_names = set() - for cube in cubes: - global_names |= set(cube.attributes.globals.keys()) - - # Fnd any global attributes which are not the same on *all* cubes. - def attr_values_equal(val1, val2): - # An equality test which also works when some values are numpy arrays (!) - # As done in :meth:`iris.common.mixin.LimitedAttributeDict.__eq__`. 
- match = val1 == val2 - try: - match = bool(match) - except ValueError: - match = match.all() - return match - - cube0 = cubes[0] - invalid_globals = set( - [ - attrname - for attrname in global_names - if not all( - attr_values_equal( - cube.attributes.globals.get(attrname), - cube0.attributes.globals.get(attrname), - ) - for cube in cubes[1:] - ) - ] - ) - - # Establish all the global attributes which we will write to the file (at end). - global_attributes = { - attr: cube0.attributes.globals.get(attr) - for attr in global_names - invalid_globals - } - if invalid_globals: - # Some cubes have different global attributes: modify cubes as required. - warnings.warn( - f"Saving the cube global attributes {sorted(invalid_globals)} as local " - "(i.e. data-variable) attributes, where possible, since they are not " - "the same on all input cubes.", - category=iris.exceptions.IrisSaveWarning, - ) - cubes = cubes.copy() # avoiding modifying the actual input arg. - for i_cube in range(len(cubes)): - # We iterate over cube *index*, so we can replace the list entries with - # with cube *copies* -- just to avoid changing our call args. - cube = cubes[i_cube] - demote_attrs = set(cube.attributes.globals) & invalid_globals - if any(demote_attrs): - # Catch any demoted attrs where there is already a local version - blocked_attrs = demote_attrs & set(cube.attributes.locals) - if blocked_attrs: - warnings.warn( - f"Global cube attributes {sorted(blocked_attrs)} " - f'of cube "{cube.name()}" were not saved, overlaid ' - "by existing local attributes with the same names.", - category=iris.exceptions.IrisSaveWarning, - ) - demote_attrs -= blocked_attrs - if demote_attrs: - # This cube contains some 'demoted' global attributes. - # Replace input cube with a copy, so we can modify attributes. - cube = cube.copy() - cubes[i_cube] = cube - for attr in demote_attrs: - # move global to local - value = cube.attributes.globals.pop(attr) - cube.attributes.locals[attr] = value - else: - # Legacy mode: calculate "local_keys" to control which attributes are local - # and which global. - # TODO: when iris.FUTURE.save_split_attrs is removed, this section can also be - # removed - message = ( - "Saving to netcdf with legacy-style attribute handling for backwards " - "compatibility.\n" - "This mode is deprecated since Iris 3.8, and will eventually be removed.\n" - "Please consider enabling the new split-attributes handling mode, by " - "setting 'iris.FUTURE.save_split_attrs = True'." - ) - warn_deprecated(message) - - if local_keys is None: - local_keys = set() - else: - local_keys = set(local_keys) - - # Determine the attribute keys that are common across all cubes and - # thereby extend the collection of local_keys for attributes - # that should be attributes on data variables. - attributes = cubes[0].attributes - common_keys = set(attributes) - for cube in cubes[1:]: - keys = set(cube.attributes) - local_keys.update(keys.symmetric_difference(common_keys)) - common_keys.intersection_update(keys) - different_value_keys = [] - for key in common_keys: - if np.any(attributes[key] != cube.attributes[key]): - different_value_keys.append(key) - common_keys.difference_update(different_value_keys) - local_keys.update(different_value_keys) + local_keys = set(local_keys) + + # Determine the attribute keys that are common across all cubes and + # thereby extend the collection of local_keys for attributes + # that should be attributes on data variables. 
+ attributes = cubes[0].attributes + common_keys = set(attributes) + for cube in cubes[1:]: + keys = set(cube.attributes) + local_keys.update(keys.symmetric_difference(common_keys)) + common_keys.intersection_update(keys) + different_value_keys = [] + for key in common_keys: + if np.any(attributes[key] != cube.attributes[key]): + different_value_keys.append(key) + common_keys.difference_update(different_value_keys) + local_keys.update(different_value_keys) def is_valid_packspec(p): """Only checks that the datatype is valid.""" @@ -3081,15 +2911,10 @@ def is_valid_packspec(p): msg = "cf_profile is available but no {} defined.".format( "cf_patch_conventions" ) - warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) + warnings.warn(msg) # Add conventions attribute. - if iris.FUTURE.save_split_attrs: - # In the "new way", we just create all the global attributes at once. - global_attributes["Conventions"] = conventions - sman.update_global_attributes(global_attributes) - else: - sman.update_global_attributes(Conventions=conventions) + sman.update_global_attributes(Conventions=conventions) if compute: # No more to do, since we used Saver(compute=True). diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index d4e86502bd..6f39ca87fa 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Provides NIMROD file format capabilities.""" import glob diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 7347135422..fd1ccb0e95 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Rules for converting NIMROD fields into cubes.""" import re @@ -15,11 +16,7 @@ import iris import iris.coord_systems from iris.coords import DimCoord -from iris.exceptions import ( - CoordinateNotFoundError, - IrisNimrodTranslationWarning, - TranslationError, -) +from iris.exceptions import CoordinateNotFoundError, TranslationError __all__ = ["run"] @@ -31,12 +28,7 @@ ) -class TranslationWarning(IrisNimrodTranslationWarning): - """ - Backwards compatible form of :class:`iris.exceptions.IrisNimrodTranslationWarning`. - """ - - # TODO: remove at the next major release. +class TranslationWarning(Warning): pass @@ -189,8 +181,7 @@ def units(cube, field): warnings.warn( "Unhandled units '{0}' recorded in cube attributes.".format( field_units - ), - category=IrisNimrodTranslationWarning, + ) ) cube.attributes["invalid_units"] = field_units @@ -426,8 +417,7 @@ def coord_system(field, handle_metadata_errors): if any([is_missing(field, v) for v in crs_args]): warnings.warn( "Coordinate Reference System is not completely defined. " - "Plotting and reprojection may be impaired.", - category=IrisNimrodTranslationWarning, + "Plotting and reprojection may be impaired." 
) coord_sys = iris.coord_systems.TransverseMercator( *crs_args, iris.coord_systems.GeogCS(**ellipsoid) @@ -549,7 +539,7 @@ def vertical_coord(cube, field): f"{field.vertical_coord_type} != {field.reference_vertical_coord_type}. " f"Assuming {field.vertical_coord_type}" ) - warnings.warn(msg, category=IrisNimrodTranslationWarning) + warnings.warn(msg) coord_point = field.vertical_coord if coord_point == 8888.0: @@ -596,7 +586,7 @@ def vertical_coord(cube, field): warnings.warn( "Vertical coord {!r} not yet handled" "".format(field.vertical_coord_type), - category=TranslationWarning, + TranslationWarning, ) @@ -841,8 +831,7 @@ def probability_coord(cube, field, handle_metadata_errors): ) warnings.warn( f"No default units for {coord_name} coord of {cube.name()}. " - "Meta-data may be incomplete.", - category=IrisNimrodTranslationWarning, + "Meta-data may be incomplete." ) new_coord = iris.coords.AuxCoord( np.array(coord_val, dtype=np.float32), bounds=bounds, **coord_keys diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 4b2b7eeae0..65e0e16d72 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides UK Met Office Post Process (PP) format specific capabilities. @@ -26,7 +27,6 @@ from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data import iris.config import iris.coord_systems -import iris.exceptions # NOTE: this is for backwards-compatitibility *ONLY* # We could simply remove it for v2.0 ? @@ -220,33 +220,6 @@ } -class _WarnComboLoadingMask( - iris.exceptions.IrisLoadWarning, - iris.exceptions.IrisMaskValueMatchWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - -class _WarnComboLoadingDefaulting( - iris.exceptions.IrisDefaultingWarning, - iris.exceptions.IrisLoadWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - -class _WarnComboIgnoringLoad( - iris.exceptions.IrisIgnoringWarning, - iris.exceptions.IrisLoadWarning, -): - """One-off combination of warning classes - enhances user filtering.""" - - pass - - class STASH(collections.namedtuple("STASH", "model section item")): """ A class to hold a single STASH code. @@ -1192,10 +1165,7 @@ def save(self, file_handle): "missing data. To save these as normal values, please " "set the field BMDI not equal to any valid data points." ) - warnings.warn( - msg.format(mdi), - category=_WarnComboLoadingMask, - ) + warnings.warn(msg.format(mdi)) if isinstance(data, ma.MaskedArray): if ma.is_masked(data): data = data.filled(fill_value=mdi) @@ -1320,8 +1290,7 @@ def save(self, file_handle): warnings.warn( "Downcasting array precision from float64 to float32" " for save.If float64 precision is required then" - " please save in a different format", - category=_WarnComboLoadingDefaulting, + " please save in a different format" ) data = data.astype(">f4") lb[self.HEADER_DICT["lbuser"][0]] = 1 @@ -1763,8 +1732,7 @@ def _interpret_fields(fields): warnings.warn( "Landmask compressed fields existed without a " "landmask to decompress with. 
The data will have " - "a shape of (0, 0) and will not read.", - category=iris.exceptions.IrisLoadWarning, + "a shape of (0, 0) and will not read." ) mask_shape = (0, 0) else: @@ -1933,10 +1901,7 @@ def _field_gen(filename, read_data_bytes, little_ended=False): "Unable to interpret field {}. {}. Skipping " "the remainder of the file.".format(field_count, str(e)) ) - warnings.warn( - msg, - category=_WarnComboIgnoringLoad, - ) + warnings.warn(msg) break # Skip the trailing 4-byte word containing the header length @@ -1956,8 +1921,7 @@ def _field_gen(filename, read_data_bytes, little_ended=False): warnings.warn( wmsg.format( pp_field.lblrec * PP_WORD_DEPTH, len_of_data_plus_extra - ), - category=_WarnComboIgnoringLoad, + ) ) break diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 1aed25311d..11d03e978a 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # Historically this was auto-generated from diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 9effba3c0a..0369fc9fd0 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. import warnings @@ -9,7 +10,6 @@ import iris from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.exceptions import IrisPpClimModifiedWarning from iris.fileformats._ff_cross_references import STASH_TRANS from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP from iris.fileformats.rules import ( @@ -614,7 +614,7 @@ def _non_std_cross_section_rules(cube, pp): def _lbproc_rules(cube, pp): """ - Rules for setting the processing code of the PP field. + Rules for setting the horizontal grid and pole location of the PP field. Note: `pp.lbproc` must be set to 0 before these rules are run. @@ -844,10 +844,7 @@ def _vertical_rules(cube, pp): def _all_other_rules(cube, pp): """ - Fields currently managed by these rules: - - * lbfc (field code) - * lbrsvd[3] (ensemble member number) + Rules for setting the horizontal grid and pole location of the PP field. Args: cube: the cube being saved as a series of PP fields. @@ -862,18 +859,13 @@ def _all_other_rules(cube, pp): if check_items in CF_TO_LBFC: pp.lbfc = CF_TO_LBFC[check_items] - # Set field code. + # Set STASH code. if ( "STASH" in cube.attributes and str(cube.attributes["STASH"]) in STASH_TRANS ): pp.lbfc = STASH_TRANS[str(cube.attributes["STASH"])].field_code - # Set ensemble member number. 
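# The _WarnCombo* classes removed from pp.py further up combine two warning
# categories by multiple inheritance, so a single emitted warning can be
# filtered via either parent class. A standalone sketch of that pattern (the
# class names below are illustrative, not Iris API):
import warnings


class _SketchLoadWarning(UserWarning):
    pass


class _SketchMaskWarning(UserWarning):
    pass


class _SketchComboWarning(_SketchLoadWarning, _SketchMaskWarning):
    pass


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    warnings.warn("masked points match BMDI", category=_SketchComboWarning)

# The one warning satisfies filters written against either base category.
assert issubclass(caught[0].category, _SketchLoadWarning)
assert issubclass(caught[0].category, _SketchMaskWarning)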
- real_coord = scalar_coord(cube, "realization") - if real_coord is not None: - pp.lbrsvd[3] = real_coord.points[0] - return pp @@ -898,4 +890,4 @@ def verify(cube, field): def _conditional_warning(condition, warning): if condition: - warnings.warn(warning, category=IrisPpClimModifiedWarning) + warnings.warn(warning) diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index bcfd4f8323..707fd58757 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Generalised mechanisms for metadata translation and cube construction. @@ -46,8 +47,7 @@ def as_cube(self): src_cubes = src_cubes.merge(unique=False) if len(src_cubes) > 1: warnings.warn( - "Multiple reference cubes for {}".format(self.name), - category=iris.exceptions.IrisUserWarning, + "Multiple reference cubes for {}".format(self.name) ) src_cube = src_cubes[-1] @@ -329,7 +329,7 @@ def _make_cube(field, converter): cube.units = metadata.units except ValueError: msg = "Ignoring PP invalid units {!r}".format(metadata.units) - warnings.warn(msg, category=iris.exceptions.IrisIgnoringWarning) + warnings.warn(msg) cube.attributes["invalid_units"] = metadata.units cube.units = cf_units._UNKNOWN_UNIT_STRING @@ -350,10 +350,7 @@ def _resolve_factory_references( except _ReferenceError as e: msg = "Unable to create instance of {factory}. " + str(e) factory_name = factory.factory_class.__name__ - warnings.warn( - msg.format(factory=factory_name), - category=iris.exceptions.IrisUserWarning, - ) + warnings.warn(msg.format(factory=factory_name)) else: aux_factory = factory.factory_class(*args) cube.add_aux_factory(aux_factory) diff --git a/lib/iris/fileformats/um/__init__.py b/lib/iris/fileformats/um/__init__.py index ac38e45de5..c01e8301e2 100644 --- a/lib/iris/fileformats/um/__init__.py +++ b/lib/iris/fileformats/um/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides iris loading support for UM Fieldsfile-like file types, and PP. diff --git a/lib/iris/fileformats/um/_fast_load.py b/lib/iris/fileformats/um/_fast_load.py index ce9d183586..e29025c169 100644 --- a/lib/iris/fileformats/um/_fast_load.py +++ b/lib/iris/fileformats/um/_fast_load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Support for "fast" loading of structured UM files in iris load functions, i.e. :meth:`iris.load` and its associates. 
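The `um` modules above and below implement Iris's "fast" structured loading of UM
fieldsfile-like data. For orientation, a brief usage sketch of the public entry
point; the file path is a placeholder::

    import iris
    from iris.fileformats.um import structured_um_loading

    # Within the context manager, iris.load() routes UM fieldsfile / PP input
    # through the structured "fast" loader.
    with structured_um_loading():
        cubes = iris.load("my_data.ff")  # placeholder path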
diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index 2a41cf99ba..64b7f8e891 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Code for fast loading of structured UM data. diff --git a/lib/iris/fileformats/um/_ff_replacement.py b/lib/iris/fileformats/um/_ff_replacement.py index 33ab2fbb68..0a661081c7 100644 --- a/lib/iris/fileformats/um/_ff_replacement.py +++ b/lib/iris/fileformats/um/_ff_replacement.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Support for UM "fieldsfile-like" files. diff --git a/lib/iris/fileformats/um/_optimal_array_structuring.py b/lib/iris/fileformats/um/_optimal_array_structuring.py index 3fd892808b..2793d47187 100644 --- a/lib/iris/fileformats/um/_optimal_array_structuring.py +++ b/lib/iris/fileformats/um/_optimal_array_structuring.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """A module to provide an optimal array structure calculation.""" diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index d2e51a3257..b93b192bbd 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides UM/CF phenomenon translations. diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 87725789e5..4e5004ff10 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -1,9 +1,12 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Provides an interface to manage URI scheme support in iris. -"""Provides an interface to manage URI scheme support in iris.""" +""" import collections from collections import OrderedDict @@ -37,27 +40,29 @@ def __setitem__(self, key, value): def run_callback(callback, cube, field, filename): """ - Run the callback mechanism given the appropriate arguments. 
+ Runs the callback mechanism given the appropriate arguments. - Parameters - ---------- - callback : + Args: + + * callback: A function to add metadata from the originating field and/or URI which obeys the following rules: - 1. Function signature must be: ``(cube, field, filename)``. - 2. Modifies the given cube inplace, unless a new cube is - returned by the function. - 3. If the cube is to be rejected the callback must raise - an :class:`iris.exceptions.IgnoreCubeException`. + 1. Function signature must be: ``(cube, field, filename)``. + 2. Modifies the given cube inplace, unless a new cube is + returned by the function. + 3. If the cube is to be rejected the callback must raise + an :class:`iris.exceptions.IgnoreCubeException`. - Notes - ----- - It is possible that this function returns None for certain callbacks, - the caller of this function should handle this case. + .. note:: - This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + It is possible that this function returns None for certain callbacks, + the caller of this function should handle this case. + + .. note:: + + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ from iris.cube import Cube @@ -84,7 +89,7 @@ def run_callback(callback, cube, field, filename): def decode_uri(uri, default="file"): r""" - Decode a single URI into scheme and scheme-specific parts. + Decodes a single URI into scheme and scheme-specific parts. In addition to well-formed URIs, it also supports bare file paths as strings or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are @@ -96,26 +101,25 @@ def decode_uri(uri, default="file"): from iris.io import * - Examples - -------- - >>> from iris.io import decode_uri - >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b')) - ('http', '//www.thing.com:8080/resource?id=a:b') + Examples: + >>> from iris.io import decode_uri + >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b')) + ('http', '//www.thing.com:8080/resource?id=a:b') - >>> print(decode_uri('file:///data/local/dataZoo/...')) - ('file', '///data/local/dataZoo/...') + >>> print(decode_uri('file:///data/local/dataZoo/...')) + ('file', '///data/local/dataZoo/...') - >>> print(decode_uri('/data/local/dataZoo/...')) - ('file', '/data/local/dataZoo/...') + >>> print(decode_uri('/data/local/dataZoo/...')) + ('file', '/data/local/dataZoo/...') - >>> print(decode_uri('file:///C:\data\local\dataZoo\...')) - ('file', '///C:\\data\\local\\dataZoo\\...') + >>> print(decode_uri('file:///C:\data\local\dataZoo\...')) + ('file', '///C:\\data\\local\\dataZoo\\...') - >>> print(decode_uri('C:\data\local\dataZoo\...')) - ('file', 'C:\\data\\local\\dataZoo\\...') + >>> print(decode_uri('C:\data\local\dataZoo\...')) + ('file', 'C:\\data\\local\\dataZoo\\...') - >>> print(decode_uri('dataZoo/...')) - ('file', 'dataZoo/...') + >>> print(decode_uri('dataZoo/...')) + ('file', 'dataZoo/...') >>> print(decode_uri({})) ('data', {}) @@ -153,7 +157,7 @@ def expand_filespecs(file_specs, files_expected=True): ---------- file_specs : iterable of str File paths which may contain ``~`` elements or wildcards. - files_expected : bool, optional, default=True + files_expected : bool, default=True Whether file is expected to exist (i.e. for load). 
Returns @@ -202,16 +206,14 @@ def expand_filespecs(file_specs, files_expected=True): def load_files(filenames, callback, constraints=None): """ - Create a generator of Cubes from given files. - - Take a list of filenames which may also be globs, and optionally a + Takes a list of filenames which may also be globs, and optionally a constraint set and a callback function, and returns a generator of Cubes from the given files. - Notes - ----- - Typically, this function should not be called directly; instead, the - intended interface for loading is :func:`iris.load`. + .. note:: + + Typically, this function should not be called directly; instead, the + intended interface for loading is :func:`iris.load`. """ from iris.fileformats import FORMAT_AGENT @@ -242,15 +244,13 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): """ - Create generator of Cubes from the given OPeNDAP URLs. - - Take a list of OPeNDAP URLs and a callback function, and returns a generator + Takes a list of OPeNDAP URLs and a callback function, and returns a generator of Cubes from the given URLs. - Notes - ----- - Typically, this function should not be called directly; instead, the - intended interface for loading is :func:`iris.load`. + .. note:: + + Typically, this function should not be called directly; instead, the + intended interface for loading is :func:`iris.load`. """ # @@ -277,8 +277,8 @@ def load_http(urls, callback): def load_data_objects(urls, callback): """ - Take a list of data-source objects and a callback function, returns a generator of Cubes. - + Takes a list of data-source objects and a callback function, and returns a + generator of Cubes. The 'objects' take the place of 'uris' in the load calls. The appropriate types of the data-source objects are expected to be recognised by the handlers : This is done in the usual way by passing the @@ -346,16 +346,12 @@ def add_saver(file_extension, new_saver): """ Add a custom saver to the Iris session. - Parameters - ---------- - file_extension : str - A string such as "pp" or "my_format". - new_saver : function - A function of the form ``my_saver(cube, target)``. + Args: - See Also - -------- - :func:`iris.io.save` + * file_extension: A string such as "pp" or "my_format". + * new_saver: A function of the form ``my_saver(cube, target)``. + + See also :func:`iris.io.save` """ # Make sure it's a func with 2+ args @@ -373,16 +369,14 @@ def find_saver(filespec): """ Find the saver function appropriate to the given filename or extension. - Parameters - ---------- - filespec : str - A string such as "my_file.pp" or "PP". + Args: - Returns - ------- - Save function - Save functions can be passed to :func:`iris.io.save`. Value may also - be None. + * filespec + A string such as "my_file.pp" or "PP". + + Returns: + A save function or None. + Save functions can be passed to :func:`iris.io.save`. """ _check_init_savers() @@ -407,12 +401,12 @@ def save(source, target, saver=None, **kwargs): Iris currently supports three file formats for saving, which it can recognise by filename extension: - * netCDF - the Unidata network Common Data Format: - * see :func:`iris.fileformats.netcdf.save` - * GRIB2 - the WMO GRIdded Binary data format: - * see :func:`iris_grib.save_grib2`. 
- * PP - the Met Office UM Post Processing Format: - * see :func:`iris.fileformats.pp.save` + * netCDF - the Unidata network Common Data Format: + * see :func:`iris.fileformats.netcdf.save` + * GRIB2 - the WMO GRIdded Binary data format: + * see :func:`iris_grib.save_grib2`. + * PP - the Met Office UM Post Processing Format: + * see :func:`iris.fileformats.pp.save` A custom saver can be provided to the function to write to a different file format. @@ -476,7 +470,8 @@ def save(source, target, saver=None, **kwargs): >>> iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC") Notes - ----- + ------ + This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index da64345cf3..9def0ada98 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -1,12 +1,12 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ -Provide convenient file format identification. +A module to provide convenient file format identification through a combination of filename extension +and file based *magic* numbers. -A module to provide convenient file format identification through a combination -of filename extension and file based *magic* numbers. To manage a collection of FormatSpecifications for loading:: @@ -25,11 +25,9 @@ with open(png_filename, 'rb') as png_fh: handling_spec = fagent.get_spec(png_filename, png_fh) -In the example, handling_spec will now be the png_spec previously added to the -agent. +In the example, handling_spec will now be the png_spec previously added to the agent. -Now that a specification has been found, if a handler has been given with the -specification, then the file can be handled:: +Now that a specification has been found, if a handler has been given with the specification, then the file can be handled:: handler = handling_spec.handler if handler is None: @@ -37,8 +35,8 @@ else: result = handler(filename) -The calling sequence of handler is dependent on the function given in the -original specification and can be customised to your project's needs. +The calling sequence of handler is dependent on the function given in the original specification and can be customised to your project's needs. + """ @@ -50,14 +48,10 @@ class FormatAgent: """ - Identifies format of a given file by interrogating its children instances. - - The FormatAgent class is the containing object which is responsible for - identifying the format of a given file by interrogating its children - FormatSpecification instances. + The FormatAgent class is the containing object which is responsible for identifying the format of a given file + by interrogating its children FormatSpecification instances. 
- Typically a FormatAgent will be created empty and then extended with the - :meth:`FormatAgent.add_spec` method:: + Typically a FormatAgent will be created empty and then extended with the :meth:`FormatAgent.add_spec` method:: agent = FormatAgent() agent.add_spec(NetCDF_specification) @@ -69,11 +63,12 @@ class FormatAgent: """ def __init__(self, format_specs=None): + """ """ self._format_specs = list(format_specs or []) self._format_specs.sort() def add_spec(self, format_spec): - """Add a FormatSpecification instance to this agent for format.""" + """Add a FormatSpecification instance to this agent for format consideration.""" self._format_specs.append(format_spec) self._format_specs.sort() @@ -88,22 +83,15 @@ def __str__(self): def get_spec(self, basename, buffer_obj): """ - Pick the first FormatSpecification. - Pick the first FormatSpecification which can handle the given filename and file/buffer object. - Parameters - ---------- - basename : TBD - buffer_obj : TBD + .. note:: - Notes - ----- - ``buffer_obj`` may be ``None`` when a seekable file handle is not - feasible (such as over the http protocol). In these cases only the - format specifications which do not require a file handle are - tested. + ``buffer_obj`` may be ``None`` when a seekable file handle is not + feasible (such as over the http protocol). In these cases only the + format specifications which do not require a file handle are + tested. """ element_cache = {} @@ -158,10 +146,8 @@ class FormatSpecification: """ Provides the base class for file type definition. - Every FormatSpecification instance has a name which can be accessed with - the :attr:`FormatSpecification.name` property and a FileElement, such as - filename extension or 32-bit magic number, with an associated value for - format identification. + Every FormatSpecification instance has a name which can be accessed with the :attr:`FormatSpecification.name` property and + a FileElement, such as filename extension or 32-bit magic number, with an associated value for format identification. """ @@ -175,26 +161,20 @@ def __init__( constraint_aware_handler=False, ): """ - Construct a new FormatSpecification. - - Parameters - ---------- - format_name : str - string name of fileformat being described - file_element : - FileElement instance of the element which identifies this - FormatSpecification - file_element_value : - The value that the file_element should take if a file matches this - FormatSpecification - handler : optional - function which will be called when the specification has been - identified and is required to handler a format. If None, then the - file can still be identified but no handling can be done. - priority: int - Integer giving a priority for considering this specification where - higher priority means sooner consideration. - constraint_aware_handler: optional, default=False + Constructs a new FormatSpecification given the format_name and particular FileElements + + Args: + + * format_name - string name of fileformat being described + * file_element - FileElement instance of the element which identifies this FormatSpecification + * file_element_value - The value that the file_element should take if a file matches this FormatSpecification + + Kwargs: + + * handler - function which will be called when the specification has been identified and is required to handler a format. + If None, then the file can still be identified but no handling can be done. 
+ * priority - Integer giving a priority for considering this specification where higher priority means sooner consideration. + """ if not isinstance(file_element, FileElement): raise ValueError( @@ -210,29 +190,26 @@ def __init__( self.constraint_aware_handler = constraint_aware_handler def __hash__(self): - # Hashed by specification for consistent ordering in FormatAgent - # (including self._handler in this hash for example would order - # randomly according to object id) + # Hashed by specification for consistent ordering in FormatAgent (including self._handler in this hash + # for example would order randomly according to object id) return hash(self._file_element) @property def file_element(self): - # noqa D102 return self._file_element @property def file_element_value(self): - # noqa D102 return self._file_element_value @property def name(self): - """The name of this FileFormat. (Read only).""" + """The name of this FileFormat. (Read only)""" return self._format_name @property def handler(self): - """The handler function of this FileFormat. (Read only).""" + """The handler function of this FileFormat. (Read only)""" return self._handler def _sort_key(self): @@ -254,8 +231,7 @@ def __ne__(self, other): return not (self == other) def __repr__(self): - # N.B. loader is not always going to provide a nice repr if it is a - # lambda function, hence a prettier version is available in __str__ + # N.B. loader is not always going to provide a nice repr if it is a lambda function, hence a prettier version is available in __str__ return "FormatSpecification(%r, %r, %r, handler=%r, priority=%s)" % ( self._format_name, self._file_element, @@ -274,27 +250,23 @@ def __str__(self): class FileElement: """ - Represents a specific aspect of a FileFormat. - - Represents a specific aspect of a FileFormat which can be identified using - the given element getter function. + Represents a specific aspect of a FileFormat which can be identified using the given element getter function. """ def __init__(self, requires_fh=True): """ - Construct a new file element, which may require a file buffer. + Constructs a new file element, which may require a file buffer. - Parameters - ---------- - requires_fh : optional - Whether this FileElement needs a file buffer. + Kwargs: + + * requires_fh - Whether this FileElement needs a file buffer. """ self.requires_fh = requires_fh def get_element(self, basename, file_handle): - """Identify the element of a file that this FileElement is representing.""" + """Called when identifying the element of a file that this FileElement is representing.""" raise NotImplementedError("get_element must be defined in a subclass") def __hash__(self): @@ -315,7 +287,6 @@ def __init__(self, num_bytes, offset=None): self._offset = offset def get_element(self, basename, file_handle): - # noqa D102 if self._offset is not None: file_handle.seek(self._offset) bytes = file_handle.read(self._num_bytes) @@ -336,7 +307,6 @@ class FileExtension(FileElement): """A :class:`FileElement` that returns the extension from the filename.""" def get_element(self, basename, file_handle): - # noqa D102 return os.path.splitext(basename)[1] @@ -344,14 +314,11 @@ class LeadingLine(FileElement): """A :class:`FileElement` that returns the first line from the file.""" def get_element(self, basename, file_handle): - # noqa: D102 return file_handle.readline() class UriProtocol(FileElement): """ - Return the scheme and part from a URI, using :func:`~iris.io.decode_uri`. 
- A :class:`FileElement` that returns the "scheme" and "part" from a URI, using :func:`~iris.io.decode_uri`. @@ -361,7 +328,6 @@ def __init__(self): FileElement.__init__(self, requires_fh=False) def get_element(self, basename, file_handle): - # noqa: D102 from iris.io import decode_uri return decode_uri(basename)[0] @@ -380,10 +346,7 @@ def __init__(self): super().__init__(requires_fh=False) def get_element(self, basename, file_handle): - """ - In this context, there should *not* be a file opened by the handler. - - Just return 'basename', which in this case is not a name, or even a - string, but a passed 'data object'. - """ + # In this context, there should *not* be a file opened by the handler. + # Just return 'basename', which in this case is not a name, or even a + # string, but a passed 'data object'. return basename diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index cd950828be..cf16c9cbe6 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Cube functions for iteration in step. @@ -13,8 +14,6 @@ import numpy as np -from iris.exceptions import IrisUserWarning - __all__ = ["izip"] @@ -165,8 +164,7 @@ def izip(*cubes, **kwargs): warnings.warn( "Iterating over coordinate '%s' in step whose " "definitions match but whose values " - "differ." % coord_a.name(), - category=IrisUserWarning, + "differ." % coord_a.name() ) return _ZipSlicesIterator( diff --git a/lib/iris/palette.py b/lib/iris/palette.py index f640cf5687..3ba17ffc97 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Load, configure and register color map palettes and initialise color map meta-data mappings. diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 535bed3a64..4d6681e94e 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provide conversion to and from Pandas data structures. @@ -28,7 +29,6 @@ from iris._deprecation import warn_deprecated from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube, CubeList -from iris.exceptions import IrisIgnoringWarning def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): @@ -82,7 +82,7 @@ def _add_iris_coord(cube, name, points, dim, calendar=None): Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array. """ # Most functionality has been abstracted to _get_dimensional_metadata, - # allowing reuse in as_cube() and as_cubes(). + # allowing re-use in as_cube() and as_cubes(). 
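# For orientation, a brief usage sketch of the public as_cubes() conversion
# that this helper serves (the DataFrame contents are made up):
import iris.pandas
import pandas as pd

df = pd.DataFrame(
    {"temperature": [280.0, 281.5, 279.2]},
    index=pd.Index([0, 1, 2], name="x"),
)
cubes = iris.pandas.as_cubes(df)  # CubeList holding one "temperature" cube
print(cubes)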
coord = _get_dimensional_metadata(name, points, calendar) if coord.__class__ == DimCoord: @@ -398,7 +398,7 @@ def as_cubes( cube_shape = getattr(pandas_index, "levshape", (pandas_index.nunique(),)) n_rows = len(pandas_structure) - if np.prod(cube_shape) > n_rows: + if np.product(cube_shape) > n_rows: message = ( f"Not all index values have a corresponding row - {n_rows} rows " f"cannot be reshaped into {cube_shape}. Consider padding with NaN " @@ -446,7 +446,7 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): if columns_ignored: ignored_args = ", ".join([t[2] for t in class_arg_mapping]) message = f"The input pandas_structure is a Series; ignoring arguments: {ignored_args} ." - warnings.warn(message, category=IrisIgnoringWarning) + warnings.warn(message) class_arg_mapping = [] non_data_names = [] @@ -896,7 +896,7 @@ def merge_metadata(meta_var_list): "'iris.FUTURE.pandas_ndim = True'. More info is in the " "documentation." ) - warnings.warn(message, category=FutureWarning) + warnings.warn(message, FutureWarning) # The legacy behaviour. data = cube.data diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 977cbbcfc2..ebcb5c3bcb 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` interface. @@ -21,6 +22,7 @@ import matplotlib.animation as animation import matplotlib.axes import matplotlib.collections as mpl_collections +import matplotlib.dates as mpl_dates from matplotlib.offsetbox import AnchoredText import matplotlib.pyplot as plt import matplotlib.ticker as mpl_ticker @@ -32,7 +34,7 @@ import iris.coord_systems import iris.coords import iris.cube -from iris.exceptions import IrisError, IrisUnsupportedPlottingWarning +from iris.exceptions import IrisError # Importing iris.palette to register the brewer palettes. import iris.palette @@ -452,6 +454,10 @@ def _draw_2d_from_bounds(draw_method_name, cube, *args, **kwargs): else: values = coord.contiguous_bounds() values = _fixup_dates(coord, values) + if values.dtype == np.dtype(object) and isinstance( + values[0], datetime.datetime + ): + values = mpl_dates.date2num(values) plot_arrays.append(values) @@ -551,6 +557,10 @@ def _draw_2d_from_points(draw_method_name, arg_func, cube, *args, **kwargs): ) plot_arrays.append(np.arange(values.size)) string_axes[axis_name] = values + elif values.dtype == np.dtype(object) and isinstance( + values[0], datetime.datetime + ): + plot_arrays.append(mpl_dates.date2num(values)) else: plot_arrays.append(values) @@ -2013,7 +2023,7 @@ def update_animation_iris(i, cubes, vmin, vmax, coords): "use: {}." ) msg = msg.format(plot_func.__module__, supported) - warnings.warn(msg, category=IrisUnsupportedPlottingWarning) + warnings.warn(msg, UserWarning) supported = ["contour", "contourf", "pcolor", "pcolormesh"] if plot_func.__name__ not in supported: @@ -2022,7 +2032,7 @@ def update_animation_iris(i, cubes, vmin, vmax, coords): "use: {}." ) msg = msg.format(plot_func.__name__, supported) - warnings.warn(msg, category=IrisUnsupportedPlottingWarning) + warnings.warn(msg, UserWarning) # Determine plot range. 
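# The branches added to plot.py above convert datetime points to matplotlib
# date numbers before plotting. A tiny standalone illustration of what
# date2num returns with matplotlib's default epoch (1970-01-01 for
# matplotlib >= 3.3):
import datetime

import matplotlib.dates as mpl_dates

vals = [datetime.datetime(1970, 1, 2), datetime.datetime(1970, 1, 3)]
print(mpl_dates.date2num(vals))  # -> [1. 2.]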
vmin = kwargs.pop("vmin", min([cc.data.min() for cc in cubes])) diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 15f4cf11e2..9209d4b3b7 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ High-level plotting extensions to :mod:`iris.plot`. @@ -13,7 +14,9 @@ """ import cf_units +from matplotlib import __version__ as _mpl_version import matplotlib.pyplot as plt +from packaging import version import iris.config import iris.coords @@ -41,11 +44,18 @@ def _title(cube_or_coord, with_units): units.is_unknown() or units.is_no_unit() or units == cf_units.Unit("1") - or units.is_time_reference() ): if _use_symbol(units): units = units.symbol + elif units.is_time_reference(): + # iris.plot uses matplotlib.dates.date2num, which is fixed to the below unit. + if version.parse(_mpl_version) >= version.parse("3.3"): + days_since = "1970-01-01" + else: + days_since = "0001-01-01" + units = "days since {}".format(days_since) title += " / {}".format(units) + return title @@ -107,8 +117,10 @@ def _label_with_points(cube, result=None, ndims=2, coords=None, axes=None): def _get_titles(u_object, v_object): if u_object is None: u_object = iplt._u_object_from_v_object(v_object) - xlabel = _title(u_object, with_units=True) - ylabel = _title(v_object, with_units=True) + xunits = u_object is not None and not u_object.units.is_time_reference() + yunits = not v_object.units.is_time_reference() + xlabel = _title(u_object, with_units=xunits) + ylabel = _title(v_object, with_units=yunits) title = "" if u_object is None: title = _title(v_object, with_units=False) diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index 7bbbca83a9..3d00d3bb3b 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Contains symbol definitions for use with :func:`iris.plot.symbols`. diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 83fdb6af89..5529b899c5 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Provides testing capabilities and customisations specific to Iris. diff --git a/lib/iris/tests/experimental/__init__.py b/lib/iris/tests/experimental/__init__.py index d31931720c..fa2390c45b 100644 --- a/lib/iris/tests/experimental/__init__.py +++ b/lib/iris/tests/experimental/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Experimental code is tested in this package. diff --git a/lib/iris/tests/experimental/regrid/__init__.py b/lib/iris/tests/experimental/regrid/__init__.py index 6837b12e91..653505e3d5 100644 --- a/lib/iris/tests/experimental/regrid/__init__.py +++ b/lib/iris/tests/experimental/regrid/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Regridding code is tested in this package. diff --git a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 9190548b15..07961a319a 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test area weighted regridding. @@ -601,20 +602,6 @@ def test_circular_subset(self): @tests.skip_data def test_non_circular_subset(self): - """ - Test regridding behaviour when the source grid has circular latitude. - - This tests the specific case when the longitude coordinate of the - source grid has the `circular` attribute as `False` but otherwise spans - the full 360 degrees. - - Note: the previous behaviour was to always mask target cells when they - spanned the boundary of max/min longitude and `circular` was `False`, - however this has been changed so that such cells will only be masked - when there is a gap between max longitude and min longitude. In this - test these cells are expected to be unmasked and therefore the result - will be equal to the above test for circular longitudes. - """ src = iris.tests.stock.global_pp() src.coord("latitude").guess_bounds() src.coord("longitude").guess_bounds() @@ -633,53 +620,9 @@ def test_non_circular_subset(self): dest.add_dim_coord(dest_lat, 0) dest.add_dim_coord(dest_lon, 1) - res = regrid_area_weighted(src, dest) - self.assertArrayShapeStats(res, (40, 7), 285.653960, 15.212710) - - @tests.skip_data - def test__proper_non_circular_subset(self): - """ - Test regridding behaviour when the source grid has circular latitude. - - This tests the specific case when the longitude coordinate of the - source grid does not span the full 360 degrees. Target cells which span - the boundary of max/min longitude will contain a section which is out - of bounds from the source grid and are therefore expected to be masked. - """ - src = iris.tests.stock.global_pp() - src.coord("latitude").guess_bounds() - src.coord("longitude").guess_bounds() - src_lon_bounds = src.coord("longitude").bounds.copy() - # Leave a small gap between the first and last longitude value. 
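# The removed test around this point exercises area-weighted regridding with
# circular and near-circular longitudes. For orientation, a self-contained
# sketch of one public route, cube.regrid() with the AreaWeighted scheme
# (the grids below are made up):
import numpy as np

import iris.analysis
from iris.coord_systems import GeogCS
from iris.coords import DimCoord
from iris.cube import Cube


def _global_grid(n_lat, n_lon):
    cs = GeogCS(6371229.0)
    lat = DimCoord(
        np.linspace(-87.5, 87.5, n_lat),
        standard_name="latitude",
        units="degrees",
        coord_system=cs,
    )
    lon = DimCoord(
        np.linspace(0.0, 360.0 - 360.0 / n_lon, n_lon),
        standard_name="longitude",
        units="degrees",
        coord_system=cs,
        circular=True,
    )
    lat.guess_bounds()
    lon.guess_bounds()
    cube = Cube(np.zeros((n_lat, n_lon)))
    cube.add_dim_coord(lat, 0)
    cube.add_dim_coord(lon, 1)
    return cube


src = _global_grid(36, 72)
src.data = np.random.random(src.shape)
target = _global_grid(18, 36)
result = src.regrid(target, iris.analysis.AreaWeighted())
print(result.summary(shorten=True))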
- src_lon_bounds[0, 0] += 0.001 - src_lon = src.coord("longitude").copy( - points=src.coord("longitude").points, bounds=src_lon_bounds - ) - src.remove_coord("longitude") - src.add_dim_coord(src_lon, 1) - dest_lat = src.coord("latitude")[0:40] - dest_lon = iris.coords.DimCoord( - [-15.0, -10.0, -5.0, 0.0, 5.0, 10.0, 15.0], - standard_name="longitude", - units="degrees", - coord_system=dest_lat.coord_system, - ) - # Note target grid (in -180 to 180) src in 0 to 360 - dest_lon.guess_bounds() - data = np.zeros((dest_lat.shape[0], dest_lon.shape[0])) - dest = iris.cube.Cube(data) - dest.add_dim_coord(dest_lat, 0) - dest.add_dim_coord(dest_lon, 1) - res = regrid_area_weighted(src, dest) self.assertArrayShapeStats(res, (40, 7), 285.550814, 15.190245) - # The target cells straddling the gap between min and max source - # longitude should be masked. - expected_mask = np.zeros(res.shape) - expected_mask[:, 3] = 1 - assert np.array_equal(expected_mask, res.data.mask) - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py index 2c7bad59ff..467c040eb3 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`. diff --git a/lib/iris/tests/experimental/test_raster.py b/lib/iris/tests/experimental/test_raster.py index 736263f196..ffd03e6f4d 100644 --- a/lib/iris/tests/experimental/test_raster.py +++ b/lib/iris/tests/experimental/test_raster.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. import iris.tests as tests # isort:skip import PIL.Image diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 3c440264f9..5ee555cb6e 100755 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # !/usr/bin/env python """ Contains Iris graphic testing utilities diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 1c29d4e551..a355f2cf82 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # !/usr/bin/env python """ Provides "diff-like" comparison of images. @@ -27,7 +28,6 @@ from matplotlib.testing.exceptions import ImageComparisonFailure # noqa import matplotlib.widgets as mwidget # noqa -from iris.exceptions import IrisIgnoringWarning # noqa import iris.tests # noqa import iris.tests.graphics as graphics # noqa @@ -73,12 +73,17 @@ def diff_viewer( repo = graphics.read_repo_json() def accept(event): - repo[test_id] = phash - graphics.write_repo_json(repo) - out_file = result_dir / (test_id + ".png") - result_path.rename(out_file) - msg = f"ACCEPTED: {result_path.name} -> {out_file.name}" - print(msg) + if test_id not in repo: + repo[test_id] = phash + graphics.write_repo_json(repo) + out_file = result_dir / (test_id + ".png") + result_path.rename(out_file) + msg = f"ACCEPTED: {result_path.name} -> {out_file.name}" + print(msg) + else: + msg = f"DUPLICATE: {result_path.name} -> {expected_path.name} (ignored)" + print(msg) + result_path.unlink() diff_fname.unlink() plt.close() @@ -151,7 +156,7 @@ def step_over_diffs(result_dir, display=True): distance = graphics.get_phash(reference_image_path) - phash except FileNotFoundError: wmsg = "Ignoring unregistered test result {!r}." - warnings.warn(wmsg.format(test_key), category=IrisIgnoringWarning) + warnings.warn(wmsg.format(test_key)) continue processed = True diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index cd4c83e9b1..02ddaad2cb 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # !/usr/bin/env python """ Updates imagerepo.json based on the baseline images diff --git a/lib/iris/tests/integration/__init__.py b/lib/iris/tests/integration/__init__.py index 29a99030dd..71b911cbb0 100644 --- a/lib/iris/tests/integration/__init__.py +++ b/lib/iris/tests/integration/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for the :mod:`iris` package.""" diff --git a/lib/iris/tests/integration/analysis/__init__.py b/lib/iris/tests/integration/analysis/__init__.py index 4ec86f2d5a..20b6250b70 100644 --- a/lib/iris/tests/integration/analysis/__init__.py +++ b/lib/iris/tests/integration/analysis/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Integration tests for the :mod:`iris.analysis` package.""" diff --git a/lib/iris/tests/integration/analysis/test_area_weighted.py b/lib/iris/tests/integration/analysis/test_area_weighted.py index 49c80d7bba..d01da79a56 100644 --- a/lib/iris/tests/integration/analysis/test_area_weighted.py +++ b/lib/iris/tests/integration/analysis/test_area_weighted.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for area weighted regridding.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/attrs_matrix_results_load.json b/lib/iris/tests/integration/attrs_matrix_results_load.json deleted file mode 100644 index a1d37708a9..0000000000 --- a/lib/iris/tests/integration/attrs_matrix_results_load.json +++ /dev/null @@ -1,1019 +0,0 @@ -{ - "case_singlevar_localonly": { - "input": "G-La", - "localstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "G-La" - ] - }, - "globalstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "G-La" - ] - }, - "userstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "G-La" - ] - } - }, - "case_singlevar_globalonly": { - "input": "GaL-", - "localstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "GaL-" - ] - }, - "globalstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "GaL-" - ] - }, - "userstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "GaL-" - ] - } - }, - "case_singlevar_glsame": { - "input": "GaLa", - "localstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "GaLa" - ] - }, - "globalstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "GaLa" - ] - }, - "userstyle": { - "legacy": [ - "G-La" - ], - "newstyle": [ - "GaLa" - ] - } - }, - "case_singlevar_gldiffer": { - "input": "GaLb", - "localstyle": { - "legacy": [ - "G-Lb" - ], - "newstyle": [ - "GaLb" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lb" - ], - "newstyle": [ - "GaLb" - ] - }, - "userstyle": { - "legacy": [ - "G-Lb" - ], - "newstyle": [ - "GaLb" - ] - } - }, - "case_multivar_same_noglobal": { - "input": "G-Laa", - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-Laa" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-Laa" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-Laa" - ] - } - }, - "case_multivar_same_sameglobal": { - "input": "GaLaa", - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLaa" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLaa" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLaa" - ] - } - }, - "case_multivar_same_diffglobal": { - "input": "GaLbb", - "localstyle": { - "legacy": [ - "G-Lbb" - ], - "newstyle": [ - "GaLbb" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lbb" - ], - "newstyle": [ - "GaLbb" - ] - }, - "userstyle": { - "legacy": [ - "G-Lbb" - ], - "newstyle": [ - "GaLbb" - ] - } - }, - "case_multivar_differ_noglobal": { - "input": "G-Lab", - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lab" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lab" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lab" - ] - } - }, - 
"case_multivar_differ_diffglobal": { - "input": "GaLbc", - "localstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLbc" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLbc" - ] - }, - "userstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLbc" - ] - } - }, - "case_multivar_differ_sameglobal": { - "input": "GaLab", - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLab" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLab" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLab" - ] - } - }, - "case_multivar_1none_noglobal": { - "input": "G-La-", - "localstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-La-" - ] - }, - "globalstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-La-" - ] - }, - "userstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-La-" - ] - } - }, - "case_multivar_1none_diffglobal": { - "input": "GaLb-", - "localstyle": { - "legacy": [ - "G-Lba" - ], - "newstyle": [ - "GaLb-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lba" - ], - "newstyle": [ - "GaLb-" - ] - }, - "userstyle": { - "legacy": [ - "G-Lba" - ], - "newstyle": [ - "GaLb-" - ] - } - }, - "case_multivar_1none_sameglobal": { - "input": "GaLa-", - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLa-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLa-" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLa-" - ] - } - }, - "case_multisource_gsame_lnone": { - "input": [ - "GaL-", - "GaL-" - ], - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaL--" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaL--" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaL--" - ] - } - }, - "case_multisource_gsame_lallsame": { - "input": [ - "GaLa", - "GaLa" - ], - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLaa" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLaa" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLaa" - ] - } - }, - "case_multisource_gsame_l1same1none": { - "input": [ - "GaLa", - "GaL-" - ], - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLa-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLa-" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "GaLa-" - ] - } - }, - "case_multisource_gsame_l1same1other": { - "input": [ - "GaLa", - "GaLb" - ], - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLab" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLab" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLab" - ] - } - }, - "case_multisource_gsame_lallother": { - "input": [ - "GaLb", - "GaLb" - ], - "localstyle": { - "legacy": [ - "G-Lbb" - ], - "newstyle": [ - "GaLbb" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lbb" - ], - "newstyle": [ - "GaLbb" - ] - }, - "userstyle": { - "legacy": [ - "G-Lbb" - ], - "newstyle": [ - "GaLbb" - ] - } - }, - "case_multisource_gsame_lalldiffer": { - "input": [ - "GaLb", - "GaLc" - ], - "localstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLbc" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLbc" - ] - }, - "userstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLbc" - ] - } - }, - "case_multisource_gnone_l1one1none": { - "input": [ 
- "G-La", - "G-L-" - ], - "localstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-La-" - ] - }, - "globalstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-La-" - ] - }, - "userstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-La-" - ] - } - }, - "case_multisource_gnone_l1one1same": { - "input": [ - "G-La", - "G-La" - ], - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-Laa" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-Laa" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-Laa" - ] - } - }, - "case_multisource_gnone_l1one1other": { - "input": [ - "G-La", - "G-Lb" - ], - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lab" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lab" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lab" - ] - } - }, - "case_multisource_g1none_lnone": { - "input": [ - "GaL-", - "G-L-" - ], - "localstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-L-", - "GaL-" - ] - }, - "globalstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-L-", - "GaL-" - ] - }, - "userstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-L-", - "GaL-" - ] - } - }, - "case_multisource_g1none_l1same1none": { - "input": [ - "GaLa", - "G-L-" - ], - "localstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-L-", - "GaLa" - ] - }, - "globalstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-L-", - "GaLa" - ] - }, - "userstyle": { - "legacy": [ - "G-La-" - ], - "newstyle": [ - "G-L-", - "GaLa" - ] - } - }, - "case_multisource_g1none_l1none1same": { - "input": [ - "GaL-", - "G-La" - ], - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-La", - "GaL-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-La", - "GaL-" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-La", - "GaL-" - ] - } - }, - "case_multisource_g1none_l1diff1none": { - "input": [ - "GaLb", - "G-L-" - ], - "localstyle": { - "legacy": [ - "G-Lb-" - ], - "newstyle": [ - "G-L-", - "GaLb" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lb-" - ], - "newstyle": [ - "G-L-", - "GaLb" - ] - }, - "userstyle": { - "legacy": [ - "G-Lb-" - ], - "newstyle": [ - "G-L-", - "GaLb" - ] - } - }, - "case_multisource_g1none_l1none1diff": { - "input": [ - "GaL-", - "G-Lb" - ], - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lb", - "GaL-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lb", - "GaL-" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "G-Lb", - "GaL-" - ] - } - }, - "case_multisource_g1none_lallsame": { - "input": [ - "GaLa", - "G-La" - ], - "localstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-La", - "GaLa" - ] - }, - "globalstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-La", - "GaLa" - ] - }, - "userstyle": { - "legacy": [ - "G-Laa" - ], - "newstyle": [ - "G-La", - "GaLa" - ] - } - }, - "case_multisource_g1none_lallother": { - "input": [ - "GaLc", - "G-Lc" - ], - "localstyle": { - "legacy": [ - "G-Lcc" - ], - "newstyle": [ - "G-Lc", - "GaLc" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lcc" - ], - "newstyle": [ - "G-Lc", - "GaLc" - ] - }, - "userstyle": { - "legacy": [ - "G-Lcc" - ], - "newstyle": [ - "G-Lc", - "GaLc" - ] - } - }, - "case_multisource_gdiff_lnone": { - "input": [ - "GaL-", - "GbL-" - ], - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ 
- "GaL-", - "GbL-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaL-", - "GbL-" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaL-", - "GbL-" - ] - } - }, - "case_multisource_gdiff_l1same1none": { - "input": [ - "GaLa", - "GbL-" - ], - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLa", - "GbL-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLa", - "GbL-" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLa", - "GbL-" - ] - } - }, - "case_multisource_gdiff_l1diff1none": { - "input": [ - "GaLb", - "GcL-" - ], - "localstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLb", - "GcL-" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLb", - "GcL-" - ] - }, - "userstyle": { - "legacy": [ - "G-Lbc" - ], - "newstyle": [ - "GaLb", - "GcL-" - ] - } - }, - "case_multisource_gdiff_lallsame": { - "input": [ - "GaLa", - "GbLb" - ], - "localstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLa", - "GbLb" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLa", - "GbLb" - ] - }, - "userstyle": { - "legacy": [ - "G-Lab" - ], - "newstyle": [ - "GaLa", - "GbLb" - ] - } - }, - "case_multisource_gdiff_lallother": { - "input": [ - "GaLc", - "GbLc" - ], - "localstyle": { - "legacy": [ - "G-Lcc" - ], - "newstyle": [ - "GaLc", - "GbLc" - ] - }, - "globalstyle": { - "legacy": [ - "G-Lcc" - ], - "newstyle": [ - "GaLc", - "GbLc" - ] - }, - "userstyle": { - "legacy": [ - "G-Lcc" - ], - "newstyle": [ - "GaLc", - "GbLc" - ] - } - } -} \ No newline at end of file diff --git a/lib/iris/tests/integration/attrs_matrix_results_roundtrip.json b/lib/iris/tests/integration/attrs_matrix_results_roundtrip.json deleted file mode 100644 index 3446c7f312..0000000000 --- a/lib/iris/tests/integration/attrs_matrix_results_roundtrip.json +++ /dev/null @@ -1,983 +0,0 @@ -{ - "case_singlevar_localonly": { - "input": "G-La", - "localstyle": { - "unsplit": [ - "G-La" - ], - "split": [ - "G-La" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "G-La" - ] - }, - "userstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "G-La" - ] - } - }, - "case_singlevar_globalonly": { - "input": "GaL-", - "localstyle": { - "unsplit": [ - "G-La" - ], - "split": [ - "GaL-" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaL-" - ] - }, - "userstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaL-" - ] - } - }, - "case_singlevar_glsame": { - "input": "GaLa", - "localstyle": { - "unsplit": [ - "G-La" - ], - "split": [ - "GaLa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaLa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaLa" - ] - } - }, - "case_singlevar_gldiffer": { - "input": "GaLb", - "localstyle": { - "unsplit": [ - "G-Lb" - ], - "split": [ - "GaLb" - ] - }, - "globalstyle": { - "unsplit": [ - "GbL-" - ], - "split": [ - "GaLb" - ] - }, - "userstyle": { - "unsplit": [ - "GbL-" - ], - "split": [ - "GaLb" - ] - } - }, - "case_multivar_same_noglobal": { - "input": "G-Laa", - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - "case_multivar_same_sameglobal": { - "input": "GaLaa", - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLaa" - ] - }, - 
"globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - } - }, - "case_multivar_same_diffglobal": { - "input": "GaLbb", - "localstyle": { - "unsplit": [ - "G-Lbb" - ], - "split": [ - "GaLbb" - ] - }, - "globalstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - }, - "userstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - } - }, - "case_multivar_differ_noglobal": { - "input": "G-Lab", - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multivar_differ_diffglobal": { - "input": "GaLbc", - "localstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - } - }, - "case_multivar_differ_sameglobal": { - "input": "GaLab", - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - } - }, - "case_multivar_1none_noglobal": { - "input": "G-La-", - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multivar_1none_diffglobal": { - "input": "GaLb-", - "localstyle": { - "unsplit": [ - "G-Lba" - ], - "split": [ - "GaLb-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lba" - ], - "split": [ - "GaLb-" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lba" - ], - "split": [ - "GaLb-" - ] - } - }, - "case_multivar_1none_sameglobal": { - "input": "GaLa-", - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLa-" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - } - }, - "case_multisource_gsame_lnone": { - "input": [ - "GaL-", - "GaL-" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaL--" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaL--" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaL--" - ] - } - }, - "case_multisource_gsame_lallsame": { - "input": [ - "GaLa", - "GaLa" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLaa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - } - }, - "case_multisource_gsame_l1same1none": { - "input": [ - "GaLa", - "GaL-" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLa-" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - } - }, - "case_multisource_gsame_l1same1other": { - "input": [ - "GaLa", - "GaLb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - } - 
}, - "case_multisource_gsame_lallother": { - "input": [ - "GaLb", - "GaLb" - ], - "localstyle": { - "unsplit": [ - "G-Lbb" - ], - "split": [ - "GaLbb" - ] - }, - "globalstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - }, - "userstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - } - }, - "case_multisource_gsame_lalldiffer": { - "input": [ - "GaLb", - "GaLc" - ], - "localstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - } - }, - "case_multisource_gnone_l1one1none": { - "input": [ - "G-La", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multisource_gnone_l1one1same": { - "input": [ - "G-La", - "G-La" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - "case_multisource_gnone_l1one1other": { - "input": [ - "G-La", - "G-Lb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_g1none_lnone": { - "input": [ - "GaL-", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multisource_g1none_l1same1none": { - "input": [ - "GaLa", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multisource_g1none_l1none1same": { - "input": [ - "GaL-", - "G-La" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - "case_multisource_g1none_l1diff1none": { - "input": [ - "GaLb", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-Lb-" - ], - "split": [ - "G-Lb-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lb-" - ], - "split": [ - "G-Lb-" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lb-" - ], - "split": [ - "G-Lb-" - ] - } - }, - "case_multisource_g1none_l1none1diff": { - "input": [ - "GaL-", - "G-Lb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_g1none_lallsame": { - "input": [ - "GaLa", - "G-La" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - 
"case_multisource_g1none_lallother": { - "input": [ - "GaLc", - "G-Lc" - ], - "localstyle": { - "unsplit": [ - "G-Lcc" - ], - "split": [ - "G-Lcc" - ] - }, - "globalstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - }, - "userstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - } - }, - "case_multisource_gdiff_lnone": { - "input": [ - "GaL-", - "GbL-" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_gdiff_l1same1none": { - "input": [ - "GaLa", - "GbL-" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_gdiff_l1diff1none": { - "input": [ - "GaLb", - "GcL-" - ], - "localstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "G-Lbc" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "G-Lbc" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "G-Lbc" - ] - } - }, - "case_multisource_gdiff_lallsame": { - "input": [ - "GaLa", - "GbLb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_gdiff_lallother": { - "input": [ - "GaLc", - "GbLc" - ], - "localstyle": { - "unsplit": [ - "G-Lcc" - ], - "split": [ - "G-Lcc" - ] - }, - "globalstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - }, - "userstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - } - } -} \ No newline at end of file diff --git a/lib/iris/tests/integration/attrs_matrix_results_save.json b/lib/iris/tests/integration/attrs_matrix_results_save.json deleted file mode 100644 index 3446c7f312..0000000000 --- a/lib/iris/tests/integration/attrs_matrix_results_save.json +++ /dev/null @@ -1,983 +0,0 @@ -{ - "case_singlevar_localonly": { - "input": "G-La", - "localstyle": { - "unsplit": [ - "G-La" - ], - "split": [ - "G-La" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "G-La" - ] - }, - "userstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "G-La" - ] - } - }, - "case_singlevar_globalonly": { - "input": "GaL-", - "localstyle": { - "unsplit": [ - "G-La" - ], - "split": [ - "GaL-" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaL-" - ] - }, - "userstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaL-" - ] - } - }, - "case_singlevar_glsame": { - "input": "GaLa", - "localstyle": { - "unsplit": [ - "G-La" - ], - "split": [ - "GaLa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaLa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL-" - ], - "split": [ - "GaLa" - ] - } - }, - "case_singlevar_gldiffer": { - "input": "GaLb", - "localstyle": { - "unsplit": [ - "G-Lb" - ], - "split": [ - "GaLb" - ] - }, - "globalstyle": { - "unsplit": [ - "GbL-" - ], - "split": [ - "GaLb" - ] - }, - "userstyle": { - "unsplit": [ - "GbL-" - ], - "split": [ - "GaLb" - ] - } - }, - "case_multivar_same_noglobal": { - "input": "G-Laa", - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - 
"unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - "case_multivar_same_sameglobal": { - "input": "GaLaa", - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLaa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - } - }, - "case_multivar_same_diffglobal": { - "input": "GaLbb", - "localstyle": { - "unsplit": [ - "G-Lbb" - ], - "split": [ - "GaLbb" - ] - }, - "globalstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - }, - "userstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - } - }, - "case_multivar_differ_noglobal": { - "input": "G-Lab", - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multivar_differ_diffglobal": { - "input": "GaLbc", - "localstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - } - }, - "case_multivar_differ_sameglobal": { - "input": "GaLab", - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - } - }, - "case_multivar_1none_noglobal": { - "input": "G-La-", - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multivar_1none_diffglobal": { - "input": "GaLb-", - "localstyle": { - "unsplit": [ - "G-Lba" - ], - "split": [ - "GaLb-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lba" - ], - "split": [ - "GaLb-" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lba" - ], - "split": [ - "GaLb-" - ] - } - }, - "case_multivar_1none_sameglobal": { - "input": "GaLa-", - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLa-" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - } - }, - "case_multisource_gsame_lnone": { - "input": [ - "GaL-", - "GaL-" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaL--" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaL--" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaL--" - ] - } - }, - "case_multisource_gsame_lallsame": { - "input": [ - "GaLa", - "GaLa" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLaa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLaa" - ] - } - }, - "case_multisource_gsame_l1same1none": { - "input": [ - "GaLa", - "GaL-" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "GaLa-" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "GaLa-" - ] - } - }, - 
"case_multisource_gsame_l1same1other": { - "input": [ - "GaLa", - "GaLb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "GaLab" - ] - } - }, - "case_multisource_gsame_lallother": { - "input": [ - "GaLb", - "GaLb" - ], - "localstyle": { - "unsplit": [ - "G-Lbb" - ], - "split": [ - "GaLbb" - ] - }, - "globalstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - }, - "userstyle": { - "unsplit": [ - "GbL--" - ], - "split": [ - "GaLbb" - ] - } - }, - "case_multisource_gsame_lalldiffer": { - "input": [ - "GaLb", - "GaLc" - ], - "localstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "GaLbc" - ] - } - }, - "case_multisource_gnone_l1one1none": { - "input": [ - "G-La", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multisource_gnone_l1one1same": { - "input": [ - "G-La", - "G-La" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - "case_multisource_gnone_l1one1other": { - "input": [ - "G-La", - "G-Lb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_g1none_lnone": { - "input": [ - "GaL-", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multisource_g1none_l1same1none": { - "input": [ - "GaLa", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - }, - "userstyle": { - "unsplit": [ - "G-La-" - ], - "split": [ - "G-La-" - ] - } - }, - "case_multisource_g1none_l1none1same": { - "input": [ - "GaL-", - "G-La" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - "case_multisource_g1none_l1diff1none": { - "input": [ - "GaLb", - "G-L-" - ], - "localstyle": { - "unsplit": [ - "G-Lb-" - ], - "split": [ - "G-Lb-" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lb-" - ], - "split": [ - "G-Lb-" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lb-" - ], - "split": [ - "G-Lb-" - ] - } - }, - "case_multisource_g1none_l1none1diff": { - "input": [ - "GaL-", - "G-Lb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - 
"case_multisource_g1none_lallsame": { - "input": [ - "GaLa", - "G-La" - ], - "localstyle": { - "unsplit": [ - "G-Laa" - ], - "split": [ - "G-Laa" - ] - }, - "globalstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - }, - "userstyle": { - "unsplit": [ - "GaL--" - ], - "split": [ - "G-Laa" - ] - } - }, - "case_multisource_g1none_lallother": { - "input": [ - "GaLc", - "G-Lc" - ], - "localstyle": { - "unsplit": [ - "G-Lcc" - ], - "split": [ - "G-Lcc" - ] - }, - "globalstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - }, - "userstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - } - }, - "case_multisource_gdiff_lnone": { - "input": [ - "GaL-", - "GbL-" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_gdiff_l1same1none": { - "input": [ - "GaLa", - "GbL-" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_gdiff_l1diff1none": { - "input": [ - "GaLb", - "GcL-" - ], - "localstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "G-Lbc" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "G-Lbc" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lbc" - ], - "split": [ - "G-Lbc" - ] - } - }, - "case_multisource_gdiff_lallsame": { - "input": [ - "GaLa", - "GbLb" - ], - "localstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "globalstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - }, - "userstyle": { - "unsplit": [ - "G-Lab" - ], - "split": [ - "G-Lab" - ] - } - }, - "case_multisource_gdiff_lallother": { - "input": [ - "GaLc", - "GbLc" - ], - "localstyle": { - "unsplit": [ - "G-Lcc" - ], - "split": [ - "G-Lcc" - ] - }, - "globalstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - }, - "userstyle": { - "unsplit": [ - "GcL--" - ], - "split": [ - "G-Lcc" - ] - } - } -} \ No newline at end of file diff --git a/lib/iris/tests/integration/aux_factory/__init__.py b/lib/iris/tests/integration/aux_factory/__init__.py index 3ee14d5add..58ba6fb82b 100644 --- a/lib/iris/tests/integration/aux_factory/__init__.py +++ b/lib/iris/tests/integration/aux_factory/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for the :mod:`iris.aux_factory` package.""" diff --git a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py index 15f65d52ad..4b2464b272 100644 --- a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integratation tests for the `iris.aux_factory.OceanSigmaZFactory` class. diff --git a/lib/iris/tests/integration/concatenate/__init__.py b/lib/iris/tests/integration/concatenate/__init__.py index d15b201abe..fb136098ee 100644 --- a/lib/iris/tests/integration/concatenate/__init__.py +++ b/lib/iris/tests/integration/concatenate/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for the :mod:`iris._concatenate` package.""" diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py index 9bd6bcb0c5..2543e2931b 100644 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ b/lib/iris/tests/integration/concatenate/test_concatenate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integration tests for concatenating cubes with differing time coord epochs using :func:`iris.util.unify_time_units`. diff --git a/lib/iris/tests/integration/experimental/__init__.py b/lib/iris/tests/integration/experimental/__init__.py index 79722df7a3..269cf3dd9a 100644 --- a/lib/iris/tests/integration/experimental/__init__.py +++ b/lib/iris/tests/integration/experimental/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for the :mod:`iris.experimental` package.""" diff --git a/lib/iris/tests/integration/experimental/test_CubeRepresentation.py b/lib/iris/tests/integration/experimental/test_CubeRepresentation.py index 0c1386d59f..48a3e51b52 100644 --- a/lib/iris/tests/integration/experimental/test_CubeRepresentation.py +++ b/lib/iris/tests/integration/experimental/test_CubeRepresentation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Integration tests for cube html representation.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py index 4ae48fe6f9..742adc8c15 100644 --- a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py +++ b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for experimental regridding.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py index 1bd39695ec..af97458ded 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integration tests for NetCDF-UGRID file loading. @@ -15,10 +16,8 @@ from collections.abc import Iterable -import pytest - from iris import Constraint, load -from iris.exceptions import IrisCfWarning +from iris.experimental.ugrid import logger from iris.experimental.ugrid.load import ( PARSE_UGRID_ON_LOAD, load_mesh, @@ -169,8 +168,8 @@ def create_synthetic_file(self, **create_kwargs): def test_mesh_bad_topology_dimension(self): # Check that the load generates a suitable warning. - warn_regex = r"topology_dimension.* ignoring" - with pytest.warns(IrisCfWarning, match=warn_regex): + log_regex = r"topology_dimension.* ignoring" + with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): template = "minimal_bad_topology_dim" dim_line = "mesh_var:topology_dimension = 1 ;" # which is wrong ! cube = self.create_synthetic_test_cube( @@ -182,8 +181,8 @@ def test_mesh_bad_topology_dimension(self): def test_mesh_no_topology_dimension(self): # Check that the load generates a suitable warning. - warn_regex = r"Mesh variable.* has no 'topology_dimension'" - with pytest.warns(IrisCfWarning, match=warn_regex): + log_regex = r"Mesh variable.* has no 'topology_dimension'" + with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): template = "minimal_bad_topology_dim" dim_line = "" # don't create ANY topology_dimension property cube = self.create_synthetic_test_cube( @@ -195,8 +194,8 @@ def test_mesh_no_topology_dimension(self): def test_mesh_bad_cf_role(self): # Check that the load generates a suitable warning. 
- warn_regex = r"inappropriate cf_role" - with pytest.warns(IrisCfWarning, match=warn_regex): + log_regex = r"inappropriate cf_role" + with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): template = "minimal_bad_mesh_cf_role" dim_line = 'mesh_var:cf_role = "foo" ;' _ = self.create_synthetic_test_cube( @@ -205,8 +204,8 @@ def test_mesh_bad_cf_role(self): def test_mesh_no_cf_role(self): # Check that the load generates a suitable warning. - warn_regex = r"no cf_role attribute" - with pytest.warns(IrisCfWarning, match=warn_regex): + log_regex = r"no cf_role attribute" + with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): template = "minimal_bad_mesh_cf_role" dim_line = "" _ = self.create_synthetic_test_cube( diff --git a/lib/iris/tests/integration/experimental/test_ugrid_save.py b/lib/iris/tests/integration/experimental/test_ugrid_save.py index 710ed6941d..803ac71caa 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_save.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integration tests for NetCDF-UGRID file saving. diff --git a/lib/iris/tests/integration/fast_load/__init__.py b/lib/iris/tests/integration/fast_load/__init__.py index 40fc56f129..a94785ca58 100644 --- a/lib/iris/tests/integration/fast_load/__init__.py +++ b/lib/iris/tests/integration/fast_load/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :mod:`iris.fileformats.um` fast load functions.""" diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index a37f1eef07..318292615b 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for fast-loading FF and PP files.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/integration/merge/__init__.py b/lib/iris/tests/integration/merge/__init__.py index ae16d4fe45..9374976532 100644 --- a/lib/iris/tests/integration/merge/__init__.py +++ b/lib/iris/tests/integration/merge/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Integration tests for the :mod:`iris._merge` package.""" diff --git a/lib/iris/tests/integration/merge/test_merge.py b/lib/iris/tests/integration/merge/test_merge.py index 87b52fd85d..f5f92a7a7d 100644 --- a/lib/iris/tests/integration/merge/test_merge.py +++ b/lib/iris/tests/integration/merge/test_merge.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integration tests for merging cubes. diff --git a/lib/iris/tests/integration/netcdf/__init__.py b/lib/iris/tests/integration/netcdf/__init__.py index bd62b4d988..f500b52520 100644 --- a/lib/iris/tests/integration/netcdf/__init__.py +++ b/lib/iris/tests/integration/netcdf/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for loading and saving netcdf files.""" diff --git a/lib/iris/tests/integration/netcdf/test__dask_locks.py b/lib/iris/tests/integration/netcdf/test__dask_locks.py index 70891bc40c..c41af1b356 100644 --- a/lib/iris/tests/integration/netcdf/test__dask_locks.py +++ b/lib/iris/tests/integration/netcdf/test__dask_locks.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :mod:`iris.fileformats.netcdf._dask_locks` package. diff --git a/lib/iris/tests/integration/netcdf/test_attributes.py b/lib/iris/tests/integration/netcdf/test_attributes.py index aab91bcb31..a73d6c7d49 100644 --- a/lib/iris/tests/integration/netcdf/test_attributes.py +++ b/lib/iris/tests/integration/netcdf/test_attributes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for attribute-related loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/netcdf/test_aux_factories.py b/lib/iris/tests/integration/netcdf/test_aux_factories.py index 6b3dde6fd1..d89f275336 100644 --- a/lib/iris/tests/integration/netcdf/test_aux_factories.py +++ b/lib/iris/tests/integration/netcdf/test_aux_factories.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Integration tests for aux-factory-related loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index b7b21911e5..3175664b4c 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for coord-system-related loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index 177e9ce325..616feb3b0e 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -1,11 +1,11 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integration tests for delayed saving. """ -import re import warnings from cf_units import Unit @@ -17,20 +17,13 @@ import pytest import iris -from iris.exceptions import IrisSaverFillValueWarning from iris.fileformats.netcdf._thread_safe_nc import default_fillvals +from iris.fileformats.netcdf.saver import SaverFillValueWarning import iris.tests from iris.tests.stock import realistic_4d class Test__lazy_stream_data: - # Ensure all saves are done with split-atttribute saving, - # -- because some of these tests are sensitive to unexpected warnings. - @pytest.fixture(autouse=True) - def all_saves_with_split_attrs(self): - with iris.FUTURE.context(save_split_attrs=True): - yield - @pytest.fixture(autouse=True) def output_path(self, tmp_path): # A temporary output netcdf-file path, **unique to each test call**. @@ -198,36 +191,19 @@ def test_scheduler_types( if not save_is_delayed: assert result is None + assert len(logged_warnings) == 2 issued_warnings = [log.message for log in logged_warnings] else: assert result is not None assert len(logged_warnings) == 0 - with warnings.catch_warnings(record=True) as logged_warnings: - # The compute *returns* warnings from the delayed operations. - issued_warnings = result.compute() - issued_warnings = [ - log.message for log in logged_warnings - ] + issued_warnings - - warning_messages = [warning.args[0] for warning in issued_warnings] - if scheduler_type == "DistributedScheduler": - # Ignore any "large data transfer" messages generated, - # specifically when testing with the Distributed scheduler. - # These may not always occur and don't reflect something we want to - # test for. 
- large_transfer_message_regex = re.compile( - "Sending large graph.* may cause some slowdown", re.DOTALL - ) - warning_messages = [ - message - for message in warning_messages - if not large_transfer_message_regex.search(message) - ] + warnings.simplefilter("error") + issued_warnings = result.compute() - # In all cases, should get 2 fill value warnings overall. - assert len(warning_messages) == 2 + assert len(issued_warnings) == 2 expected_msg = "contains unmasked data points equal to the fill-value" - assert all(expected_msg in message for message in warning_messages) + assert all( + expected_msg in warning.args[0] for warning in issued_warnings + ) def test_time_of_writing( self, save_is_delayed, output_path, scheduler_type @@ -335,7 +311,7 @@ def test_fill_warnings(self, warning_type, output_path, save_is_delayed): result_warnings = [ log.message for log in logged_warnings - if isinstance(log.message, IrisSaverFillValueWarning) + if isinstance(log.message, SaverFillValueWarning) ] if save_is_delayed: @@ -344,9 +320,7 @@ def test_fill_warnings(self, warning_type, output_path, save_is_delayed): # Complete the operation now with warnings.catch_warnings(): # NOTE: warnings should *not* be issued here, instead they are returned. - warnings.simplefilter( - "error", category=IrisSaverFillValueWarning - ) + warnings.simplefilter("error", category=SaverFillValueWarning) result_warnings = result.compute() # Either way, we should now have 2 similar warnings. diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py index 0fc619e4cb..dc0c29455f 100644 --- a/lib/iris/tests/integration/netcdf/test_general.py +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before # importing anything else. @@ -24,7 +25,7 @@ from iris.coords import CellMethod from iris.cube import Cube, CubeList import iris.exceptions -from iris.fileformats.netcdf import Saver +from iris.fileformats.netcdf import Saver, UnknownCellMethodWarning # Get the netCDF4 module, but in a sneaky way that avoids triggering the "do not import # netCDF4" check in "iris.tests.test_coding_standards.test_netcdf4_import()". @@ -140,9 +141,7 @@ def test_unknown_method(self): warning_messages = [ warn for warn in warning_messages - if isinstance( - warn, iris.exceptions.IrisUnknownCellMethodWarning - ) + if isinstance(warn, UnknownCellMethodWarning) ] self.assertEqual(len(warning_messages), 1) message = warning_messages[0].args[0] diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py index 4e5da18bbd..3395296e11 100644 --- a/lib/iris/tests/integration/netcdf/test_self_referencing.py +++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for iris#3367 - loading a self-referencing NetCDF file.""" # Import iris.tests first so that some things can be initialised before @@ -15,7 +16,6 @@ import numpy as np import iris -from iris.exceptions import IrisCfMissingVarWarning from iris.fileformats.netcdf import _thread_safe_nc @@ -46,9 +46,7 @@ def test_cmip6_volcello_load_issue_3367(self): with mock.patch("warnings.warn") as warn: # ensure file loads without failure cube = iris.load_cube(self.fname) - warn.assert_has_calls( - [mock.call(expected_msg, category=IrisCfMissingVarWarning)] - ) + warn.assert_has_calls([mock.call(expected_msg)]) # extra check to ensure correct variable was found assert cube.standard_name == "ocean_volume" @@ -115,9 +113,7 @@ def test_self_referencing_load_issue_3367(self): with mock.patch("warnings.warn") as warn: # ensure file loads without failure cube = iris.load_cube(self.temp_dir_path) - warn.assert_called_with( - expected_msg, category=IrisCfMissingVarWarning - ) + warn.assert_called_with(expected_msg) # extra check to ensure correct variable was found assert cube.standard_name == "ocean_volume" diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py index 916cbf67e1..5ed32d0671 100644 --- a/lib/iris/tests/integration/netcdf/test_thread_safety.py +++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integration tests covering thread safety during loading/saving netcdf files. @@ -37,7 +38,7 @@ def tiny_chunks(): def _check_tiny_loaded_chunks(cube: Cube): assert cube.has_lazy_data() cube_lazy_data = cube.core_data() - assert np.prod(cube_lazy_data.chunksize) < cube_lazy_data.size + assert np.product(cube_lazy_data.chunksize) < cube_lazy_data.size with dask.config.set({"array.chunk-size": "1KiB"}): yield _check_tiny_loaded_chunks diff --git a/lib/iris/tests/integration/plot/__init__.py b/lib/iris/tests/integration/plot/__init__.py index c67303c0f3..aafa488e2d 100644 --- a/lib/iris/tests/integration/plot/__init__.py +++ b/lib/iris/tests/integration/plot/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for the :mod:`iris.plot` package.""" diff --git a/lib/iris/tests/integration/plot/test_animate.py b/lib/iris/tests/integration/plot/test_animate.py index 1354ef4289..ef19dbb108 100644 --- a/lib/iris/tests/integration/plot/test_animate.py +++ b/lib/iris/tests/integration/plot/test_animate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Integration tests for :func:`iris.plot.animate`. diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py index c742564c7d..a306e6c82f 100644 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ b/lib/iris/tests/integration/plot/test_colorbar.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test interaction between :mod:`iris.plot` and :func:`matplotlib.pyplot.colorbar` diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index 2b3a59d093..d438c09bd5 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test plot of time coord with non-standard calendar. diff --git a/lib/iris/tests/integration/plot/test_nzdateline.py b/lib/iris/tests/integration/plot/test_nzdateline.py index 2c9360e9ea..0051549794 100644 --- a/lib/iris/tests/integration/plot/test_nzdateline.py +++ b/lib/iris/tests/integration/plot/test_nzdateline.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test set up of limited area map extents which bridge the date line. diff --git a/lib/iris/tests/integration/plot/test_plot_2d_coords.py b/lib/iris/tests/integration/plot/test_plot_2d_coords.py index 673f8817d6..1b95899803 100644 --- a/lib/iris/tests/integration/plot/test_plot_2d_coords.py +++ b/lib/iris/tests/integration/plot/test_plot_2d_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test plots with two dimensional coordinates. diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py index 652a205fd8..37f506bd17 100644 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ b/lib/iris/tests/integration/plot/test_vector_plots.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
""" Test some key usages of :func:`iris.plot.quiver`. diff --git a/lib/iris/tests/integration/test_Datums.py b/lib/iris/tests/integration/test_Datums.py index 8325ad901a..43287c7040 100755 --- a/lib/iris/tests/integration/test_Datums.py +++ b/lib/iris/tests/integration/test_Datums.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :class:`iris.coord_systems` datum support.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_PartialDateTime.py b/lib/iris/tests/integration/test_PartialDateTime.py index ed995beda7..563af1035c 100644 --- a/lib/iris/tests/integration/test_PartialDateTime.py +++ b/lib/iris/tests/integration/test_PartialDateTime.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :class:`iris.time.PartialDateTime`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_climatology.py b/lib/iris/tests/integration/test_climatology.py index f15428eb9d..54d43858fb 100644 --- a/lib/iris/tests/integration/test_climatology.py +++ b/lib/iris/tests/integration/test_climatology.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for loading and saving netcdf files.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_cube.py b/lib/iris/tests/integration/test_cube.py index 8f3ac5fb48..ad6666d28e 100644 --- a/lib/iris/tests/integration/test_cube.py +++ b/lib/iris/tests/integration/test_cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :class:`iris.cube.Cube`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_ff.py b/lib/iris/tests/integration/test_ff.py index b613da385f..0b0ccf4c5c 100644 --- a/lib/iris/tests/integration/test_ff.py +++ b/lib/iris/tests/integration/test_ff.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Integration tests for loading LBC fieldsfiles.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py deleted file mode 100644 index b09b408827..0000000000 --- a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py +++ /dev/null @@ -1,1678 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -""" -Integration tests for loading and saving netcdf file attributes. - -Notes: -(1) attributes in netCDF files can be either "global attributes", or variable -("local") type. - -(2) in CF terms, this testcode classifies specific attributes (names) as either -"global" = names recognised by convention as normally stored in a file-global -setting; "local" = recognised names specifying details of variable data -encoding, which only make sense as a "local" attribute (i.e. on a variable), -and "user" = any additional attributes *not* recognised in conventions, which -might be recorded either globally or locally. - -""" -import inspect -import json -import os -from pathlib import Path -import re -from typing import Iterable, List, Optional, Union -import warnings - -import numpy as np -import pytest - -import iris -import iris.coord_systems -from iris.coords import DimCoord -from iris.cube import Cube -import iris.fileformats.netcdf -import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc4 - -# First define the known controlled attribute names defined by netCDf and CF conventions -# -# Note: certain attributes are "normally" global (e.g. "Conventions"), whilst others -# will only usually appear on a data-variable (e.g. "scale_factor"", "coordinates"). -# I'm calling these 'global-style' and 'local-style'. -# Any attributes either belongs to one of these 2 groups, or neither. Those 3 distinct -# types may then have different behaviour in Iris load + save. - -# A list of "global-style" attribute names : those which should be global attributes by -# default (i.e. file- or group-level, *not* attached to a variable). - -_GLOBAL_TEST_ATTRS = set(iris.fileformats.netcdf.saver._CF_GLOBAL_ATTRS) -# Remove this one, which has peculiar behaviour + is tested separately -# N.B. this is not the same as 'Conventions', but is caught in the crossfire when that -# one is processed. -_GLOBAL_TEST_ATTRS -= set(["conventions"]) -_GLOBAL_TEST_ATTRS = sorted(_GLOBAL_TEST_ATTRS) - - -# Define a fixture to parametrise tests over the 'global-style' test attributes. -# This just provides a more concise way of writing parametrised tests. -@pytest.fixture(params=_GLOBAL_TEST_ATTRS) -def global_attr(request): - # N.B. "request" is a standard PyTest fixture - return request.param # Return the name of the attribute to test. - - -# A list of "local-style" attribute names : those which should be variable attributes -# by default (aka "local", "variable" or "data" attributes) . -_LOCAL_TEST_ATTRS = ( - iris.fileformats.netcdf.saver._CF_DATA_ATTRS - + iris.fileformats.netcdf.saver._UKMO_DATA_ATTRS -) - - -# Define a fixture to parametrise over the 'local-style' test attributes. -# This just provides a more concise way of writing parametrised tests. -@pytest.fixture(params=_LOCAL_TEST_ATTRS) -def local_attr(request): - # N.B. "request" is a standard PyTest fixture - return request.param # Return the name of the attribute to test. 
- - -# Define whether to parametrise over split-attribute saving -# Just for now, so that we can run against legacy code. -_SPLIT_SAVE_SUPPORTED = hasattr(iris.FUTURE, "save_split_attrs") -_SPLIT_PARAM_VALUES = [False, True] -_SPLIT_PARAM_IDS = ["nosplit", "split"] -_MATRIX_LOAD_RESULTSTYLES = ["legacy", "newstyle"] -if not _SPLIT_SAVE_SUPPORTED: - _SPLIT_PARAM_VALUES.remove(True) - _SPLIT_PARAM_IDS.remove("split") - _MATRIX_LOAD_RESULTSTYLES.remove("newstyle") - - -_SKIP_WARNCHECK = "_no_warnings_check" - - -def check_captured_warnings( - expected_keys: List[str], - captured_warnings: List[warnings.WarningMessage], - allow_possible_legacy_warning: bool = False, -): - """ - Compare captured warning messages with a list of regexp-matches. - - We allow them to occur in any order, and replace each actual result in the list - with its matching regexp, if any, as this makes failure results much easier to - comprehend. - - """ - # TODO: when iris.FUTURE.save_split_attrs is removed, we can remove the - # 'allow_possible_legacy_warning' arg. - - if expected_keys is None: - expected_keys = [] - elif hasattr(expected_keys, "upper"): - # Handle a single string - if expected_keys == _SKIP_WARNCHECK: - # No check at all in this case - return - expected_keys = [expected_keys] - - if allow_possible_legacy_warning: - # Allow but do not require a "saving without split-attributes" warning. - legacy_message_key = ( - "Saving to netcdf with legacy-style attribute handling for backwards " - "compatibility." - ) - expected_keys.append(legacy_message_key) - - expected_keys = [re.compile(key) for key in expected_keys] - found_results = [str(warning.message) for warning in captured_warnings] - remaining_keys = expected_keys.copy() - for i_message, message in enumerate(found_results.copy()): - for key in remaining_keys: - if key.search(message): - # Hit : replace one message in the list with its matching "key" - found_results[i_message] = key - # remove the matching key - remaining_keys.remove(key) - # skip on to next message - break - - if allow_possible_legacy_warning: - # Remove any unused "legacy attribute saving" key. - # N.B. this is the *only* key we will tolerate not being used. - expected_keys = [ - key for key in expected_keys if key != legacy_message_key - ] - - assert set(found_results) == set(expected_keys) - - -class MixinAttrsTesting: - @staticmethod - def _calling_testname(): - """ - Search up the callstack for a function named "test_*", and return the name for - use as a test identifier. - - Idea borrowed from :meth:`iris.tests.IrisTest.result_path`. - - Returns - ------- - test_name : str - Returns a string, with the initial "test_" removed. - """ - test_name = None - stack = inspect.stack() - for frame in stack[1:]: - full_name = frame[3] - if full_name.startswith("test_"): - # Return the name with the initial "test_" removed. - test_name = full_name.replace("test_", "") - break - # Search should not fail, unless we were called from an inappropriate place? - assert test_name is not None - return test_name - - @pytest.fixture(autouse=True) - def make_tempdir(self, tmp_path_factory): - """ - Automatically-run fixture to activate the 'tmp_path_factory' fixture on *every* - test: Make a directory for temporary files, and record it on the test instance. - - N.B. "tmp_path_factory" is a standard PyTest fixture, which provides a dirpath - *shared* by all tests. This is a bit quicker and more debuggable than having a - directory per-testcase. 
- """ - # Store the temporary directory path on the test instance - self.tmpdir = str(tmp_path_factory.getbasetemp()) - - def _testfile_path(self, basename: str) -> str: - # Make a filepath in the temporary directory, based on the name of the calling - # test method, and the "self.attrname" it sets up. - testname = self._calling_testname() - # Turn that into a suitable temporary filename - ext_name = getattr(self, "testname_extension", "") - if ext_name: - basename = basename + "_" + ext_name - path_str = f"{self.tmpdir}/{self.__class__.__name__}__test_{testname}-{self.attrname}__{basename}.nc" - return path_str - - @staticmethod - def _default_vars_and_attrvalues(vars_and_attrvalues): - # Simple default strategy : turn a simple value into {'var': value} - if not isinstance(vars_and_attrvalues, dict): - # Treat single non-dict argument as a value for a single variable - vars_and_attrvalues = {"var": vars_and_attrvalues} - return vars_and_attrvalues - - def create_testcase_files_or_cubes( - self, - attr_name: str, - global_value_file1: Optional[str] = None, - var_values_file1: Union[None, str, dict] = None, - global_value_file2: Optional[str] = None, - var_values_file2: Union[None, str, dict] = None, - cubes: bool = False, - ): - """ - Create temporary input netcdf files, or cubes, with specific content. - - Creates a temporary netcdf test file (or two) with the given global and - variable-local attributes. Or build cubes, similarly. - If ``cubes`` is ``True``, save cubes in ``self.input_cubes``. - Else save filepaths in ``self.input_filepaths``. - - Note: 'var_values_file' args are dictionaries. The named variables are - created, with an attribute = the dictionary value, *except* that a dictionary - value of None means that a local attribute is _not_ created on the variable. - """ - # save attribute on the instance - self.attrname = attr_name - - if not cubes: - # Make some input file paths. - filepath1 = self._testfile_path("testfile") - filepath2 = self._testfile_path("testfile2") - - def make_file( - filepath: str, global_value=None, var_values=None - ) -> str: - ds = threadsafe_nc4.DatasetWrapper(filepath, "w") - if global_value is not None: - ds.setncattr(attr_name, global_value) - ds.createDimension("x", 3) - # Rationalise the per-variable requirements - # N.B. this *always* makes at least one variable, as otherwise we would - # load no cubes. - var_values = self._default_vars_and_attrvalues(var_values) - for var_name, value in var_values.items(): - v = ds.createVariable(var_name, int, ("x",)) - if value is not None: - v.setncattr(attr_name, value) - ds.close() - return filepath - - def make_cubes(var_name, global_value=None, var_values=None): - cubes = [] - var_values = self._default_vars_and_attrvalues(var_values) - for varname, local_value in var_values.items(): - cube = Cube(np.arange(3.0), var_name=var_name) - cubes.append(cube) - dimco = DimCoord(np.arange(3.0), var_name="x") - cube.add_dim_coord(dimco, 0) - if not hasattr(cube.attributes, "globals"): - # N.B. For now, also support oldstyle "single" cube attribute - # dictionaries, so that we can generate legacy results to compore - # with the "new world" results. 
- single_value = global_value - if local_value is not None: - single_value = local_value - if single_value is not None: - cube.attributes[attr_name] = single_value - else: - if global_value is not None: - cube.attributes.globals[attr_name] = global_value - if local_value is not None: - cube.attributes.locals[attr_name] = local_value - return cubes - - if cubes: - results = make_cubes("v1", global_value_file1, var_values_file1) - if global_value_file2 is not None or var_values_file2 is not None: - results.extend( - make_cubes("v2", global_value_file2, var_values_file2) - ) - else: - results = [ - make_file(filepath1, global_value_file1, var_values_file1) - ] - if global_value_file2 is not None or var_values_file2 is not None: - # Make a second testfile and add it to files-to-be-loaded. - results.append( - make_file(filepath2, global_value_file2, var_values_file2) - ) - - # Save results on the instance - if cubes: - self.input_cubes = results - else: - self.input_filepaths = results - return results - - def run_testcase( - self, - attr_name: str, - values: Union[List, List[List]], - create_cubes_or_files: str = "files", - ) -> None: - """ - Create testcase inputs (files or cubes) with specified attributes. - - Parameters - ---------- - attr_name : str - name for all attributes created in this testcase. - Also saved as ``self.attrname``, as used by ``fetch_results``. - values : list - list, or lists, of values for created attributes, each containing one global - and one-or-more local attribute values as [global, local1, local2...] - create_cubes_or_files : str, default "files" - create either cubes or testfiles. - - If ``create_cubes_or_files`` == "files", create one temporary netCDF file per - values-list, and record in ``self.input_filepaths``. - Else if ``create_cubes_or_files`` == "cubes", create sets of cubes with common - global values and store all of them to ``self.input_cubes``. - - """ - # Save common attribute-name on the instance - self.attrname = attr_name - - # Standardise input to a list-of-lists, each inner list = [global, *locals] - assert isinstance(values, list) - if not isinstance(values[0], list): - values = [values] - assert len(values) in (1, 2) - assert len(values[0]) > 1 - - # Decode into global1, *locals1, and optionally global2, *locals2 - global1 = values[0][0] - vars1 = {} - i_var = 0 - for value in values[0][1:]: - vars1[f"var_{i_var}"] = value - i_var += 1 - if len(values) == 1: - global2 = None - vars2 = None - else: - assert len(values) == 2 - global2 = values[1][0] - vars2 = {} - for value in values[1][1:]: - vars2[f"var_{i_var}"] = value - i_var += 1 - - # Create test files or cubes (and store data on the instance) - assert create_cubes_or_files in ("cubes", "files") - make_cubes = create_cubes_or_files == "cubes" - self.create_testcase_files_or_cubes( - attr_name=attr_name, - global_value_file1=global1, - var_values_file1=vars1, - global_value_file2=global2, - var_values_file2=vars2, - cubes=make_cubes, - ) - - def fetch_results( - self, - filepath: str = None, - cubes: Iterable[Cube] = None, - oldstyle_combined: bool = False, - ): - """ - Return testcase results from an output file or cubes in a standardised form. - - Unpick the global+local values of the attribute ``self.attrname``, resulting - from a test operation. 
- A file result is always [global_value, *local_values] - A cubes result is [*[global_value, *local_values]] (over different global vals) - - When ``oldstyle_combined`` is ``True``, simulate the "legacy" style results, - that is when each cube had a single combined attribute dictionary. - This enables us to check against former behaviour, by combining results into a - single dictionary. N.B. per-cube single results are then returned in the form: - [None, cube1, cube2...]. - N.B. if results are from a *file*, this key has **no effect**. - - """ - attr_name = self.attrname - if filepath is not None: - # Fetch global and local values from a file - try: - ds = threadsafe_nc4.DatasetWrapper(filepath) - global_result = ( - ds.getncattr(attr_name) - if attr_name in ds.ncattrs() - else None - ) - # Fetch local attr value from all data variables : In our testcases, - # that is all *except* dimcoords (ones named after dimensions). - local_vars_results = [ - ( - var.name, - ( - var.getncattr(attr_name) - if attr_name in var.ncattrs() - else None - ), - ) - for var in ds.variables.values() - if var.name not in ds.dimensions - ] - finally: - ds.close() - # This version always returns a single result set [global, local1[, local2]] - # Return global, plus locals sorted by varname - local_vars_results = sorted(local_vars_results, key=lambda x: x[0]) - results = [global_result] + [val for _, val in local_vars_results] - else: - assert cubes is not None - # Sort result cubes according to a standard ordering. - cubes = sorted(cubes, key=lambda cube: cube.name()) - # Fetch globals and locals from cubes. - # This way returns *multiple* result 'sets', one for each global value - if oldstyle_combined or not _SPLIT_SAVE_SUPPORTED: - # Use all-combined dictionaries in place of actual cubes' attributes - cube_attr_dicts = [dict(cube.attributes) for cube in cubes] - # Return results as if all cubes had global=None - results = [ - [None] - + [ - cube_attr_dict.get(attr_name, None) - for cube_attr_dict in cube_attr_dicts - ] - ] - else: - # Return a result-set for each occurring global value (possibly - # including a 'None'). - global_values = set( - cube.attributes.globals.get(attr_name, None) - for cube in cubes - ) - results = [ - [globalval] - + [ - cube.attributes.locals.get(attr_name, None) - for cube in cubes - if cube.attributes.globals.get(attr_name, None) - == globalval - ] - for globalval in sorted(global_values, key=str) - ] - return results - - -# Define all the testcases for different parameter input structures : -# - combinations of matching+differing, global+local params -# - these are interpreted differently for the 3 main test types : Load/Save/Roundtrip -_MATRIX_TESTCASE_INPUTS = { - "case_singlevar_localonly": "G-La", - "case_singlevar_globalonly": "GaL-", - "case_singlevar_glsame": "GaLa", - "case_singlevar_gldiffer": "GaLb", - "case_multivar_same_noglobal": "G-Laa", - "case_multivar_same_sameglobal": "GaLaa", - "case_multivar_same_diffglobal": "GaLbb", - "case_multivar_differ_noglobal": "G-Lab", - "case_multivar_differ_diffglobal": "GaLbc", - "case_multivar_differ_sameglobal": "GaLab", - "case_multivar_1none_noglobal": "G-La-", - "case_multivar_1none_diffglobal": "GaLb-", - "case_multivar_1none_sameglobal": "GaLa-", - # Note: the multi-set input cases are more complex. - # These are encoded as *pairs* of specs, for 2 different files, or cubes with - # independent global values. 
- # We assume that there can be nothing "special" about a var's interaction with - # another one from the same (as opposed to the "other") file. - "case_multisource_gsame_lnone": ["GaL-", "GaL-"], - "case_multisource_gsame_lallsame": ["GaLa", "GaLa"], - "case_multisource_gsame_l1same1none": ["GaLa", "GaL-"], - "case_multisource_gsame_l1same1other": ["GaLa", "GaLb"], - "case_multisource_gsame_lallother": ["GaLb", "GaLb"], - "case_multisource_gsame_lalldiffer": ["GaLb", "GaLc"], - "case_multisource_gnone_l1one1none": ["G-La", "G-L-"], - "case_multisource_gnone_l1one1same": ["G-La", "G-La"], - "case_multisource_gnone_l1one1other": ["G-La", "G-Lb"], - "case_multisource_g1none_lnone": ["GaL-", "G-L-"], - "case_multisource_g1none_l1same1none": ["GaLa", "G-L-"], - "case_multisource_g1none_l1none1same": ["GaL-", "G-La"], - "case_multisource_g1none_l1diff1none": ["GaLb", "G-L-"], - "case_multisource_g1none_l1none1diff": ["GaL-", "G-Lb"], - "case_multisource_g1none_lallsame": ["GaLa", "G-La"], - "case_multisource_g1none_lallother": ["GaLc", "G-Lc"], - "case_multisource_gdiff_lnone": ["GaL-", "GbL-"], - "case_multisource_gdiff_l1same1none": ["GaLa", "GbL-"], - "case_multisource_gdiff_l1diff1none": ["GaLb", "GcL-"], - "case_multisource_gdiff_lallsame": ["GaLa", "GbLb"], - "case_multisource_gdiff_lallother": ["GaLc", "GbLc"], -} -_MATRIX_TESTCASES = list(_MATRIX_TESTCASE_INPUTS.keys()) - -# -# Define the attrs against which all matrix tests are run -# -max_param_attrs = None -# max_param_attrs = 5 - -_MATRIX_ATTRNAMES = _LOCAL_TEST_ATTRS[:max_param_attrs] -_MATRIX_ATTRNAMES += _GLOBAL_TEST_ATTRS[:max_param_attrs] -_MATRIX_ATTRNAMES += ["user"] - -# remove special-cases, for now : all these behave irregularly (i.e. unlike the known -# "globalstyle", or "localstyle" generic cases). -# N.B. not including "Conventions", which is not in the globals list, so won't be -# matrix-tested unless we add it specifically. -# TODO: decide if any of these need to be tested, as separate test-styles. -_SPECIAL_ATTRS = [ - "ukmo__process_flags", - "missing_value", - "standard_error_multiplier", - "STASH", - "um_stash_source", -] -_MATRIX_ATTRNAMES = [ - attr for attr in _MATRIX_ATTRNAMES if attr not in _SPECIAL_ATTRS -] - - -# -# A routine to work "backwards" from an attribute name to its "style", i.e. type category. -# Possible styles are "globalstyle", "localstyle", "userstyle". -# -_ATTR_STYLES = ["localstyle", "globalstyle", "userstyle"] - - -def deduce_attr_style(attrname: str) -> str: - # Extract the attribute "style type" from an attr_param name - if attrname in _LOCAL_TEST_ATTRS: - style = "localstyle" - elif attrname in _GLOBAL_TEST_ATTRS: - style = "globalstyle" - else: - assert attrname == "user" - style = "userstyle" - return style - - -# -# Decode a matrix "input spec" to codes for global + local values. -# -def decode_matrix_input(input_spec): - # Decode a matrix-test input specification, like "GaLbc", into lists of values. - # E.G. "GaLbc" -> ["a", "b", "c"] - # ["GaLbc", "GbLbc"] -> [["a", "b", "c"], ["b", "b", c"]] - # N.B. in this form "values" are all one-character strings. 
- def decode_specstring(spec: str) -> List[Union[str, None]]: - # Decode an input spec-string to input/output attribute values - assert spec[0] == "G" and spec[2] == "L" - allvals = spec[1] + spec[3:] - result = [None if valchar == "-" else valchar for valchar in allvals] - return result - - if isinstance(input_spec, str): - # Single-source spec (one cube or one file) - vals = decode_specstring(input_spec) - result = [vals] - else: - # Dual-source spec (two files, or sets of cubes with a common global value) - vals_A = decode_specstring(input_spec[0]) - vals_B = decode_specstring(input_spec[1]) - result = [vals_A, vals_B] - - return result - - -def encode_matrix_result(results: List[List[str]]) -> List[str]: - # Re-code a set of output results, [*[global-value, *local-values]] as a list of - # strings, like ["GaL-b"] or ["GaLabc", "GbLabc"]. - # N.B. again assuming that all values are just one-character strings, or None. - assert isinstance(results, Iterable) and len(results) >= 1 - if not isinstance(results[0], list): - results = [results] - assert all( - all(val is None or isinstance(val, str) for val in vals) - for vals in results - ) - - # Translate "None" values to "-" - def valrep(val): - return "-" if val is None else val - - results = list( - "".join(["G", valrep(vals[0]), "L"] + list(map(valrep, vals[1:]))) - for vals in results - ) - return results - - -# -# The "expected" matrix test results are stored in JSON files (one for each test-type). -# We can also save the found results. -# -_MATRIX_TESTTYPES = ("load", "save", "roundtrip") - - -@pytest.fixture(autouse=True, scope="session") -def matrix_results(): - matrix_filepaths = { - testtype: ( - Path(__file__).parent / f"attrs_matrix_results_{testtype}.json" - ) - for testtype in _MATRIX_TESTTYPES - } - # An environment variable can trigger saving of the results. - save_matrix_results = bool( - int(os.environ.get("SAVEALL_MATRIX_RESULTS", "0")) - ) - - matrix_results = {} - for testtype in _MATRIX_TESTTYPES: - # Either fetch from file, or initialise, a results matrix for each test type - # (load/save/roundtrip). - input_path = matrix_filepaths[testtype] - if input_path.exists(): - # Load from file with json. - with open(input_path) as file_in: - testtype_results = json.load(file_in) - # Check compatibility (in case we changed the test-specs list) - assert set(testtype_results.keys()) == set(_MATRIX_TESTCASES) - assert all( - testtype_results[key]["input"] == _MATRIX_TESTCASE_INPUTS[key] - for key in _MATRIX_TESTCASES - ) - else: - # Create empty matrix results content (for one test-type) - testtype_results = {} - for testcase in _MATRIX_TESTCASES: - test_case_results = {} - testtype_results[testcase] = test_case_results - # Every testcase dict has an "input" slot with the test input spec, - # basically just to help human readability. - test_case_results["input"] = _MATRIX_TESTCASE_INPUTS[testcase] - for attrstyle in _ATTR_STYLES: - if testtype == "load": - # "load" test results have a "legacy" result (as for a single - # combined attrs dictionary), and a "newstyle" result (with - # the new split dictionary). - test_case_results[attrstyle] = { - "legacy": None, - "newstyle": None, - } - else: - # "save"/"roundtrip"-type results record 2 result sets, - # (unsplit/split) for each attribute-style - # - i.e. when saved without/with split_attrs_saving enabled. 
- test_case_results[attrstyle] = { - "unsplit": None, - "split": None, - } - - # Build complete data: matrix_results[TESTTYPES][TESTCASES][ATTR_STYLES] - matrix_results[testtype] = testtype_results - - # Pass through to all the tests : they can also update it, if enabled. - yield save_matrix_results, matrix_results - - if save_matrix_results: - for testtype in _MATRIX_TESTTYPES: - output_path = matrix_filepaths[testtype] - results = matrix_results[testtype] - with open(output_path, "w") as file_out: - json.dump(results, file_out, indent=2) - - -class TestRoundtrip(MixinAttrsTesting): - """ - Test handling of attributes in roundtrip netcdf-iris-netcdf. - - This behaviour should be (almost) unchanged by the adoption of - split-attribute handling. - - NOTE: the tested combinations in the 'TestLoad' test all match tests here, but not - *all* of the tests here are useful there. To avoid confusion (!) the ones which are - paralleled in TestLoad there have the identical test-names. However, as the tests - are all numbered that means there are missing numbers there. - The tests are numbered only so it is easier to review the discovered test list - (which is sorted). - - """ - - # Parametrise all tests over split/unsplit saving. - @pytest.fixture( - params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True - ) - def do_split(self, request): - do_split = request.param - self.save_split_attrs = do_split - return do_split - - def run_roundtrip_testcase(self, attr_name, values): - """ - Initialise the testcase from the passed-in controls, configure the input - files and run a save-load roundtrip to produce the output file. - - The name of the attribute, and the input and output temporary filepaths are - stored on the instance, where "self.check_roundtrip_results()" can get them. - - """ - self.run_testcase( - attr_name=attr_name, values=values, create_cubes_or_files="files" - ) - self.result_filepath = self._testfile_path("result") - - with warnings.catch_warnings(record=True) as captured_warnings: - # Do a load+save to produce a testable output result in a new file. - cubes = iris.load(self.input_filepaths) - # Ensure stable result order. - cubes = sorted(cubes, key=lambda cube: cube.name()) - do_split = getattr(self, "save_split_attrs", False) - kwargs = ( - dict(save_split_attrs=do_split) - if _SPLIT_SAVE_SUPPORTED - else dict() - ) - with iris.FUTURE.context(**kwargs): - iris.save(cubes, self.result_filepath) - - self.captured_warnings = captured_warnings - - def check_roundtrip_results(self, expected, expected_warnings=None): - """ - Run checks on the generated output file. - - The counterpart to :meth:`run_roundtrip_testcase`, with similar arguments. - Check existence (or not) of a global attribute, and a number of local - (variable) attributes. - Values of 'None' mean to check that the relevant global/local attribute does - *not* exist. - - Also check the warnings captured during the testcase run. - """ - # N.B. there is only ever one result-file, but it can contain various variables - # which came from different input files. - results = self.fetch_results(filepath=self.result_filepath) - assert results == expected - check_captured_warnings( - expected_warnings, - self.captured_warnings, - # N.B. only allow a legacy-attributes warning when NOT saving split-attrs - allow_possible_legacy_warning=not self.save_split_attrs, - ) - - ####################################################### - # Tests on "user-style" attributes. 
- # This means any arbitrary attribute which a user might have added -- i.e. one with - # a name which is *not* recognised in the netCDF or CF conventions. - # - - def test_01_userstyle_single_global(self): - self.run_roundtrip_testcase( - attr_name="myname", values=["single-value", None] - ) - # Default behaviour for a general global user-attribute. - # It simply remains global. - self.check_roundtrip_results(["single-value", None]) - - def test_02_userstyle_single_local(self, do_split): - # Default behaviour for a general local user-attribute. - # It results in a "promoted" global attribute. - self.run_roundtrip_testcase( - attr_name="myname", # A generic "user" attribute with no special handling - values=[None, "single-value"], - ) - if do_split: - expected = [None, "single-value"] - else: - expected = ["single-value", None] - self.check_roundtrip_results(expected) - - def test_03_userstyle_multiple_different(self, do_split): - # Default behaviour for general user-attributes. - # The global attribute is lost because there are local ones. - self.run_roundtrip_testcase( - attr_name="random", # A generic "user" attribute with no special handling - values=[ - ["common_global", "f1v1", "f1v2"], - ["common_global", "x1", "x2"], - ], - ) - expected_result = ["common_global", "f1v1", "f1v2", "x1", "x2"] - if not do_split: - # in legacy mode, global is lost - expected_result[0] = None - # just check they are all there and distinct - self.check_roundtrip_results(expected_result) - - def test_04_userstyle_matching_promoted(self, do_split): - # matching local user-attributes are "promoted" to a global one. - # (but not when saving split attributes) - input_values = ["global_file1", "same-value", "same-value"] - self.run_roundtrip_testcase( - attr_name="random", - values=input_values, - ) - if do_split: - expected = input_values - else: - expected = ["same-value", None, None] - self.check_roundtrip_results(expected) - - def test_05_userstyle_matching_crossfile_promoted(self, do_split): - # matching user-attributes are promoted, even across input files. - # (but not when saving split attributes) - self.run_roundtrip_testcase( - attr_name="random", - values=[ - ["global_file1", "same-value", "same-value"], - [None, "same-value", "same-value"], - ], - ) - if do_split: - # newstyle saves: locals are preserved, mismathced global is *lost* - expected_result = [ - None, - "same-value", - "same-value", - "same-value", - "same-value", - ] - # warnings about the clash - expected_warnings = [ - "Saving.* global attributes.* as local", - 'attributes.* of cube "var_0" were not saved', - 'attributes.* of cube "var_1" were not saved', - ] - else: - # oldstyle saves: matching locals promoted, override original global - expected_result = ["same-value", None, None, None, None] - expected_warnings = None - - self.check_roundtrip_results(expected_result, expected_warnings) - - def test_06_userstyle_nonmatching_remainlocal(self, do_split): - # Non-matching user attributes remain 'local' to the individual variables. - input_values = ["global_file1", "value-1", "value-2"] - if do_split: - # originals are preserved - expected_result = input_values - else: - # global is lost - expected_result = [None, "value-1", "value-2"] - self.run_roundtrip_testcase(attr_name="random", values=input_values) - self.check_roundtrip_results(expected_result) - - ####################################################### - # Tests on "Conventions" attribute. 
- # Note: the usual 'Conventions' behaviour is already tested elsewhere - # - see :class:`TestConventionsAttributes` above - # - # TODO: the name 'conventions' (lower-case) is also listed in _CF_GLOBAL_ATTRS, but - # we have excluded it from the global-attrs testing here. We probably still need to - # test what that does, though it's inclusion might simply be a mistake. - # - - def test_07_conventions_var_local(self): - # What happens if 'Conventions' appears as a variable-local attribute. - # N.B. this is not good CF, but we'll see what happens anyway. - self.run_roundtrip_testcase( - attr_name="Conventions", - values=[None, "user_set"], - ) - self.check_roundtrip_results(["CF-1.7", None]) - - def test_08_conventions_var_both(self): - # What happens if 'Conventions' appears as both global + local attribute. - self.run_roundtrip_testcase( - attr_name="Conventions", - values=["global-setting", "local-setting"], - ) - # standard content from Iris save - self.check_roundtrip_results(["CF-1.7", None]) - - ####################################################### - # Tests on "global" style attributes - # = those specific ones which 'ought' only to be global (except on collisions) - # - def test_09_globalstyle__global(self, global_attr): - attr_content = f"Global tracked {global_attr}" - self.run_roundtrip_testcase( - attr_name=global_attr, - values=[attr_content, None], - ) - self.check_roundtrip_results([attr_content, None]) - - def test_10_globalstyle__local(self, global_attr, do_split): - # Strictly, not correct CF, but let's see what it does with it. - attr_content = f"Local tracked {global_attr}" - input_values = [None, attr_content] - self.run_roundtrip_testcase( - attr_name=global_attr, - values=input_values, - ) - if do_split: - # remains local as supplied, but there is a warning - expected_result = input_values - expected_warning = f"'{global_attr}'.* should only be a CF global" - else: - # promoted to global - expected_result = [attr_content, None] - expected_warning = None - self.check_roundtrip_results(expected_result, expected_warning) - - def test_11_globalstyle__both(self, global_attr, do_split): - attr_global = f"Global-{global_attr}" - attr_local = f"Local-{global_attr}" - input_values = [attr_global, attr_local] - self.run_roundtrip_testcase( - attr_name=global_attr, - values=input_values, - ) - if do_split: - # remains local as supplied, but there is a warning - expected_result = input_values - expected_warning = "should only be a CF global" - else: - # promoted to global, no local value, original global lost - expected_result = [attr_local, None] - expected_warning = None - self.check_roundtrip_results(expected_result, expected_warning) - - def test_12_globalstyle__multivar_different(self, global_attr): - # Multiple *different* local settings are retained, not promoted - attr_1 = f"Local-{global_attr}-1" - attr_2 = f"Local-{global_attr}-2" - expect_warning = "should only be a CF global attribute" - # A warning should be raised when writing the result. 
- self.run_roundtrip_testcase( - attr_name=global_attr, - values=[None, attr_1, attr_2], - ) - self.check_roundtrip_results([None, attr_1, attr_2], expect_warning) - - def test_13_globalstyle__multivar_same(self, global_attr, do_split): - # Multiple *same* local settings are promoted to a common global one - attrval = f"Locally-defined-{global_attr}" - input_values = [None, attrval, attrval] - self.run_roundtrip_testcase( - attr_name=global_attr, - values=input_values, - ) - if do_split: - # remains local, but with a warning - expected_warning = "should only be a CF global" - expected_result = input_values - else: - # promoted to global - expected_warning = None - expected_result = [attrval, None, None] - self.check_roundtrip_results(expected_result, expected_warning) - - def test_14_globalstyle__multifile_different(self, global_attr, do_split): - # Different global attributes from multiple files are retained as local ones - attr_1 = f"Global-{global_attr}-1" - attr_2 = f"Global-{global_attr}-2" - self.run_roundtrip_testcase( - attr_name=global_attr, - values=[[attr_1, None], [attr_2, None]], - ) - # A warning should be raised when writing the result. - expected_warnings = ["should only be a CF global attribute"] - if do_split: - # An extra warning, only when saving with split-attributes. - expected_warnings = ["Saving.* as local"] + expected_warnings - self.check_roundtrip_results([None, attr_1, attr_2], expected_warnings) - - def test_15_globalstyle__multifile_same(self, global_attr): - # Matching global-type attributes in multiple files are retained as global - attrval = f"Global-{global_attr}" - self.run_roundtrip_testcase( - attr_name=global_attr, values=[[attrval, None], [attrval, None]] - ) - self.check_roundtrip_results([attrval, None, None]) - - ####################################################### - # Tests on "local" style attributes - # = those specific ones which 'ought' to appear attached to a variable, rather than - # being global - # - - @pytest.mark.parametrize("origin_style", ["input_global", "input_local"]) - def test_16_localstyle(self, local_attr, origin_style, do_split): - # local-style attributes should *not* get 'promoted' to global ones - # Set the name extension to avoid tests with different 'style' params having - # collisions over identical testfile names - self.testname_extension = origin_style - - attrval = f"Attr-setting-{local_attr}" - if local_attr == "missing_value": - # Special-cases : 'missing_value' type must be compatible with the variable - attrval = 303 - elif local_attr == "ukmo__process_flags": - # What this does when a GLOBAL attr seems to be weird + unintended. - # 'this' --> 't h i s' - attrval = "process" - # NOTE: it's also supposed to handle vector values - which we are not - # testing. - - # NOTE: results *should* be the same whether the original attribute is written - # as global or a variable attribute - if origin_style == "input_global": - # Record in source as a global attribute - values = [attrval, None] - else: - assert origin_style == "input_local" - # Record in source as a variable-local attribute - values = [None, attrval] - self.run_roundtrip_testcase(attr_name=local_attr, values=values) - - if ( - local_attr in ("missing_value", "standard_error_multiplier") - and origin_style == "input_local" - ): - # These ones are actually discarded by roundtrip. - # Not clear why, but for now this captures the facts. 
- expect_global = None - expect_var = None - else: - expect_global = None - if ( - local_attr == "ukmo__process_flags" - and origin_style == "input_global" - and not do_split - ): - # This is very odd behaviour + surely unintended. - # It's supposed to handle vector values (which we are not checking). - # But the weird behaviour only applies to the 'global' test, which is - # obviously not normal usage anyway. - attrval = "p r o c e s s" - expect_var = attrval - - if local_attr == "STASH" and ( - origin_style == "input_local" or not do_split - ): - # A special case, output translates this to a different attribute name. - self.attrname = "um_stash_source" - - expected_result = [expect_global, expect_var] - if do_split and origin_style == "input_global": - # The result is simply the "other way around" - expected_result = expected_result[::-1] - self.check_roundtrip_results(expected_result) - - @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) - @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) - def test_roundtrip_matrix( - self, testcase, attrname, matrix_results, do_split - ): - do_saves, matrix_results = matrix_results - split_param = "split" if do_split else "unsplit" - testcase_spec = matrix_results["roundtrip"][testcase] - input_spec = testcase_spec["input"] - values = decode_matrix_input(input_spec) - - self.run_roundtrip_testcase(attrname, values) - results = self.fetch_results(filepath=self.result_filepath) - result_spec = encode_matrix_result(results) - - attr_style = deduce_attr_style(attrname) - expected = testcase_spec[attr_style][split_param] - - if do_saves: - testcase_spec[attr_style][split_param] = result_spec - if expected is not None: - assert result_spec == expected - - -class TestLoad(MixinAttrsTesting): - """ - Test loading of file attributes into Iris cube attribute dictionaries. - - Tests loading of various combinations to cube dictionaries, treated as a - single combined result (i.e. not split). This behaviour should be (almost) - conserved with the adoption of split attributes **except possibly for key - orderings** -- i.e. we test only up to dictionary equality. - - NOTE: the tested combinations are identical to the roundtrip test. Test numbering - is kept the same, so some (which are inapplicable for this) are missing. - - """ - - def run_load_testcase(self, attr_name, values): - self.run_testcase( - attr_name=attr_name, values=values, create_cubes_or_files="files" - ) - - def check_load_results(self, expected, oldstyle_combined=False): - if not _SPLIT_SAVE_SUPPORTED and not oldstyle_combined: - # Don't check "newstyle" in the old world -- just skip it. - return - result_cubes = iris.load(self.input_filepaths) - results = self.fetch_results( - cubes=result_cubes, oldstyle_combined=oldstyle_combined - ) - # Standardise expected form to list(lists). - assert isinstance(expected, list) - if not isinstance(expected[0], list): - expected = [expected] - assert results == expected - - ####################################################### - # Tests on "user-style" attributes. - # This means any arbitrary attribute which a user might have added -- i.e. one with - # a name which is *not* recognised in the netCDF or CF conventions. 
- # - - def test_01_userstyle_single_global(self): - self.run_load_testcase( - attr_name="myname", values=["single_value", None, None] - ) - # Legacy-equivalent result check (single attributes dict per cube) - self.check_load_results( - [None, "single_value", "single_value"], - oldstyle_combined=True, - ) - # Full new-style results check - self.check_load_results(["single_value", None, None]) - - def test_02_userstyle_single_local(self): - # Default behaviour for a general local user-attribute. - # It is attached to only the specific cube. - self.run_load_testcase( - attr_name="myname", # A generic "user" attribute with no special handling - values=[None, "single-value", None], - ) - self.check_load_results( - [None, "single-value", None], oldstyle_combined=True - ) - self.check_load_results([None, "single-value", None]) - - def test_03_userstyle_multiple_different(self): - # Default behaviour for differing local user-attributes. - # The global attribute is simply lost, because there are local ones. - self.run_load_testcase( - attr_name="random", # A generic "user" attribute with no special handling - values=[ - ["global_file1", "f1v1", "f1v2"], - ["global_file2", "x1", "x2"], - ], - ) - self.check_load_results( - [None, "f1v1", "f1v2", "x1", "x2"], - oldstyle_combined=True, - ) - self.check_load_results( - [["global_file1", "f1v1", "f1v2"], ["global_file2", "x1", "x2"]] - ) - - def test_04_userstyle_multiple_same(self): - # Nothing special to note in this case - # TODO: ??remove?? - self.run_load_testcase( - attr_name="random", - values=["global_file1", "same-value", "same-value"], - ) - self.check_load_results( - oldstyle_combined=True, expected=[None, "same-value", "same-value"] - ) - self.check_load_results(["global_file1", "same-value", "same-value"]) - - ####################################################### - # Tests on "Conventions" attribute. - # Note: the usual 'Conventions' behaviour is already tested elsewhere - # - see :class:`TestConventionsAttributes` above - # - # TODO: the name 'conventions' (lower-case) is also listed in _CF_GLOBAL_ATTRS, but - # we have excluded it from the global-attrs testing here. We probably still need to - # test what that does, though it's inclusion might simply be a mistake. - # - - def test_07_conventions_var_local(self): - # What happens if 'Conventions' appears as a variable-local attribute. - # N.B. this is not good CF, but we'll see what happens anyway. - self.run_load_testcase( - attr_name="Conventions", - values=[None, "user_set"], - ) - # Legacy result - self.check_load_results([None, "user_set"], oldstyle_combined=True) - # Newstyle result - self.check_load_results([None, "user_set"]) - - def test_08_conventions_var_both(self): - # What happens if 'Conventions' appears as both global + local attribute. - self.run_load_testcase( - attr_name="Conventions", - values=["global-setting", "local-setting"], - ) - # (#1): legacy result : the global version gets lost. - self.check_load_results( - [None, "local-setting"], oldstyle_combined=True - ) - # (#2): newstyle results : retain both. 
- self.check_load_results(["global-setting", "local-setting"]) - - ####################################################### - # Tests on "global" style attributes - # = those specific ones which 'ought' only to be global (except on collisions) - # - - def test_09_globalstyle__global(self, global_attr): - attr_content = f"Global tracked {global_attr}" - self.run_load_testcase( - attr_name=global_attr, values=[attr_content, None] - ) - # (#1) legacy - self.check_load_results([None, attr_content], oldstyle_combined=True) - # (#2) newstyle : global status preserved. - self.check_load_results([attr_content, None]) - - def test_10_globalstyle__local(self, global_attr): - # Strictly, not correct CF, but let's see what it does with it. - attr_content = f"Local tracked {global_attr}" - self.run_load_testcase( - attr_name=global_attr, - values=[None, attr_content], - ) - # (#1): legacy result = treated the same as a global setting - self.check_load_results([None, attr_content], oldstyle_combined=True) - # (#2): newstyle result : remains local - self.check_load_results( - [None, attr_content], - ) - - def test_11_globalstyle__both(self, global_attr): - attr_global = f"Global-{global_attr}" - attr_local = f"Local-{global_attr}" - self.run_load_testcase( - attr_name=global_attr, - values=[attr_global, attr_local], - ) - # (#1) legacy result : promoted local setting "wins" - self.check_load_results([None, attr_local], oldstyle_combined=True) - # (#2) newstyle result : both retained - self.check_load_results([attr_global, attr_local]) - - def test_12_globalstyle__multivar_different(self, global_attr): - # Multiple *different* local settings are retained - attr_1 = f"Local-{global_attr}-1" - attr_2 = f"Local-{global_attr}-2" - self.run_load_testcase( - attr_name=global_attr, - values=[None, attr_1, attr_2], - ) - # (#1): legacy values, for cube.attributes viewed as a single dict - self.check_load_results([None, attr_1, attr_2], oldstyle_combined=True) - # (#2): exact results, with newstyle "split" cube attrs - self.check_load_results([None, attr_1, attr_2]) - - def test_14_globalstyle__multifile_different(self, global_attr): - # Different global attributes from multiple files - attr_1 = f"Global-{global_attr}-1" - attr_2 = f"Global-{global_attr}-2" - self.run_load_testcase( - attr_name=global_attr, - values=[[attr_1, None, None], [attr_2, None, None]], - ) - # (#1) legacy : multiple globals retained as local ones - self.check_load_results( - [None, attr_1, attr_1, attr_2, attr_2], oldstyle_combined=True - ) - # (#1) newstyle : result same as input - self.check_load_results([[attr_1, None, None], [attr_2, None, None]]) - - ####################################################### - # Tests on "local" style attributes - # = those specific ones which 'ought' to appear attached to a variable, rather than - # being global - # - - @pytest.mark.parametrize("origin_style", ["input_global", "input_local"]) - def test_16_localstyle(self, local_attr, origin_style): - # local-style attributes should *not* get 'promoted' to global ones - # Set the name extension to avoid tests with different 'style' params having - # collisions over identical testfile names - self.testname_extension = origin_style - - attrval = f"Attr-setting-{local_attr}" - if local_attr == "missing_value": - # Special-case : 'missing_value' type must be compatible with the variable - attrval = 303 - elif local_attr == "ukmo__process_flags": - # Another special case : the handling of this one is "unusual". 
- attrval = "process" - - # Create testfiles and load them, which should always produce a single cube. - if origin_style == "input_global": - # Record in source as a global attribute - values = [attrval, None] - else: - assert origin_style == "input_local" - # Record in source as a variable-local attribute - values = [None, attrval] - - self.run_load_testcase(attr_name=local_attr, values=values) - - # Work out the expected result. - result_value = attrval - # ... there are some special cases - if origin_style == "input_local": - if local_attr == "ukmo__process_flags": - # Some odd special behaviour here. - result_value = (result_value,) - elif local_attr in ("standard_error_multiplier", "missing_value"): - # For some reason, these ones never appear on the cube - result_value = None - - # NOTE: **legacy** result is the same, whether the original attribute was - # provided as a global or local attribute ... - expected_result_legacy = [None, result_value] - - # While 'newstyle' results preserve the input type local/global. - if origin_style == "input_local": - expected_result_newstyle = [None, result_value] - else: - expected_result_newstyle = [result_value, None] - - # (#1): legacy values, for cube.attributes viewed as a single dict - self.check_load_results(expected_result_legacy, oldstyle_combined=True) - # (#2): exact results, with newstyle "split" cube attrs - self.check_load_results(expected_result_newstyle) - - @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) - @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) - @pytest.mark.parametrize("resultstyle", _MATRIX_LOAD_RESULTSTYLES) - def test_load_matrix( - self, testcase, attrname, matrix_results, resultstyle - ): - do_saves, matrix_results = matrix_results - testcase_spec = matrix_results["load"][testcase] - input_spec = testcase_spec["input"] - values = decode_matrix_input(input_spec) - - self.run_load_testcase(attrname, values) - - result_cubes = iris.load(self.input_filepaths) - do_combined = resultstyle == "legacy" - results = self.fetch_results( - cubes=result_cubes, oldstyle_combined=do_combined - ) - result_spec = encode_matrix_result(results) - - attr_style = deduce_attr_style(attrname) - expected = testcase_spec[attr_style][resultstyle] - - if do_saves: - testcase_spec[attr_style][resultstyle] = result_spec - if expected is not None: - assert result_spec == expected - - -class TestSave(MixinAttrsTesting): - """ - Test saving from cube attributes dictionary (various categories) into files. - - """ - - # Parametrise all tests over split/unsplit saving. - @pytest.fixture( - params=_SPLIT_PARAM_VALUES, ids=_SPLIT_PARAM_IDS, autouse=True - ) - def do_split(self, request): - do_split = request.param - self.save_split_attrs = do_split - return do_split - - def run_save_testcase(self, attr_name: str, values: list): - # Create input cubes. - self.run_testcase( - attr_name=attr_name, - values=values, - create_cubes_or_files="cubes", - ) - - # Save input cubes to a temporary result file. 
- with warnings.catch_warnings(record=True) as captured_warnings: - self.result_filepath = self._testfile_path("result") - do_split = getattr(self, "save_split_attrs", False) - kwargs = ( - dict(save_split_attrs=do_split) - if _SPLIT_SAVE_SUPPORTED - else dict() - ) - with iris.FUTURE.context(**kwargs): - iris.save(self.input_cubes, self.result_filepath) - - self.captured_warnings = captured_warnings - - def run_save_testcase_legacytype(self, attr_name: str, values: list): - """ - Legacy-type means : before cubes had split attributes. - - This just means we have only one "set" of cubes, with ***no*** distinct global - attribute. - """ - if not isinstance(values, list): - # Translate single input value to list-of-1 - values = [values] - - self.run_save_testcase(attr_name, [None] + values) - - def check_save_results( - self, expected: list, expected_warnings: List[str] = None - ): - results = self.fetch_results(filepath=self.result_filepath) - assert results == expected - check_captured_warnings( - expected_warnings, - self.captured_warnings, - # N.B. only allow a legacy-attributes warning when NOT saving split-attrs - allow_possible_legacy_warning=not self.save_split_attrs, - ) - - def test_userstyle__single(self, do_split): - self.run_save_testcase_legacytype("random", "value-x") - if do_split: - # result as input values - expected_result = [None, "value-x"] - else: - # in legacy mode, promoted = stored as a *global* by default. - expected_result = ["value-x", None] - self.check_save_results(expected_result) - - def test_userstyle__multiple_same(self, do_split): - self.run_save_testcase_legacytype("random", ["value-x", "value-x"]) - if do_split: - # result as input values - expected_result = [None, "value-x", "value-x"] - else: - # in legacy mode, promoted = stored as a *global* by default. - expected_result = ["value-x", None, None] - self.check_save_results(expected_result) - - def test_userstyle__multiple_different(self): - # Clashing values are stored as locals on the individual variables. - self.run_save_testcase_legacytype("random", ["value-A", "value-B"]) - self.check_save_results([None, "value-A", "value-B"]) - - def test_userstyle__multiple_onemissing(self): - # Multiple user-type, with one missing, behave like different values. - self.run_save_testcase_legacytype( - "random", - ["value", None], - ) - # Stored as locals when there are differing values. - self.check_save_results([None, "value", None]) - - def test_Conventions__single(self): - self.run_save_testcase_legacytype("Conventions", "x") - # Always discarded + replaced by a single global setting. - self.check_save_results(["CF-1.7", None]) - - def test_Conventions__multiple_same(self): - self.run_save_testcase_legacytype( - "Conventions", ["same-value", "same-value"] - ) - # Always discarded + replaced by a single global setting. - self.check_save_results(["CF-1.7", None, None]) - - def test_Conventions__multiple_different(self): - self.run_save_testcase_legacytype( - "Conventions", ["value-A", "value-B"] - ) - # Always discarded + replaced by a single global setting. 
- self.check_save_results(["CF-1.7", None, None]) - - def test_globalstyle__single(self, global_attr, do_split): - self.run_save_testcase_legacytype(global_attr, ["value"]) - if do_split: - # result as input values - expected_warning = "should only be a CF global" - expected_result = [None, "value"] - else: - # in legacy mode, promoted - expected_warning = None - expected_result = ["value", None] - self.check_save_results(expected_result, expected_warning) - - def test_globalstyle__multiple_same(self, global_attr, do_split): - # Multiple global-type with same values are made global. - self.run_save_testcase_legacytype( - global_attr, - ["value-same", "value-same"], - ) - if do_split: - # result as input values - expected_result = [None, "value-same", "value-same"] - expected_warning = "should only be a CF global attribute" - else: - # in legacy mode, promoted - expected_result = ["value-same", None, None] - expected_warning = None - self.check_save_results(expected_result, expected_warning) - - def test_globalstyle__multiple_different(self, global_attr): - # Multiple global-type with different values become local, with warning. - self.run_save_testcase_legacytype(global_attr, ["value-A", "value-B"]) - # *Only* stored as locals when there are differing values. - msg_regexp = ( - f"'{global_attr}' is being added as CF data variable attribute," - f".* should only be a CF global attribute." - ) - self.check_save_results( - [None, "value-A", "value-B"], expected_warnings=msg_regexp - ) - - def test_globalstyle__multiple_onemissing(self, global_attr): - # Multiple global-type, with one missing, behave like different values. - self.run_save_testcase_legacytype( - global_attr, ["value", "value", None] - ) - # Stored as locals when there are differing values. - msg_regexp = ( - f"'{global_attr}' is being added as CF data variable attribute," - f".* should only be a CF global attribute." - ) - self.check_save_results( - [None, "value", "value", None], expected_warnings=msg_regexp - ) - - def test_localstyle__single(self, local_attr): - self.run_save_testcase_legacytype(local_attr, ["value"]) - - # Defaults to local - expected_results = [None, "value"] - # .. but a couple of special cases - if local_attr == "ukmo__process_flags": - # A particular, really weird case - expected_results = [None, "v a l u e"] - elif local_attr == "STASH": - # A special case : the stored name is different - self.attrname = "um_stash_source" - - self.check_save_results(expected_results) - - def test_localstyle__multiple_same(self, local_attr): - self.run_save_testcase_legacytype( - local_attr, ["value-same", "value-same"] - ) - - # They remain separate + local - expected_results = [None, "value-same", "value-same"] - if local_attr == "ukmo__process_flags": - # A particular, really weird case - expected_results = [ - None, - "v a l u e - s a m e", - "v a l u e - s a m e", - ] - elif local_attr == "STASH": - # A special case : the stored name is different - self.attrname = "um_stash_source" - - self.check_save_results(expected_results) - - def test_localstyle__multiple_different(self, local_attr): - self.run_save_testcase_legacytype(local_attr, ["value-A", "value-B"]) - # Different values are treated just the same as matching ones. 
- expected_results = [None, "value-A", "value-B"] - if local_attr == "ukmo__process_flags": - # A particular, really weird case - expected_results = [ - None, - "v a l u e - A", - "v a l u e - B", - ] - elif local_attr == "STASH": - # A special case : the stored name is different - self.attrname = "um_stash_source" - self.check_save_results(expected_results) - - # - # Test handling of newstyle independent global+local cube attributes. - # - def test_globallocal_clashing(self, do_split): - # A cube has clashing local + global attrs. - original_values = ["valueA", "valueB"] - self.run_save_testcase("userattr", original_values) - expected_result = original_values.copy() - if not do_split: - # in legacy mode, "promote" = lose the local one - expected_result[0] = expected_result[1] - expected_result[1] = None - self.check_save_results(expected_result) - - def test_globallocal_oneeach_same(self, do_split): - # One cube with global attr, another with identical local one. - self.run_save_testcase( - "userattr", values=[[None, "value"], ["value", None]] - ) - if do_split: - expected = [None, "value", "value"] - expected_warning = ( - r"Saving the cube global attributes \['userattr'\] as local" - ) - else: - # N.B. legacy code sees only two equal values (and promotes). - expected = ["value", None, None] - expected_warning = None - - self.check_save_results(expected, expected_warning) - - def test_globallocal_oneeach_different(self, do_split): - # One cube with global attr, another with a *different* local one. - self.run_save_testcase( - "userattr", [[None, "valueA"], ["valueB", None]] - ) - if do_split: - warning = ( - r"Saving the cube global attributes \['userattr'\] as local" - ) - else: - # N.B. legacy code does not warn of global-to-local "demotion". - warning = None - self.check_save_results([None, "valueA", "valueB"], warning) - - def test_globallocal_one_other_clashingglobals(self, do_split): - # Two cubes with both, second cube has a clashing global attribute. - self.run_save_testcase( - "userattr", - values=[["valueA", "valueB"], ["valueXXX", "valueB"]], - ) - if do_split: - expected = [None, "valueB", "valueB"] - expected_warnings = [ - "Saving.* global attributes.* as local", - 'attributes.* of cube "v1" were not saved', - 'attributes.* of cube "v2" were not saved', - ] - else: - # N.B. legacy code sees only the locals, and promotes them. - expected = ["valueB", None, None] - expected_warnings = None - self.check_save_results(expected, expected_warnings) - - def test_globallocal_one_other_clashinglocals(self, do_split): - # Two cubes with both, second cube has a clashing local attribute. - inputs = [["valueA", "valueB"], ["valueA", "valueXXX"]] - if do_split: - expected = ["valueA", "valueB", "valueXXX"] - else: - # N.B. legacy code sees only the locals. 
- expected = [None, "valueB", "valueXXX"] - self.run_save_testcase("userattr", values=inputs) - self.check_save_results(expected) - - @pytest.mark.parametrize("testcase", _MATRIX_TESTCASES[:max_param_attrs]) - @pytest.mark.parametrize("attrname", _MATRIX_ATTRNAMES) - def test_save_matrix(self, testcase, attrname, matrix_results, do_split): - do_saves, matrix_results = matrix_results - split_param = "split" if do_split else "unsplit" - testcase_spec = matrix_results["save"][testcase] - input_spec = testcase_spec["input"] - values = decode_matrix_input(input_spec) - - self.run_save_testcase(attrname, values) - results = self.fetch_results(filepath=self.result_filepath) - result_spec = encode_matrix_result(results) - - attr_style = deduce_attr_style(attrname) - expected = testcase_spec[attr_style][split_param] - - if do_saves: - testcase_spec[attr_style][split_param] = result_spec - if expected is not None: - assert result_spec == expected diff --git a/lib/iris/tests/integration/test_new_axis.py b/lib/iris/tests/integration/test_new_axis.py index 7c8da13ae5..876eccbb63 100644 --- a/lib/iris/tests/integration/test_new_axis.py +++ b/lib/iris/tests/integration/test_new_axis.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :func:`iris.util.new_axis`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_pickle.py b/lib/iris/tests/integration/test_pickle.py index 7317855512..fa5ddbd73e 100644 --- a/lib/iris/tests/integration/test_pickle.py +++ b/lib/iris/tests/integration/test_pickle.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for pickling things.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py index bab925bd7e..e654694aa7 100644 --- a/lib/iris/tests/integration/test_pp.py +++ b/lib/iris/tests/integration/test_pp.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Integration tests for loading and saving PP files.""" # Import iris.tests first so that some things can be initialised before @@ -17,7 +18,7 @@ from iris.aux_factory import HybridHeightFactory, HybridPressureFactory from iris.coords import AuxCoord, CellMethod, DimCoord from iris.cube import Cube -from iris.exceptions import IgnoreCubeException, IrisUserWarning +from iris.exceptions import IgnoreCubeException import iris.fileformats.pp from iris.fileformats.pp import load_pairs_from_fields import iris.fileformats.pp_load_rules @@ -289,7 +290,7 @@ def test_hybrid_pressure_with_duplicate_references(self): "iris.fileformats.pp.load", new=load ) as load, mock.patch("warnings.warn") as warn: _, _, _ = iris.fileformats.pp.load_cubes("DUMMY") - warn.assert_called_with(msg, category=IrisUserWarning) + warn.assert_called_with(msg) def test_hybrid_height_with_non_standard_coords(self): # Check the save rules are using the AuxFactory to find the @@ -414,7 +415,7 @@ def test_hybrid_height_round_trip_no_reference(self): "Unable to create instance of HybridHeightFactory. " "The source data contains no field(s) for 'orography'." ) - warn.assert_called_with(msg, category=IrisUserWarning) + warn.assert_called_with(msg) # Check the data cube is set up to use hybrid height. self._test_coord( diff --git a/lib/iris/tests/integration/test_pp_constrained_load_cubes.py b/lib/iris/tests/integration/test_pp_constrained_load_cubes.py index 5ba8978ed1..7ddf39b2ff 100644 --- a/lib/iris/tests/integration/test_pp_constrained_load_cubes.py +++ b/lib/iris/tests/integration/test_pp_constrained_load_cubes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :func:`iris.fileformats.rules.load_cubes`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_regrid_equivalence.py b/lib/iris/tests/integration/test_regrid_equivalence.py index 6bcb1ce403..09b47072e0 100644 --- a/lib/iris/tests/integration/test_regrid_equivalence.py +++ b/lib/iris/tests/integration/test_regrid_equivalence.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Tests to check the validity of replacing "iris.analysis._interpolate.regrid`('nearest')" with diff --git a/lib/iris/tests/integration/test_regridding.py b/lib/iris/tests/integration/test_regridding.py index 44e9fef22e..3e87a8d0aa 100644 --- a/lib/iris/tests/integration/test_regridding.py +++ b/lib/iris/tests/integration/test_regridding.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Integration tests for regridding.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_subset.py b/lib/iris/tests/integration/test_subset.py index 457616cee3..bc2029afba 100644 --- a/lib/iris/tests/integration/test_subset.py +++ b/lib/iris/tests/integration/test_subset.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for subset.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py index abe8fd0a2e..a8e3acaa41 100644 --- a/lib/iris/tests/integration/test_trajectory.py +++ b/lib/iris/tests/integration/test_trajectory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :mod:`iris.analysis.trajectory`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/integration/um/__init__.py b/lib/iris/tests/integration/um/__init__.py index 40fc56f129..a94785ca58 100644 --- a/lib/iris/tests/integration/um/__init__.py +++ b/lib/iris/tests/integration/um/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Integration tests for :mod:`iris.fileformats.um` fast load functions.""" diff --git a/lib/iris/tests/integration/um/test_fieldsfile.py b/lib/iris/tests/integration/um/test_fieldsfile.py index 2aff7a2989..56b88c2b6d 100644 --- a/lib/iris/tests/integration/um/test_fieldsfile.py +++ b/lib/iris/tests/integration/um/test_fieldsfile.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the fast loading of structured Fieldsfiles. diff --git a/lib/iris/tests/pp.py b/lib/iris/tests/pp.py index 3e07ccbd7f..d8eb3256c4 100644 --- a/lib/iris/tests/pp.py +++ b/lib/iris/tests/pp.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
import contextlib import os.path diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 69beacb848..2313c25270 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -1,6 +1,6 @@ { "gallery_tests.test_plot_COP_1d.0": "aefec91c3601249cc9b3336dc4c8cdb31a64c6d997b3c0eccb5932d285e42f33", - "gallery_tests.test_plot_COP_maps.0": "ea91789995668566913e43474adb6a917e8d947c4b46957ec6716a91958e6f81", + "gallery_tests.test_plot_COP_maps.0": "ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94696935", "gallery_tests.test_plot_SOI_filtering.0": "fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4", "gallery_tests.test_plot_TEC.0": "e5a761b69a589a4bc46f9e48c65c6631ce61d1ce3982c13739b33193c0ee3f8c", "gallery_tests.test_plot_anomaly_log_colouring.0": "ec4464e384a39b13931a9b1c85696da968d5e6e63e26847bdbd399938d3c5a4c", @@ -129,8 +129,8 @@ "iris.tests.test_plot.TestHybridHeight.test_points.3": "fe857b91917a847ec4bd3f01c47c6ca43b11915a3ea4db3b1b4a84c4c03f3fc1", "iris.tests.test_plot.TestHybridHeight.test_points.4": "b878387e978ec2f0c0f09f83878f3f81c070c0fe78d0c1763fa13856d03e3f0f", "iris.tests.test_plot.TestMissingCS.test_missing_cs.0": "fa816ac1857e853cc17e957ac15f3e8494c6c8f43e81c13b3f813e91c07e3f46", - "iris.tests.test_plot.TestMissingCoord.test_no_u.0": "ea856a95955a956ac17f954a817e3f8c953ac07e3e81c07f7ea16a81c07e3e81", - "iris.tests.test_plot.TestMissingCoord.test_no_u.1": "ea956ab5954a954ac17e954a857f3f80954ac07f7e80c07f7a856a84c07f3f81", + "iris.tests.test_plot.TestMissingCoord.test_no_u.0": "ea856a95955a954ac17f954a807e3f48951ac07e3f81c0ff7ea16a81c0bf3f81", + "iris.tests.test_plot.TestMissingCoord.test_no_u.1": "ea956ab5954a954ac17e9542817f2f60950ac07f3e80c0ff7a856aa5c2ff3f80", "iris.tests.test_plot.TestMissingCoord.test_no_v.0": "fa816a85957a857ac17e954ac17e1fa2950bc07e3e81c07f3e807a85c17f3f81", "iris.tests.test_plot.TestMissingCoord.test_no_v.1": "fa856a85957a857ac17e954ac17e9d02954ac07e3e81c07f3e857a85c2fd3f80", "iris.tests.test_plot.TestMissingCoord.test_none.0": "fa816a85957a857ac17e954ac17e3fa2950ac07e3e80c07f3e807a85c1ff3f81", @@ -168,12 +168,12 @@ "iris.tests.test_plot.TestPlotCitation.test_axes.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", "iris.tests.test_plot.TestPlotCitation.test_figure.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_non_cube_coordinate.0": "fa81857e857e3e85857e7a81857e7a81857e7a817e81780b7a81c56a7a81857e", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": "ea853e11956ac1e3957a844e957a607e955e6ae36ae17aa16a856be86ab13c32", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": "ea857a85857ac57a957a857a957ad05e850b3ed46e206b917a816f247a953ae4", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": "ebfdcac9bd209434b696856795cb012e95676b7b81186acdc06536ad89182fda", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": "aaff7ab2fd04902cfd0c950f9d010f4bd64069f3e1993a9894262e345ae56f6c", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": "eaa9d5129556c55695568556955623f9c0292bf9c0a90bfdc0fd7e0085562ff9", - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": "eaf77a0d9553c52c950095ac952885ea952c87f3952c6bf3d42c6a536a57bf80", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": "ea853f10956ac1e1957a854e957a207e955e6aa76ae17aa16a856aaf6ab19e12", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": 
"ea853a85857a857a957a857a957ed05a857b3e946a606b917a816f247a853af4", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": "eafdcec9f4219530b696a56694c3852a95656b7b85986acdc06516adad186e9a", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": "aff24ab7fd05952dbd0f950f910fed48c47868f2e1b9329094266e345a850f6c", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": "eaa9b5699556854e9456854ed05625f9d0a92bfdc0a90afd81f97e00855e7ab6", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": "eaf73e0d9503852c950395ac9528c1fad06cc0f2d1ec6af2c0fc6a536a1797f3", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_x.0": "afea950ddb13c03e34359ad8a4c86f24913f2693806e3ff1f4087b4285fd2af2", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_y.0": "afee9632de05c9d9f180d168c454a53e931b3e84954a3b8c85f94ce703ff7284", "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.0": "ea853f00957ac07c957ac0bf951a69f3c47c7a5f3a4127816b953e646b813761", @@ -202,8 +202,8 @@ "iris.tests.test_plot.TestQuickplotPlot.test_x.0": "82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1", "iris.tests.test_plot.TestQuickplotPlot.test_y.0": "a2fbb46e7f10c99f2013d863e46498dcd06c0d2798421fa5dd221e7789ff6f10", "iris.tests.test_plot.TestQuickplotPlot.test_z.0": "a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", - "iris.tests.test_plot.TestSimple.test_bounds.0": "ea856a85955a957ac17e954ac17a9d22956ac07e3e81c07f3e857aa5c27d3f80", - "iris.tests.test_plot.TestSimple.test_points.0": "ea856a85957a957ac17e954ac17e1fa2950ac07e3e80c07d3e847a85c1ff3f81", + "iris.tests.test_plot.TestSimple.test_bounds.0": "ea856a85954a957ac17e954ac17a9d3a956ac07e3e80c07f3e857aa5c27d3f80", + "iris.tests.test_plot.TestSimple.test_points.0": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", "iris.tests.test_plot.TestSymbols.test_cloud_cover.0": "eb5291e494ad6e136b5291ec94ad6e136b5291ec94ad6e136b5291ec94ad6e13", "iris.tests.test_quickplot.TestLabels.test_alignment.0": "be813fe0954ac07fc0ff3e81c03fc97a6d0094af3f80c17f36a53240d97f2d82", "iris.tests.test_quickplot.TestLabels.test_contour.0": "a7fd955a7a016d1a3217c962e4819a56c96f3c859b624d2584de3a6999b662db", @@ -220,12 +220,12 @@ "iris.tests.test_quickplot.TestPlotHist.test_horizontal.0": "b59cc3dadb433c24c4f166039438793591a7dbdcbcdc9ccc68c697a91b139131", "iris.tests.test_quickplot.TestPlotHist.test_vertical.0": "bf80c7c6c07d7959647e343a33364b699589c6c64ec0312b9e227ad681ffcc68", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": "fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": "ea856a95955a956ac17f950a817e3fcc951ac07e3e81c07f7ea16a85c07e3e81", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": "ea856a85957a957ac17e954ac17e1fa2950ac07e3e80c07d3e847a85c1ff3f81", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": "eaf9e2c9ff60b43036168d6795c2892e95674b7b80304aedc8651ead99192eda", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": "a2ff7892771d912cb4089d0ffd4b8d429c4049f3d1bb1a909c266e34dae56f68", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": "eaa9f598b756a41e8056855e955689f9d9610be988290bfdd9fdfe0089562a61", - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": "ead7780cf7d3c5acb40095acd56999e2952899f2d5ec0bf3902c6a536a57b700", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": 
"ea856a95955a956ac17f950a807e3f4e951ac07e3f81c0ff3ea16aa1c0bd3e81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": "eaf9eec9f729943032168d66d4db896e9567497b81304aedc96514ad8d18669a", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": "a6fb4b967f00950eb00f9d0f900fcd62dc7868f2c1bb3a909c266e34daa52f6c", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": "eaa9b549f756854ea0168d6ed556896fd8a909ed88290afdd9e97e008d6e2296", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": "aad73e0df78085ac840195ac9528d9fad56cd8f2906c48f2d0ec7a536a1737f3", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_x.0": "a6fb958dff50c03e203598dca4c9cd26933f9cf3886e1de1dc047b4289ec2672", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_y.0": "a2ffb6127f0dc9992085d960c6748d3edb121ca49d6a1b048df34ce789ff7205", "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.0": "ea856a95957a957ac07e954ac17e3e86950bc17f3ea4c27d3e833ac1c1e03f80", diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index c66c13bba5..632dc95e20 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A collection of routines which create standard Cubes/files for test purposes. diff --git a/lib/iris/tests/stock/_stock_2d_latlons.py b/lib/iris/tests/stock/_stock_2d_latlons.py index 889f8bce12..4733a15305 100644 --- a/lib/iris/tests/stock/_stock_2d_latlons.py +++ b/lib/iris/tests/stock/_stock_2d_latlons.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Extra stock routines for making and manipulating cubes with 2d coordinates, to mimic ocean grid data. diff --git a/lib/iris/tests/stock/mesh.py b/lib/iris/tests/stock/mesh.py index 7726849252..da226a3790 100644 --- a/lib/iris/tests/stock/mesh.py +++ b/lib/iris/tests/stock/mesh.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Helper functions making objects for unstructured mesh testing.""" diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index 0f6a08b596..bf93f01f6b 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Routines for generating synthetic NetCDF files from template headers.""" from pathlib import Path diff --git a/lib/iris/tests/system_test.py b/lib/iris/tests/system_test.py index 440b544f94..745163b485 100644 --- a/lib/iris/tests/system_test.py +++ b/lib/iris/tests/system_test.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ diff --git a/lib/iris/tests/test_abf.py b/lib/iris/tests/test_abf.py index 92ed337710..0b398879fc 100644 --- a/lib/iris/tests/test_abf.py +++ b/lib/iris/tests/test_abf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index e34d2ff1bd..e5614f6b63 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index f611e25c4e..0d88a23055 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else diff --git a/lib/iris/tests/test_analysis_calculus.py b/lib/iris/tests/test_analysis_calculus.py index 36e008f38e..86cc79092b 100644 --- a/lib/iris/tests/test_analysis_calculus.py +++ b/lib/iris/tests/test_analysis_calculus.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
# import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index c0329b72d6..4d92b9a92c 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_cartography.py b/lib/iris/tests/test_cartography.py index 58dccb78aa..c9647dc48e 100644 --- a/lib/iris/tests/test_cartography.py +++ b/lib/iris/tests/test_cartography.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Tests elements of the cartography module. diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index c748b9dfd4..0abb35c566 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test cube indexing, slicing, and extracting, and also the dot graphs. diff --git a/lib/iris/tests/test_cell.py b/lib/iris/tests/test_cell.py index 3925d9b0a7..21d2603072 100644 --- a/lib/iris/tests/test_cell.py +++ b/lib/iris/tests/test_cell.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py index 70f24478d2..3abd6b981b 100644 --- a/lib/iris/tests/test_cf.py +++ b/lib/iris/tests/test_cf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the cf module. 
diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py
index e3a1d2eaf3..6cea9dc001 100644
--- a/lib/iris/tests/test_coding_standards.py
+++ b/lib/iris/tests/test_coding_standards.py
@@ -1,13 +1,13 @@
 # Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.

 # import iris.tests first so that some things can be initialised before
 # importing anything else
 import iris.tests as tests  # isort:skip

-import ast
 from datetime import datetime
 from fnmatch import fnmatch
 from glob import glob
@@ -22,8 +22,9 @@

 LICENSE_TEMPLATE = """# Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details."""
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details."""

 # Guess iris repo directory of Iris - realpath is used to mitigate against
 # Python finding the iris package via a symlink.
@@ -132,66 +133,6 @@ def test_python_versions():
         assert search in path.read_text()


-def test_categorised_warnings():
-    """
-    To ensure that all UserWarnings raised by Iris are categorised, for ease of use.
-
-    No obvious category? Use the parent:
-    :class:`iris.exceptions.IrisUserWarning`.
-
-    Warning matches multiple categories? Create a one-off combo class. For
-    example:
-
-    .. code-block:: python
-
-        class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning):
-            \"""
-            One-off combination of warning classes - enhances user filtering.
-            \"""
-            pass
-
-    """
-    warns_without_category = []
-    warns_with_user_warning = []
-    tmp_list = []
-
-    for file_path in Path(IRIS_DIR).rglob("*.py"):
-        file_text = file_path.read_text()
-        parsed = ast.parse(source=file_text)
-        calls = filter(lambda node: hasattr(node, "func"), ast.walk(parsed))
-        warn_calls = filter(
-            lambda c: getattr(c.func, "attr", None) == "warn", calls
-        )
-
-        warn_call: ast.Call
-        for warn_call in warn_calls:
-            warn_ref = f"{file_path}:{warn_call.lineno}"
-            tmp_list.append(warn_ref)
-
-            category_kwargs = filter(
-                lambda k: k.arg == "category", warn_call.keywords
-            )
-            category_kwarg: ast.keyword = next(category_kwargs, None)
-
-            if category_kwarg is None:
-                warns_without_category.append(warn_ref)
-            # Work with Attribute or Name instances.
-            elif (
-                getattr(category_kwarg.value, "attr", None)
-                or getattr(category_kwarg.value, "id", None)
-            ) == "UserWarning":
-                warns_with_user_warning.append(warn_ref)
-
-    # This avoids UserWarnings being raised by unwritten default behaviour.
-    assert (
-        warns_without_category == []
-    ), "All warnings raised by Iris must be raised with the category kwarg."
-
-    assert (
-        warns_with_user_warning == []
-    ), "No warnings raised by Iris can be the base UserWarning class."
-
-
 class TestLicenseHeaders(tests.IrisTest):
     @staticmethod
     def whatchanged_parse(whatchanged_output):
diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py
index 7d28d48c31..9287a79fda 100644
--- a/lib/iris/tests/test_concatenate.py
+++ b/lib/iris/tests/test_concatenate.py
@@ -1,7 +1,8 @@
 # Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the cube concatenate mechanism. @@ -19,7 +20,6 @@ from iris.aux_factory import HybridHeightFactory from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.cube -from iris.exceptions import IrisUserWarning import iris.tests.stock as stock @@ -340,8 +340,7 @@ def test_points_overlap_increasing(self): cubes.append(_make_cube((0, 2), y, 1)) cubes.append(_make_cube((1, 3), y, 2)) with pytest.warns( - IrisUserWarning, - match="Found cubes with overlap on concatenate axis", + UserWarning, match="Found cubes with overlap on concatenate axis" ): result = concatenate(cubes) self.assertEqual(len(result), 2) @@ -352,8 +351,7 @@ def test_points_overlap_decreasing(self): cubes.append(_make_cube(x, (3, 0, -1), 1)) cubes.append(_make_cube(x, (1, -1, -1), 2)) with pytest.warns( - IrisUserWarning, - match="Found cubes with overlap on concatenate axis", + UserWarning, match="Found cubes with overlap on concatenate axis" ): result = concatenate(cubes) self.assertEqual(len(result), 2) @@ -368,8 +366,7 @@ def test_bounds_overlap_increasing(self): ) cubes.append(cube) with pytest.warns( - IrisUserWarning, - match="Found cubes with overlap on concatenate axis", + UserWarning, match="Found cubes with overlap on concatenate axis" ): result = concatenate(cubes) self.assertEqual(len(result), 2) @@ -384,8 +381,7 @@ def test_bounds_overlap_decreasing(self): ) cubes.append(cube) with pytest.warns( - IrisUserWarning, - match="Found cubes with overlap on concatenate axis", + UserWarning, match="Found cubes with overlap on concatenate axis" ): result = concatenate(cubes) self.assertEqual(len(result), 2) diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index b034525ff2..e568105f91 100644 --- a/lib/iris/tests/test_constraints.py +++ b/lib/iris/tests/test_constraints.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the constrained cube loading mechanism. diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 5eb12ba1f3..ea99ae06df 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/test_coord_categorisation.py b/lib/iris/tests/test_coord_categorisation.py new file mode 100644 index 0000000000..0206ba66a5 --- /dev/null +++ b/lib/iris/tests/test_coord_categorisation.py @@ -0,0 +1,197 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+""" +Test the coordinate categorisation functions. +""" + +# import iris tests first so that some things can be initialised before importing anything else +import iris.tests as tests # isort:skip + +import warnings + +import cf_units +import numpy as np + +import iris +import iris.coord_categorisation as ccat + +CATEGORISATION_FUNCS = ( + ccat.add_day_of_month, + ccat.add_day_of_year, + ccat.add_weekday, + ccat.add_weekday_fullname, + ccat.add_weekday_number, + ccat.add_month, + ccat.add_month_fullname, + ccat.add_month_number, + ccat.add_year, + ccat.add_season, + ccat.add_season_number, + ccat.add_season_year, + ccat.add_season_membership, +) + + +class TestCategorisations(tests.IrisTest): + def setUp(self): + # make a series of 'day numbers' for the time, that slide across month + # boundaries + day_numbers = np.arange(0, 600, 27, dtype=np.int32) + + cube = iris.cube.Cube( + day_numbers, long_name="test cube", units="metres" + ) + + # use day numbers as data values also (don't actually use this for + # anything) + cube.data = day_numbers + + time_coord = iris.coords.DimCoord( + day_numbers, + standard_name="time", + units=cf_units.Unit("days since epoch", "standard"), + ) + cube.add_dim_coord(time_coord, 0) + + self.cube = cube + self.time_coord = time_coord + + def test_bad_coord(self): + for func in CATEGORISATION_FUNCS: + kwargs = {"name": "my_category"} + if func is ccat.add_season_membership: + kwargs["season"] = "djf" + with self.assertRaises(iris.exceptions.CoordinateNotFoundError): + func(self.cube, "DOES NOT EXIST", **kwargs) + + def test_explicit_result_names(self): + result_name = "my_category" + fmt = "Missing/incorrectly named result for {0!r}" + for func in CATEGORISATION_FUNCS: + # Specify source coordinate by name + cube = self.cube.copy() + kwargs = {"name": result_name} + if func is ccat.add_season_membership: + kwargs["season"] = "djf" + with warnings.catch_warnings(record=True): + func(cube, "time", **kwargs) + result_coords = cube.coords(result_name) + self.assertEqual(len(result_coords), 1, fmt.format(func.__name__)) + # Specify source coordinate by coordinate reference + cube = self.cube.copy() + time = cube.coord("time") + with warnings.catch_warnings(record=True): + func(cube, time, **kwargs) + result_coords = cube.coords(result_name) + self.assertEqual(len(result_coords), 1, fmt.format(func.__name__)) + + def test_basic(self): + cube = self.cube + time_coord = self.time_coord + + ccat.add_year(cube, time_coord, "my_year") + ccat.add_day_of_month(cube, time_coord, "my_day_of_month") + ccat.add_day_of_year(cube, time_coord, "my_day_of_year") + + ccat.add_month(cube, time_coord, "my_month") + ccat.add_month_fullname(cube, time_coord, "my_month_fullname") + ccat.add_month_number(cube, time_coord, "my_month_number") + + ccat.add_weekday(cube, time_coord, "my_weekday") + ccat.add_weekday_number(cube, time_coord, "my_weekday_number") + ccat.add_weekday_fullname(cube, time_coord, "my_weekday_fullname") + + ccat.add_season(cube, time_coord, "my_season") + ccat.add_season_number(cube, time_coord, "my_season_number") + ccat.add_season_year(cube, time_coord, "my_season_year") + + # also test 'generic' categorisation interface + def _month_in_quarter(coord, pt_value): + date = coord.units.num2date(pt_value) + return (date.month - 1) % 3 + + ccat.add_categorised_coord( + cube, "my_month_in_quarter", time_coord, _month_in_quarter + ) + + # To ensure consistent results between 32-bit and 64-bit + # platforms, ensure all the numeric categorisation coordinates + # are always 
stored as int64. + for coord in cube.coords(): + if coord.long_name is not None and coord.points.dtype.kind == "i": + coord.points = coord.points.astype(np.int64) + + # check values + self.assertCML(cube, ("categorisation", "quickcheck.cml")) + + def test_add_season_nonstandard(self): + # season categorisations work for non-standard seasons? + cube = self.cube + time_coord = self.time_coord + seasons = ["djfm", "amjj", "ason"] + ccat.add_season(cube, time_coord, name="seasons", seasons=seasons) + ccat.add_season_number( + cube, time_coord, name="season_numbers", seasons=seasons + ) + ccat.add_season_year( + cube, time_coord, name="season_years", seasons=seasons + ) + self.assertCML(cube, ("categorisation", "customcheck.cml")) + + def test_add_season_membership(self): + # season membership identifies correct seasons? + season = "djf" + ccat.add_season_membership(self.cube, "time", season, name="in_season") + ccat.add_season(self.cube, "time") + coord_season = self.cube.coord("season") + coord_membership = self.cube.coord("in_season") + season_locations = np.where(coord_season.points == season)[0] + membership_locations = np.where(coord_membership.points)[0] + self.assertArrayEqual(membership_locations, season_locations) + + def test_add_season_invalid_spec(self): + # custom seasons with an invalid season raises an error? + seasons = ("djf", "maj", "jja", "son") # MAJ not a season! + for func in ( + ccat.add_season, + ccat.add_season_year, + ccat.add_season_number, + ): + with self.assertRaises(ValueError): + func(self.cube, "time", name="my_category", seasons=seasons) + + def test_add_season_repeated_months(self): + # custom seasons with repeated months raises an error? + seasons = ("djfm", "mam", "jja", "son") + for func in ( + ccat.add_season, + ccat.add_season_year, + ccat.add_season_number, + ): + with self.assertRaises(ValueError): + func(self.cube, "time", name="my_category", seasons=seasons) + + def test_add_season_missing_months(self): + # custom seasons with missing months raises an error? + seasons = ("djfm", "amjj") + for func in ( + ccat.add_season, + ccat.add_season_year, + ccat.add_season_number, + ): + with self.assertRaises(ValueError): + func(self.cube, "time", name="my_category", seasons=seasons) + + def test_add_season_membership_invalid_spec(self): + season = "maj" # not a season! + with self.assertRaises(ValueError): + ccat.add_season_membership( + self.cube, "time", season, name="maj_season" + ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index e62a94f080..7cd15297cc 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
# import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip @@ -17,7 +18,6 @@ ) import iris.coords import iris.cube -from iris.exceptions import IrisUserWarning import iris.tests.stock @@ -341,7 +341,7 @@ def test_inverse_flattening_change(self): cs = GeogCS(6543210, 6500000) initial_crs = cs.as_cartopy_crs() with self.assertWarnsRegex( - IrisUserWarning, + UserWarning, "Setting inverse_flattening does not affect other properties of the GeogCS object.", ): cs.inverse_flattening = cs.inverse_flattening + 1 diff --git a/lib/iris/tests/test_cube.py b/lib/iris/tests/test_cube.py index d13db758a5..c9b76539d2 100644 --- a/lib/iris/tests/test_cube.py +++ b/lib/iris/tests/test_cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_cube_to_pp.py b/lib/iris/tests/test_cube_to_pp.py index 1a6be27f9c..a6fc5e3149 100644 --- a/lib/iris/tests/test_cube_to_pp.py +++ b/lib/iris/tests/test_cube_to_pp.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_ff.py b/lib/iris/tests/test_ff.py index 1abfafdac1..95f3b1493b 100644 --- a/lib/iris/tests/test_ff.py +++ b/lib/iris/tests/test_ff.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the Fieldsfile file loading plugin and FFHeader. diff --git a/lib/iris/tests/test_file_load.py b/lib/iris/tests/test_file_load.py index d0b9b2461a..0fe69ff583 100644 --- a/lib/iris/tests/test_file_load.py +++ b/lib/iris/tests/test_file_load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the file loading mechanism. diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index dc901db715..216637202a 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the file saving mechanism. diff --git a/lib/iris/tests/test_hybrid.py b/lib/iris/tests/test_hybrid.py index e3e5076650..76fc971a08 100644 --- a/lib/iris/tests/test_hybrid.py +++ b/lib/iris/tests/test_hybrid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the hybrid vertical coordinate representations. @@ -17,7 +18,6 @@ import iris from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.exceptions import IrisIgnoringBoundsWarning import iris.tests.stock @@ -136,7 +136,7 @@ def test_invalid_dependencies(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaises(IrisIgnoringBoundsWarning): + with self.assertRaises(UserWarning): _ = HybridHeightFactory(orography=sigma) def test_bounded_orography(self): @@ -154,7 +154,7 @@ def test_bounded_orography(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaisesRegex(IrisIgnoringBoundsWarning, msg): + with self.assertRaisesRegex(UserWarning, msg): self.cube.coord("altitude") @@ -215,7 +215,7 @@ def test_invalid_dependencies(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaises(IrisIgnoringBoundsWarning): + with self.assertRaises(UserWarning): _ = HybridPressureFactory( sigma=sigma, surface_air_pressure=sigma ) @@ -235,7 +235,7 @@ def test_bounded_surface_pressure(self): with warnings.catch_warnings(): # Cause all warnings to raise Exceptions warnings.simplefilter("error") - with self.assertRaisesRegex(IrisIgnoringBoundsWarning, msg): + with self.assertRaisesRegex(UserWarning, msg): self.cube.coord("air_pressure") diff --git a/lib/iris/tests/test_image_json.py b/lib/iris/tests/test_image_json.py index 75e40822dc..b5213156f8 100644 --- a/lib/iris/tests/test_image_json.py +++ b/lib/iris/tests/test_image_json.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_imports.py b/lib/iris/tests/test_imports.py index 46c7cae723..ca0d262ec4 100644 --- a/lib/iris/tests/test_imports.py +++ b/lib/iris/tests/test_imports.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
 # import iris tests first so that some things can be initialised before importing anything else
 import iris.tests as tests  # isort:skip
diff --git a/lib/iris/tests/test_intersect.py b/lib/iris/tests/test_intersect.py
index 29603f61a8..01e9f79af5 100644
--- a/lib/iris/tests/test_intersect.py
+++ b/lib/iris/tests/test_intersect.py
@@ -1,7 +1,8 @@
 # Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
 """
 Test the intersection of Coords

diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py
index 852944eee5..82da82cfa9 100644
--- a/lib/iris/tests/test_io_init.py
+++ b/lib/iris/tests/test_io_init.py
@@ -1,7 +1,8 @@
 # Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
 """
 Test the io/__init__.py module.

diff --git a/lib/iris/tests/test_iterate.py b/lib/iris/tests/test_iterate.py
index 1bee6db74f..ec86d2f69d 100644
--- a/lib/iris/tests/test_iterate.py
+++ b/lib/iris/tests/test_iterate.py
@@ -1,7 +1,8 @@
 # Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
 """
 Test the iteration of cubes in step.

@@ -21,7 +22,6 @@

 import iris
 import iris.analysis
-from iris.exceptions import IrisUserWarning
 import iris.iterate
 import iris.tests.stock

@@ -365,12 +365,12 @@ def test_izip_different_valued_coords(self):
         warnings.simplefilter(
             "error"
         )  # Cause all warnings to raise Exceptions
-        with self.assertRaises(IrisUserWarning):
+        with self.assertRaises(UserWarning):
             iris.iterate.izip(
                 self.cube_a, self.cube_b, coords=self.coord_names
             )
         # Call with coordinates, rather than names
-        with self.assertRaises(IrisUserWarning):
+        with self.assertRaises(UserWarning):
             iris.iterate.izip(
                 self.cube_a, self.cube_b, coords=[latitude, longitude]
             )
diff --git a/lib/iris/tests/test_lazy_aggregate_by.py b/lib/iris/tests/test_lazy_aggregate_by.py
index 690198c25a..57b748e52f 100644
--- a/lib/iris/tests/test_lazy_aggregate_by.py
+++ b/lib/iris/tests/test_lazy_aggregate_by.py
@@ -1,7 +1,8 @@
 # Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
 import unittest

 from iris._lazy_data import as_lazy_data
diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py
index 1189f74b55..adb33924e5 100644
--- a/lib/iris/tests/test_load.py
+++ b/lib/iris/tests/test_load.py
@@ -1,7 +1,8 @@
 # Copyright Iris contributors
 #
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the main loading API. diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index 6ea4571630..202c319b61 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Tests map creation. diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index 7c11fde55d..e53bbfb5f3 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test the cube merging mechanism. @@ -21,7 +22,6 @@ from iris._lazy_data import as_lazy_data from iris.coords import AuxCoord, DimCoord import iris.cube -from iris.cube import CubeAttrsDict import iris.exceptions import iris.tests.stock @@ -1108,86 +1108,5 @@ def test_ancillary_variable_error_msg(self): _ = iris.cube.CubeList([cube1, cube2]).merge_cube() -class TestCubeMerge__split_attributes__error_messages(tests.IrisTest): - """ - Specific tests for the detection and wording of attribute-mismatch errors. - - In particular, the adoption of 'split' attributes with the new - :class:`iris.cube.CubeAttrsDict` introduces some more subtle possible discrepancies - in attributes, where this has also impacted the messaging, so this aims to probe - those cases. - """ - - def _check_merge_error(self, attrs_1, attrs_2, expected_message): - """ - Check the error from a merge failure caused by a mismatch of attributes. - - Build a pair of cubes with given attributes, merge them + check for a match - to the expected error message. 
- """ - cube_1 = iris.cube.Cube( - [0], - aux_coords_and_dims=[(AuxCoord([1], long_name="x"), None)], - attributes=attrs_1, - ) - cube_2 = iris.cube.Cube( - [0], - aux_coords_and_dims=[(AuxCoord([2], long_name="x"), None)], - attributes=attrs_2, - ) - with self.assertRaisesRegex( - iris.exceptions.MergeError, expected_message - ): - iris.cube.CubeList([cube_1, cube_2]).merge_cube() - - def test_keys_differ__single(self): - self._check_merge_error( - attrs_1=dict(a=1, b=2), - attrs_2=dict(a=1), - # Note: matching key 'a' does *not* appear in the message - expected_message="cube.attributes keys differ: 'b'", - ) - - def test_keys_differ__multiple(self): - self._check_merge_error( - attrs_1=dict(a=1, b=2), - attrs_2=dict(a=1, c=2), - expected_message="cube.attributes keys differ: 'b', 'c'", - ) - - def test_values_differ__single(self): - self._check_merge_error( - attrs_1=dict(a=1, b=2), # Note: matching key 'a' does not appear - attrs_2=dict(a=1, b=3), - expected_message="cube.attributes values differ for keys: 'b'", - ) - - def test_values_differ__multiple(self): - self._check_merge_error( - attrs_1=dict(a=1, b=2), - attrs_2=dict(a=12, b=22), - expected_message="cube.attributes values differ for keys: 'a', 'b'", - ) - - def test_splitattrs_keys_local_global_mismatch(self): - # Since Cube.attributes is now a "split-attributes" dictionary, it is now - # possible to have "cube1.attributes != cube1.attributes", but also - # "set(cube1.attributes.keys()) == set(cube2.attributes.keys())". - # I.E. it is now necessary to specifically compare ".globals" and ".locals" to - # see *what* differs between two attributes dictionaries. - self._check_merge_error( - attrs_1=CubeAttrsDict(globals=dict(a=1), locals=dict(b=2)), - attrs_2=CubeAttrsDict(locals=dict(a=2)), - expected_message="cube.attributes keys differ: 'a', 'b'", - ) - - def test_splitattrs_keys_local_match_masks_global_mismatch(self): - self._check_merge_error( - attrs_1=CubeAttrsDict(globals=dict(a=1), locals=dict(a=3)), - attrs_2=CubeAttrsDict(globals=dict(a=2), locals=dict(a=3)), - expected_message="cube.attributes values differ for keys: 'a'", - ) - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_name.py b/lib/iris/tests/test_name.py index 51bc92c28c..b4e91bafd7 100644 --- a/lib/iris/tests/test_name.py +++ b/lib/iris/tests/test_name.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Tests for NAME loading.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 793f8df876..6438140ed9 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test CF-NetCDF file loading and saving. 
@@ -25,7 +26,6 @@ from iris._lazy_data import is_lazy_data import iris.analysis.trajectory import iris.coord_systems as icoord_systems -from iris.exceptions import IrisCfSaveWarning from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf from iris.fileformats.netcdf import _thread_safe_nc @@ -1099,9 +1099,7 @@ def test_conflicting_global_attributes(self): with self.temp_filename(suffix=".nc") as filename: with mock.patch("warnings.warn") as warn: iris.save([self.cube, self.cube2], filename) - warn.assert_called_with( - expected_msg, category=IrisCfSaveWarning - ) + warn.assert_called_with(expected_msg) self.assertCDL( filename, ("netcdf", "netcdf_save_confl_global_attr.cdl") ) diff --git a/lib/iris/tests/test_nimrod.py b/lib/iris/tests/test_nimrod.py index ed60a516c8..6d62623198 100644 --- a/lib/iris/tests/test_nimrod.py +++ b/lib/iris/tests/test_nimrod.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_peak.py b/lib/iris/tests/test_peak.py index 1d9dd68cc1..a2b6894149 100644 --- a/lib/iris/tests/test_peak.py +++ b/lib/iris/tests/test_peak.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/test_pickling.py b/lib/iris/tests/test_pickling.py index 342b07cb03..26247e795b 100644 --- a/lib/iris/tests/test_pickling.py +++ b/lib/iris/tests/test_pickling.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test pickling of Iris objects. diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 150d521e34..55c912f423 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_pp_cf.py b/lib/iris/tests/test_pp_cf.py index 44650919e1..49bedaf1e2 100644 --- a/lib/iris/tests/test_pp_cf.py +++ b/lib/iris/tests/test_pp_cf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_pp_module.py b/lib/iris/tests/test_pp_module.py index b8606e3120..ca7f1c50eb 100644 --- a/lib/iris/tests/test_pp_module.py +++ b/lib/iris/tests/test_pp_module.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_pp_stash.py b/lib/iris/tests/test_pp_stash.py index 733d1697de..42390ab2b3 100644 --- a/lib/iris/tests/test_pp_stash.py +++ b/lib/iris/tests/test_pp_stash.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_pp_to_cube.py b/lib/iris/tests/test_pp_to_cube.py index d9c47c7841..eb006fb88e 100644 --- a/lib/iris/tests/test_pp_to_cube.py +++ b/lib/iris/tests/test_pp_to_cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index c42a8989fb..df2db12de6 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Tests the high-level plotting interface. diff --git a/lib/iris/tests/test_std_names.py b/lib/iris/tests/test_std_names.py index bc50903ab6..48d32acbee 100644 --- a/lib/iris/tests/test_std_names.py +++ b/lib/iris/tests/test_std_names.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_uri_callback.py b/lib/iris/tests/test_uri_callback.py index 62ae1b7fc8..67831945c5 100644 --- a/lib/iris/tests/test_uri_callback.py +++ b/lib/iris/tests/test_uri_callback.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index cf1dc44755..d8d5d73e95 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test iris.util diff --git a/lib/iris/tests/unit/__init__.py b/lib/iris/tests/unit/__init__.py index c03d437279..50929c8020 100644 --- a/lib/iris/tests/unit/__init__.py +++ b/lib/iris/tests/unit/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris` package.""" diff --git a/lib/iris/tests/unit/analysis/__init__.py b/lib/iris/tests/unit/analysis/__init__.py index 4f957bd501..974b4e3584 100644 --- a/lib/iris/tests/unit/analysis/__init__.py +++ b/lib/iris/tests/unit/analysis/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis` package.""" diff --git a/lib/iris/tests/unit/analysis/area_weighted/__init__.py b/lib/iris/tests/unit/analysis/area_weighted/__init__.py index 2cccaec14c..464036a6dd 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/__init__.py +++ b/lib/iris/tests/unit/analysis/area_weighted/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :mod:`iris.analysis._area_weighted` module.""" diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py index 789426e11b..ecaa028ab3 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py +++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :class:`iris.analysis._area_weighted.AreaWeightedRegridder`. @@ -50,7 +51,7 @@ def check_mdtol(self, mdtol=None): _regrid_info = _regrid_area_weighted_rectilinear_src_and_grid__prepare( src_grid, target_grid ) - self.assertEqual(len(_regrid_info), 9) + self.assertEqual(len(_regrid_info), 10) with mock.patch( "iris.analysis._area_weighted." "_regrid_area_weighted_rectilinear_src_and_grid__prepare", diff --git a/lib/iris/tests/unit/analysis/cartography/__init__.py b/lib/iris/tests/unit/analysis/cartography/__init__.py index ff3db13198..625a6fa141 100644 --- a/lib/iris/tests/unit/analysis/cartography/__init__.py +++ b/lib/iris/tests/unit/analysis/cartography/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis.cartography` module.""" diff --git a/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py b/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py index 23c7097902..612e5d8ecf 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py +++ b/lib/iris/tests/unit/analysis/cartography/test__get_lon_lat_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.analysis.cartography._get_lon_lat_coords""" import pytest diff --git a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py b/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py index e5f6964e22..a44661292f 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py +++ b/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the `iris.analysis.cartography._quadrant_area` function""" diff --git a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py b/lib/iris/tests/unit/analysis/cartography/test__xy_range.py index eeafc533e4..009c97fc34 100644 --- a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py +++ b/lib/iris/tests/unit/analysis/cartography/test__xy_range.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :func:`iris.analysis.cartography._xy_range`""" diff --git a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py b/lib/iris/tests/unit/analysis/cartography/test_area_weights.py index 29c906f0d1..696841ddd6 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py +++ b/lib/iris/tests/unit/analysis/cartography/test_area_weights.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.analysis.cartography.area_weights` function""" diff --git a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py index f3f8c81583..810851362e 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py +++ b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.analysis.cartography.gridcell_angles`. diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py index 7b52f4492e..8649cc55ea 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ b/lib/iris/tests/unit/analysis/cartography/test_project.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :func:`iris.analysis.cartography.project`.""" # Import iris.tests first so that some things can be initialised before @@ -15,7 +16,6 @@ import iris.coord_systems import iris.coords import iris.cube -from iris.exceptions import IrisDefaultingWarning import iris.tests import iris.tests.stock @@ -161,8 +161,7 @@ def test_no_coord_system(self): warn.assert_called_once_with( "Coordinate system of latitude and " "longitude coordinates is not specified. " - "Assuming WGS84 Geodetic.", - category=IrisDefaultingWarning, + "Assuming WGS84 Geodetic." 
) diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py index 389dfaeb3a..f5c882a983 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.analysis.cartography.rotate_grid_vectors`. diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py index af1a2b8b42..212a39bf2d 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.analysis.cartography.rotate_winds`. diff --git a/lib/iris/tests/unit/analysis/geometry/__init__.py b/lib/iris/tests/unit/analysis/geometry/__init__.py index 25fa6af6cb..c57f5e246a 100644 --- a/lib/iris/tests/unit/analysis/geometry/__init__.py +++ b/lib/iris/tests/unit/analysis/geometry/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis.geometry` module.""" diff --git a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py b/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py index bec45d8b17..2509ac1a92 100644 --- a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py +++ b/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.analysis.geometry._extract_relevant_cube_slice`. diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py index ae0e47292d..49e03a1174 100644 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.analysis.geometry.geometry_area_weights` function. @@ -20,7 +21,6 @@ from iris.analysis.geometry import geometry_area_weights from iris.coords import DimCoord from iris.cube import Cube -from iris.exceptions import IrisGeometryExceedWarning import iris.tests.stock as stock @@ -148,9 +148,7 @@ def test_distinct_xy_bounds_pole(self): "The geometry exceeds the " "cube's y dimension at the upper end.", ) - self.assertTrue( - issubclass(w[-1].category, IrisGeometryExceedWarning) - ) + self.assertTrue(issubclass(w[-1].category, UserWarning)) target = np.array( [ [0, top_cell_half, top_cell_half, 0], diff --git a/lib/iris/tests/unit/analysis/interpolation/__init__.py b/lib/iris/tests/unit/analysis/interpolation/__init__.py index 01208c1aba..3825dacda3 100644 --- a/lib/iris/tests/unit/analysis/interpolation/__init__.py +++ b/lib/iris/tests/unit/analysis/interpolation/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis._interpolation` package.""" diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index 574a25ee7d..a91a08dcb8 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :class:`iris.analysis._interpolation.RectilinearInterpolator`. diff --git a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py b/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py index 26bc32c69f..54e54bc304 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.analysis._interpolation.get_xy_dim_coords`. diff --git a/lib/iris/tests/unit/analysis/maths/__init__.py b/lib/iris/tests/unit/analysis/maths/__init__.py index c77f8ede37..c259bdeff6 100644 --- a/lib/iris/tests/unit/analysis/maths/__init__.py +++ b/lib/iris/tests/unit/analysis/maths/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis.maths` module.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py b/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py index 17c0aeac15..11664af115 100644 --- a/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py +++ b/lib/iris/tests/unit/analysis/maths/test__arith__dask_array.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for cube arithmetic with dask arrays.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py index 85d1c363e6..57e012e1c9 100644 --- a/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py +++ b/lib/iris/tests/unit/analysis/maths/test__arith__derived_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for cube arithmetic involving derived (i.e. factory) coords.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py index c55d19f000..e1255ef9d8 100644 --- a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py +++ b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for cube arithmetic involving MeshCoords.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py b/lib/iris/tests/unit/analysis/maths/test__get_dtype.py index 81cfdee3fb..220b728b32 100644 --- a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py +++ b/lib/iris/tests/unit/analysis/maths/test__get_dtype.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.analysis.maths._get_dtype`. 
diff --git a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py index 2c97737973..bd81a96fbd 100644 --- a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py +++ b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.analysis.maths._inplace_common_checks`. diff --git a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py index 3f69118e0f..c422e366be 100644 --- a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py +++ b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.analysis.maths._output_dtype`. diff --git a/lib/iris/tests/unit/analysis/maths/test_add.py b/lib/iris/tests/unit/analysis/maths/test_add.py index 69078b9a96..1ca7f7c244 100644 --- a/lib/iris/tests/unit/analysis/maths/test_add.py +++ b/lib/iris/tests/unit/analysis/maths/test_add.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :func:`iris.analysis.maths.add` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test_divide.py b/lib/iris/tests/unit/analysis/maths/test_divide.py index 17e5105126..4bd202e037 100644 --- a/lib/iris/tests/unit/analysis/maths/test_divide.py +++ b/lib/iris/tests/unit/analysis/maths/test_divide.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :func:`iris.analysis.maths.divide` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test_multiply.py b/lib/iris/tests/unit/analysis/maths/test_multiply.py index 945a86a4d1..266342605a 100644 --- a/lib/iris/tests/unit/analysis/maths/test_multiply.py +++ b/lib/iris/tests/unit/analysis/maths/test_multiply.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :func:`iris.analysis.maths.multiply` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/maths/test_subtract.py b/lib/iris/tests/unit/analysis/maths/test_subtract.py index 6812176412..f7a9df34d0 100644 --- a/lib/iris/tests/unit/analysis/maths/test_subtract.py +++ b/lib/iris/tests/unit/analysis/maths/test_subtract.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :func:`iris.analysis.maths.subtract` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/regrid/__init__.py b/lib/iris/tests/unit/analysis/regrid/__init__.py index c4e5c119ea..a0a0fd0a6b 100644 --- a/lib/iris/tests/unit/analysis/regrid/__init__.py +++ b/lib/iris/tests/unit/analysis/regrid/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis._regrid` module.""" diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index 4855b92332..a018507fb3 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.analysis._regrid.RectilinearRegridder`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py index 16639c1649..9b0160aee4 100644 --- a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for :class:`iris.analysis._regrid.CurvilinearRegridder`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py b/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py index cd80f89470..67218194c2 100644 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py +++ b/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis.scipy_interpolate` module.""" diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py index 9bf9621fb4..f0aa027baa 100644 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py +++ b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :func:`iris.analysis._scipy_interpolate._RegularGridInterpolator` class.""" diff --git a/lib/iris/tests/unit/analysis/stats/__init__.py b/lib/iris/tests/unit/analysis/stats/__init__.py index 8787858158..0b896d648d 100644 --- a/lib/iris/tests/unit/analysis/stats/__init__.py +++ b/lib/iris/tests/unit/analysis/stats/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis.stats` module.""" diff --git a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py index 648aeb8a64..63cf4e2abe 100644 --- a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py +++ b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.analysis.stats.pearsonr` function.""" # Import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_Aggregator.py b/lib/iris/tests/unit/analysis/test_Aggregator.py index a7029d7d33..45081ad07f 100644 --- a/lib/iris/tests/unit/analysis/test_Aggregator.py +++ b/lib/iris/tests/unit/analysis/test_Aggregator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.analysis.Aggregator` class instance.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_AreaWeighted.py b/lib/iris/tests/unit/analysis/test_AreaWeighted.py index 1e16e4bcb2..2454e0817c 100644 --- a/lib/iris/tests/unit/analysis/test_AreaWeighted.py +++ b/lib/iris/tests/unit/analysis/test_AreaWeighted.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.analysis.AreaWeighted`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_COUNT.py b/lib/iris/tests/unit/analysis/test_COUNT.py index fa51565474..96274f7cd0 100644 --- a/lib/iris/tests/unit/analysis/test_COUNT.py +++ b/lib/iris/tests/unit/analysis/test_COUNT.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.COUNT` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_Linear.py b/lib/iris/tests/unit/analysis/test_Linear.py index e98a6f585e..27565f8c51 100644 --- a/lib/iris/tests/unit/analysis/test_Linear.py +++ b/lib/iris/tests/unit/analysis/test_Linear.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.analysis.Linear`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MAX.py b/lib/iris/tests/unit/analysis/test_MAX.py index 8753c5e660..91d4daf1f0 100644 --- a/lib/iris/tests/unit/analysis/test_MAX.py +++ b/lib/iris/tests/unit/analysis/test_MAX.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :data:`iris.analysis.MAX` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MAX_RUN.py b/lib/iris/tests/unit/analysis/test_MAX_RUN.py index 13a940f6fa..00de383f7a 100755 --- a/lib/iris/tests/unit/analysis/test_MAX_RUN.py +++ b/lib/iris/tests/unit/analysis/test_MAX_RUN.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.MAX_RUN` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MEAN.py b/lib/iris/tests/unit/analysis/test_MEAN.py index 9b8ccc1aa7..18e2b4ca6c 100644 --- a/lib/iris/tests/unit/analysis/test_MEAN.py +++ b/lib/iris/tests/unit/analysis/test_MEAN.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.MEAN` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_MIN.py b/lib/iris/tests/unit/analysis/test_MIN.py index 06757517d3..f12790f0f1 100644 --- a/lib/iris/tests/unit/analysis/test_MIN.py +++ b/lib/iris/tests/unit/analysis/test_MIN.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.MIN` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_Nearest.py b/lib/iris/tests/unit/analysis/test_Nearest.py index 053fca1907..f3736d2cf3 100644 --- a/lib/iris/tests/unit/analysis/test_Nearest.py +++ b/lib/iris/tests/unit/analysis/test_Nearest.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.analysis.Nearest`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py index d841619ccc..bfd3234d26 100644 --- a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_PERCENTILE.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.PERCENTILE` aggregator.""" # Import iris.tests first so that some things can be initialised before @@ -93,7 +94,7 @@ class ScipyAggregateMixin: Tests for calculations specific to the default (scipy) function. Includes tests on masked data and tests to verify that the function is called with the expected keywords. Needs to be used with AggregateMixin, as some of - these tests reuse its method. + these tests re-use its method. """ diff --git a/lib/iris/tests/unit/analysis/test_PROPORTION.py b/lib/iris/tests/unit/analysis/test_PROPORTION.py index dc890463ae..b7118241af 100644 --- a/lib/iris/tests/unit/analysis/test_PROPORTION.py +++ b/lib/iris/tests/unit/analysis/test_PROPORTION.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.PROPORTION` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py index 0137a50019..f11cd7a8d3 100644 --- a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. diff --git a/lib/iris/tests/unit/analysis/test_PointInCell.py b/lib/iris/tests/unit/analysis/test_PointInCell.py index 83453c26d1..2570465245 100644 --- a/lib/iris/tests/unit/analysis/test_PointInCell.py +++ b/lib/iris/tests/unit/analysis/test_PointInCell.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.analysis.PointInCell`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_RMS.py b/lib/iris/tests/unit/analysis/test_RMS.py index f5da089a9c..74f309ce00 100644 --- a/lib/iris/tests/unit/analysis/test_RMS.py +++ b/lib/iris/tests/unit/analysis/test_RMS.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :data:`iris.analysis.RMS` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_STD_DEV.py b/lib/iris/tests/unit/analysis/test_STD_DEV.py index 0abf4f9dc3..978bdb4ddf 100644 --- a/lib/iris/tests/unit/analysis/test_STD_DEV.py +++ b/lib/iris/tests/unit/analysis/test_STD_DEV.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.STD_DEV` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_SUM.py b/lib/iris/tests/unit/analysis/test_SUM.py index 90be890797..64699b442f 100644 --- a/lib/iris/tests/unit/analysis/test_SUM.py +++ b/lib/iris/tests/unit/analysis/test_SUM.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.SUM` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_VARIANCE.py b/lib/iris/tests/unit/analysis/test_VARIANCE.py index e4dde970a9..857bc7e1d2 100644 --- a/lib/iris/tests/unit/analysis/test_VARIANCE.py +++ b/lib/iris/tests/unit/analysis/test_VARIANCE.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.VARIANCE` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py b/lib/iris/tests/unit/analysis/test_WPERCENTILE.py index c8bcf5018e..a59bf4ce9c 100644 --- a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_WPERCENTILE.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis.PERCENTILE` aggregator.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py index a1306063b6..0cd808d1c7 100644 --- a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. diff --git a/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py b/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py index f4cb94f466..505a00df78 100644 --- a/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py +++ b/lib/iris/tests/unit/analysis/test__axis_to_single_trailing.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :data:`iris.analysis._axis_to_single_trailing` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/analysis/trajectory/__init__.py b/lib/iris/tests/unit/analysis/trajectory/__init__.py index 8033fd8c30..55d3ebd8bc 100644 --- a/lib/iris/tests/unit/analysis/trajectory/__init__.py +++ b/lib/iris/tests/unit/analysis/trajectory/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.analysis.trajectory` module.""" diff --git a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py b/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py index c8971a897e..32c41b78db 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :class:`iris.analysis.trajectory.Trajectory`. diff --git a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py b/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py index f70c3e7518..a652ceb72e 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :class:`iris.analysis.trajectory.UnstructuredNearestNeigbourRegridder`. 
diff --git a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py b/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py index d30feecadd..8b9e4cafa4 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py +++ b/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :meth:`iris.analysis.trajectory._nearest_neighbour_indices_ndcoords`. diff --git a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py index c156354f8f..f1b9711068 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :meth:`iris.analysis.trajectory.interpolate`. diff --git a/lib/iris/tests/unit/aux_factory/__init__.py b/lib/iris/tests/unit/aux_factory/__init__.py index 621625e9da..00b9f1a3bd 100644 --- a/lib/iris/tests/unit/aux_factory/__init__.py +++ b/lib/iris/tests/unit/aux_factory/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.aux_factory` module.""" diff --git a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py index 88da4ca463..6e417a3b38 100644 --- a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.aux_factory.AtmosphereSigmaFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index 619a0482b6..f8bd54093f 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for `iris.aux_factory.AuxCoordFactory`. diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index 31f791f10e..48fead3aa5 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.aux_factory.HybridPressureFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index 4bd85d1e3b..f588c9f001 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.aux_factory.OceanSFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 349b4cfcb6..7a2f4c631c 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.aux_factory.OceanSg1Factory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index 3304cf121d..4d1f268a1e 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.aux_factory.OceanSg2Factory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index a03afa661d..30d9647952 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.aux_factory.OceanSigmaFactory` class. diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index a191fac978..736a883846 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.aux_factory.OceanSigmaZFactory` class. diff --git a/lib/iris/tests/unit/common/__init__.py b/lib/iris/tests/unit/common/__init__.py index 60d8548652..5380785042 100644 --- a/lib/iris/tests/unit/common/__init__.py +++ b/lib/iris/tests/unit/common/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.common` module.""" diff --git a/lib/iris/tests/unit/common/lenient/__init__.py b/lib/iris/tests/unit/common/lenient/__init__.py index e927f5f3ac..2a99e7a4c2 100644 --- a/lib/iris/tests/unit/common/lenient/__init__.py +++ b/lib/iris/tests/unit/common/lenient/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.common.lenient` package.""" diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py index 6bcf366a25..62e2b24891 100644 --- a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.lenient.Lenient`. diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index 9bff110942..44f38d9c5a 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.lenient._Lenient`. 
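For quick reference, the many test-module header hunks in this patch (above and below) all make the same substitution. Reconstructed from those hunks, the header removed by the revert is:

    # Copyright Iris contributors
    #
    # This file is part of Iris and is released under the BSD license.
    # See LICENSE in the root of the repository for full licensing details.

and the header restored in its place is:

    # Copyright Iris contributors
    #
    # This file is part of Iris and is released under the LGPL license.
    # See COPYING and COPYING.LESSER in the root of the repository for full
    # licensing details.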
diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py index b604e49608..3a19563efc 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_client.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_client.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.common.lenient._lenient_client`. diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_service.py b/lib/iris/tests/unit/common/lenient/test__lenient_service.py index f6bafde5e7..9545b137ea 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_service.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_service.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.common.lenient._lenient_service`. diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py index 6e2eb23bc6..3deefbf30d 100644 --- a/lib/iris/tests/unit/common/lenient/test__qualname.py +++ b/lib/iris/tests/unit/common/lenient/test__qualname.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.common.lenient._qualname`. diff --git a/lib/iris/tests/unit/common/metadata/__init__.py b/lib/iris/tests/unit/common/metadata/__init__.py index 973234fb21..aba33c8312 100644 --- a/lib/iris/tests/unit/common/metadata/__init__.py +++ b/lib/iris/tests/unit/common/metadata/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.common.metadata` package.""" diff --git a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py index 196ab48d20..9efb43ec42 100644 --- a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
""" Unit tests for the :class:`iris.common.metadata.AncillaryVariableMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py index e7434922cf..f4760b3051 100644 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.metadata.BaseMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py index 25b287909d..a434651206 100644 --- a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.metadata.CellMeasureMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py index dac1f26f35..e3b7486012 100644 --- a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.metadata.CoordMetadata`. diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 4425ba62d7..848431565b 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.metadata.CubeMetadata`. 
@@ -15,11 +16,8 @@ import unittest.mock as mock from unittest.mock import sentinel -import pytest - from iris.common.lenient import _LENIENT, _qualname from iris.common.metadata import BaseMetadata, CubeMetadata -from iris.cube import CubeAttrsDict def _make_metadata( @@ -93,360 +91,9 @@ def test_bases(self): self.assertTrue(issubclass(self.cls, BaseMetadata)) -@pytest.fixture(params=CubeMetadata._fields) -def fieldname(request): - """Parametrize testing over all CubeMetadata field names.""" - return request.param - - -@pytest.fixture(params=["strict", "lenient"]) -def op_leniency(request): - """Parametrize testing over strict or lenient operation.""" - return request.param - - -@pytest.fixture(params=["primaryAA", "primaryAX", "primaryAB"]) -def primary_values(request): - """ - Parametrize over the possible non-trivial pairs of operation values. - - The parameters all provide two attribute values which are the left- and right-hand - arguments to the tested operation. The attribute values are single characters from - the end of the parameter name -- except that "X" denotes a "missing" attribute. - - The possible cases are: - - * one side has a value and the other is missing - * left and right have the same non-missing value - * left and right have different non-missing values - """ - return request.param - - -@pytest.fixture(params=[False, True], ids=["primaryLocal", "primaryGlobal"]) -def primary_is_global_not_local(request): - """Parametrize split-attribute testing over "global" or "local" attribute types.""" - return request.param - - -@pytest.fixture(params=[False, True], ids=["leftrightL2R", "leftrightR2L"]) -def order_reversed(request): - """Parametrize split-attribute testing over "left OP right" or "right OP left".""" - return request.param - - -# Define the expected results for split-attribute testing. -# This dictionary records the expected results for the various possible arrangements of -# values of a single attribute in the "left" and "right" inputs of a CubeMetadata -# operation. -# The possible operations are "equal", "combine" or "difference", and may all be -# performed "strict" or "lenient". -# N.B. the *same* results should also apply when left+right are swapped, with a suitable -# adjustment to the result value. Likewise, results should be the same for either -# global- or local-style attributes. -_ALL_RESULTS = { - "equal": { - "primaryAA": {"lenient": True, "strict": True}, - "primaryAX": {"lenient": True, "strict": False}, - "primaryAB": {"lenient": False, "strict": False}, - }, - "combine": { - "primaryAA": {"lenient": "A", "strict": "A"}, - "primaryAX": {"lenient": "A", "strict": None}, - "primaryAB": {"lenient": None, "strict": None}, - }, - "difference": { - "primaryAA": {"lenient": None, "strict": None}, - "primaryAX": {"lenient": None, "strict": ("A", None)}, - "primaryAB": {"lenient": ("A", "B"), "strict": ("A", "B")}, - }, -} -# A fixed attribute name used for all the split-attribute testing. -_TEST_ATTRNAME = "_test_attr_" - - -def extract_attribute_value(split_dict, extract_global): - """ - Extract a test-attribute value from a split-attribute dictionary. 
- - Parameters - ---------- - split_dict : CubeAttrsDict - a split dictionary from an operation result - extract_global : bool - whether to extract values of the global, or local, `_TEST_ATTRNAME` attribute - - Returns - ------- - str | None - """ - if extract_global: - result = split_dict.globals.get(_TEST_ATTRNAME, None) - else: - result = split_dict.locals.get(_TEST_ATTRNAME, None) - return result - - -def extract_result_value(input, extract_global): - """ - Extract the values(s) of the main test attribute from an operation result. - - Parameters - ---------- - input : bool | CubeMetadata - an operation result : the structure varies for the three different operations. - extract_global : bool - whether to return values of a global, or local, `_TEST_ATTRNAME` attribute. - - Returns - ------- - None | bool | str | tuple[None | str] - result value(s) - """ - if not isinstance(input, CubeMetadata): - # Result is either boolean (for "equals") or a None (for "difference"). - result = input - else: - # Result is a CubeMetadata. Get the value(s) of the required attribute. - result = input.attributes - - if isinstance(result, CubeAttrsDict): - result = extract_attribute_value(result, extract_global) - else: - # For "difference", input.attributes is a *pair* of dictionaries. - assert isinstance(result, tuple) - result = tuple( - [ - extract_attribute_value(dic, extract_global) - for dic in result - ] - ) - if result == (None, None): - # This value occurs when the desired attribute is *missing* from a - # difference result, but other (secondary) attributes were *different*. - # We want only differences of the *target* attribute, so convert these - # to a plain 'no difference', for expected-result testing purposes. - result = None - - return result - - -def make_attrsdict(value): - """ - Return a dictionary containing a test attribute with the given value. - - If the value is "X", the attribute is absent (result is empty dict). - """ - if value == "X": - # Translate an "X" input as "missing". - result = {} - else: - result = {_TEST_ATTRNAME: value} - return result - - -def check_splitattrs_testcase( - operation_name: str, - check_is_lenient: bool, - primary_inputs: str = "AA", # two character values - secondary_inputs: str = "XX", # two character values - check_global_not_local: bool = True, - check_reversed: bool = False, -): - """ - Test a metadata operation with split-attributes against known expected results. - - Parameters - ---------- - operation_name : str - One of "equal", "combine" or "difference. - check_is_lenient : bool - Whether the tested operation is performed 'lenient' or 'strict'. - primary_inputs : str - A pair of characters defining left + right attribute values for the operands of - the operation. - secondary_inputs : str - A further pair of values for an attribute of the same name but "other" type - ( i.e. global/local when the main test is local/global ). - check_global_not_local : bool - If `True` then the primary operands, and the tested result values, are *global* - attributes, and the secondary ones are local. - Otherwise, the other way around. - check_reversed : bool - If True, the left and right operands are exchanged, and the expected value - modified according. - - Notes - ----- - The expected result of an operation is mostly defined by : the operation applied; - the main "primary" inputs; and the lenient/strict mode. - - In the case of the "equals" operation, however, the expected result is simply - set to `False` if the secondary inputs do not match. 
- - Calling with different values for the keywords aims to show that the main operation - has the expected value, from _ALL_RESULTS, the ***same in essentially all cases*** - ( though modified in specific ways for some factors ). - - This regularity also demonstrates the required independence over the other - test-factors, i.e. global/local attribute type, and right-left order. - """ - # Just for comfort, check that inputs are all one of a few single characters. - assert all( - (item in list("ABCDX")) for item in (primary_inputs + secondary_inputs) - ) - # Interpret "primary" and "secondary" inputs as "global" and "local" attributes. - if check_global_not_local: - global_values, local_values = primary_inputs, secondary_inputs - else: - local_values, global_values = primary_inputs, secondary_inputs - - # Form 2 inputs to the operation : Make left+right split-attribute input - # dictionaries, with both the primary and secondary attribute value settings. - input_dicts = [ - CubeAttrsDict( - globals=make_attrsdict(global_value), - locals=make_attrsdict(local_value), - ) - for global_value, local_value in zip(global_values, local_values) - ] - # Make left+right CubeMetadata with those attributes, other fields all blank. - input_l, input_r = [ - CubeMetadata( - **{ - field: attrs if field == "attributes" else None - for field in CubeMetadata._fields - } - ) - for attrs in input_dicts - ] - - if check_reversed: - # Swap the inputs to perform a 'reversed' calculation. - input_l, input_r = input_r, input_l - - # Run the actual operation - result = getattr(input_l, operation_name)( - input_r, lenient=check_is_lenient - ) - - if operation_name == "difference" and check_reversed: - # Adjust the result of a "reversed" operation to the 'normal' way round. - # ( N.B. only "difference" results are affected by reversal. ) - if isinstance(result, CubeMetadata): - result = result._replace(attributes=result.attributes[::-1]) - - # Extract, from the operation result, the value to be tested against "expected". - result = extract_result_value(result, check_global_not_local) - - # Get the *expected* result for this operation. - which = "lenient" if check_is_lenient else "strict" - primary_key = "primary" + primary_inputs - expected = _ALL_RESULTS[operation_name][primary_key][which] - if operation_name == "equal" and expected: - # Account for the equality cases made `False` by mismatched secondary values. - left, right = secondary_inputs - secondaries_same = left == right or ( - check_is_lenient and "X" in (left, right) - ) - if not secondaries_same: - expected = False - - # Check that actual extracted operation result matches the "expected" one. - assert result == expected - - -class MixinSplitattrsMatrixTests: - """ - Define split-attributes tests to perform on all the metadata operations. - - This is inherited by the testclass for each operation : - i.e. Test___eq__, Test_combine and Test_difference - """ - - # Define the operation name : set in each inheritor - operation_name = None - - def test_splitattrs_cases( - self, - op_leniency, - primary_values, - primary_is_global_not_local, - order_reversed, - ): - """ - Check the basic operation against the expected result from _ALL_RESULTS. - - Parametrisation checks this for all combinations of various factors : - - * possible arrangements of the primary values - * strict and lenient - * global- and local-type attributes - * left-to-right or right-to-left operation order. 
- """ - primary_inputs = primary_values[-2:] - check_is_lenient = {"strict": False, "lenient": True}[op_leniency] - check_splitattrs_testcase( - operation_name=self.operation_name, - check_is_lenient=check_is_lenient, - primary_inputs=primary_inputs, - secondary_inputs="XX", - check_global_not_local=primary_is_global_not_local, - check_reversed=order_reversed, - ) - - @pytest.mark.parametrize( - "secondary_values", - [ - "secondaryXX", - "secondaryCX", - "secondaryXC", - "secondaryCC", - "secondaryCD", - ] - # NOTE: test CX as well as XC, since primary choices has "AX" but not "XA". - ) - def test_splitattrs_global_local_independence( - self, - op_leniency, - primary_values, - secondary_values, - ): - """ - Check that results are (mostly) independent of the "other" type attributes. - - The operation on attributes of the 'primary' type (global/local) should be - basically unaffected by those of the 'secondary' type (--> local/global). - - This is not really true for equality, so we adjust those results to compensate. - See :func:`check_splitattrs_testcase` for explanations. - - Notes - ----- - We provide this *separate* test for global/local attribute independence, - parametrized over selected relevant arrangements of the 'secondary' values. - We *don't* test with reversed order or "local" primary inputs, because matrix - testing over *all* relevant factors produces too many possible combinations. - """ - primary_inputs = primary_values[-2:] - secondary_inputs = secondary_values[-2:] - check_is_lenient = {"strict": False, "lenient": True}[op_leniency] - check_splitattrs_testcase( - operation_name=self.operation_name, - check_is_lenient=check_is_lenient, - primary_inputs=primary_inputs, - secondary_inputs=secondary_inputs, - check_global_not_local=True, - check_reversed=False, - ) - - -class Test___eq__(MixinSplitattrsMatrixTests): - operation_name = "equal" - - @pytest.fixture(autouse=True) - def setup(self): - self.lvalues = dict( +class Test___eq__(tests.IrisTest): + def setUp(self): + self.values = dict( standard_name=sentinel.standard_name, long_name=sentinel.long_name, var_name=sentinel.var_name, @@ -455,19 +102,17 @@ def setup(self): attributes=dict(), cell_methods=sentinel.cell_methods, ) - # Setup another values tuple with all-distinct content objects. - self.rvalues = deepcopy(self.lvalues) self.dummy = sentinel.dummy self.cls = CubeMetadata def test_wraps_docstring(self): - assert self.cls.__eq__.__doc__ == BaseMetadata.__eq__.__doc__ + self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) def test_lenient_service(self): qualname___eq__ = _qualname(self.cls.__eq__) - assert qualname___eq__ in _LENIENT - assert _LENIENT[qualname___eq__] - assert _LENIENT[self.cls.__eq__] + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) def test_call(self): other = sentinel.other @@ -478,114 +123,107 @@ def test_call(self): ) as mocker: result = metadata.__eq__(other) - assert return_value == result - assert mocker.call_args_list == [mock.call(other)] - - def test_op_same(self, op_leniency): - # Check op all-same content, but all-new data. - # NOTE: test for both strict/lenient, should both work the same. - is_lenient = op_leniency == "lenient" - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check equality both l==r and r==l. 
- assert lmetadata.__eq__(rmetadata) - assert rmetadata.__eq__(lmetadata) - - def test_op_different__none(self, fieldname, op_leniency): - # One side has field=value, and the other field=None, both strict + lenient. - if fieldname == "attributes": - # Must be a dict, cannot be None. - pytest.skip() - else: - is_lenient = op_leniency == "lenient" - lmetadata = self.cls(**self.lvalues) - self.rvalues.update({fieldname: None}) - rmetadata = self.cls(**self.rvalues) - if fieldname in ("cell_methods", "standard_name", "units"): - # These ones are compared strictly - expect_success = False - elif fieldname in ("var_name", "long_name"): - # For other 'normal' fields : lenient succeeds, strict does not. - expect_success = is_lenient - else: - # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" - ) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check equality both l==r and r==l. - assert lmetadata.__eq__(rmetadata) == expect_success - assert rmetadata.__eq__(lmetadata) == expect_success - - def test_op_different__value(self, fieldname, op_leniency): - # Compare when a given field value is changed, both strict + lenient. - if fieldname == "attributes": - # Dicts have more possibilities: handled separately. - pytest.skip() - else: - is_lenient = op_leniency == "lenient" - lmetadata = self.cls(**self.lvalues) - self.rvalues.update({fieldname: self.dummy}) - rmetadata = self.cls(**self.rvalues) - if fieldname in ( - "cell_methods", - "standard_name", - "units", - "long_name", - ): - # These ones are compared strictly - expect_success = False - elif fieldname == "var_name": - # For other 'normal' fields : lenient succeeds, strict does not. - expect_success = is_lenient - else: - # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" - ) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check equality both l==r and r==l. - assert lmetadata.__eq__(rmetadata) == expect_success - assert rmetadata.__eq__(lmetadata) == expect_success - - def test_op_different__attribute_extra(self, op_leniency): - # Check when one set of attributes has an extra entry. - is_lenient = op_leniency == "lenient" - lmetadata = self.cls(**self.lvalues) - self.rvalues["attributes"]["_extra_"] = 1 - rmetadata = self.cls(**self.rvalues) - # This counts as equal *only* in the lenient case. - expect_success = is_lenient - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check equality both l==r and r==l. - assert lmetadata.__eq__(rmetadata) == expect_success - assert rmetadata.__eq__(lmetadata) == expect_success - - def test_op_different__attribute_value(self, op_leniency): - # lhs and rhs have different values for an attribute, both strict + lenient. - is_lenient = op_leniency == "lenient" - self.lvalues["attributes"]["_extra_"] = mock.sentinel.value1 - self.rvalues["attributes"]["_extra_"] = mock.sentinel.value2 - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # This should ALWAYS fail. 
- assert not lmetadata.__eq__(rmetadata) - assert not rmetadata.__eq__(lmetadata) + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_measure_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + 
self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) class Test___lt__(tests.IrisTest): @@ -619,12 +257,9 @@ def test__ignore_attributes_cell_methods(self): self.assertFalse(result) -class Test_combine(MixinSplitattrsMatrixTests): - operation_name = "combine" - - @pytest.fixture(autouse=True) - def setup(self): - self.lvalues = dict( +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( standard_name=sentinel.standard_name, long_name=sentinel.long_name, var_name=sentinel.var_name, @@ -632,20 +267,20 @@ def setup(self): attributes=sentinel.attributes, cell_methods=sentinel.cell_methods, ) - # Get a second copy with all-new objects. - self.rvalues = deepcopy(self.lvalues) self.dummy = sentinel.dummy self.cls = CubeMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - assert self.cls.combine.__doc__ == BaseMetadata.combine.__doc__ + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__ + ) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) - assert qualname_combine in _LENIENT - assert _LENIENT[qualname_combine] - assert _LENIENT[self.cls.combine] + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) def test_lenient_default(self): other = sentinel.other @@ -655,8 +290,11 @@ def test_lenient_default(self): ) as mocker: result = self.none.combine(other) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=None)] + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) def test_lenient(self): other = sentinel.other @@ -667,165 +305,149 @@ def test_lenient(self): ) as mocker: result = self.none.combine(other, lenient=lenient) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=lenient)] - - def test_op_same(self, op_leniency): - # Result is same as either input, both strict + lenient. - is_lenient = op_leniency == "lenient" - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - expected = self.lvalues - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected - - def test_op_different__none(self, fieldname, op_leniency): - # One side has field=value, and the other field=None, both strict + lenient. - if fieldname == "attributes": - # Can't be None : Tested separately - pytest.skip() - - is_lenient = op_leniency == "lenient" - - lmetadata = self.cls(**self.lvalues) - # Cancel one setting in the rhs argument. - self.rvalues[fieldname] = None - rmetadata = self.cls(**self.rvalues) - - if fieldname in ("cell_methods", "units"): - # NB cell-methods and units *always* strict behaviour. 
- # strict form : take only those which both have set - strict_result = True - elif fieldname in ("standard_name", "long_name", "var_name"): - strict_result = not is_lenient - else: - # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" - ) + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) - if strict_result: - # include only those which both have - expected = self.rvalues - else: - # also include those which only 1 has - expected = self.lvalues - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected - - def test_op_different__value(self, fieldname, op_leniency): - # One field has different value for lhs/rhs, both strict + lenient. - if fieldname == "attributes": - # Attribute behaviours are tested separately - pytest.skip() - - is_lenient = op_leniency == "lenient" - - self.lvalues[fieldname] = mock.sentinel.value1 - self.rvalues[fieldname] = mock.sentinel.value2 - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - - # In all cases, this field should be None in the result : leniency has no effect - expected = self.lvalues.copy() - expected[fieldname] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected - - def test_op_different__attribute_extra(self, op_leniency): - # One field has an extra attribute, both strict + lenient. - is_lenient = op_leniency == "lenient" - - self.lvalues["attributes"] = {"_a_common_": mock.sentinel.dummy} - self.rvalues["attributes"] = self.lvalues["attributes"].copy() - self.rvalues["attributes"]["_extra_"] = mock.sentinel.testvalue - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - - if is_lenient: - # the extra attribute should appear in the result .. - expected = self.rvalues - else: - # .. it should not - expected = self.lvalues - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected - - def test_op_different__attribute_value(self, op_leniency): - # lhs and rhs have different values for an attribute, both strict + lenient. - is_lenient = op_leniency == "lenient" - - self.lvalues["attributes"] = { - "_a_common_": self.dummy, - "_b_common_": mock.sentinel.value1, - } - self.lvalues["attributes"] = { - "_a_common_": self.dummy, - "_b_common_": mock.sentinel.value2, - } - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - - # Result has entirely EMPTY attributes (whether strict or lenient). - # TODO: is this maybe a mistake of the existing implementation ? 
- expected = self.lvalues.copy() - expected["attributes"] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected - - -class Test_difference(MixinSplitattrsMatrixTests): - operation_name = "difference" - - @pytest.fixture(autouse=True) - def setup(self): - self.lvalues = dict( + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["cell_methods"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_cell_methods(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + expected = 
self.values.copy() + expected["cell_methods"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["cell_methods"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( standard_name=sentinel.standard_name, long_name=sentinel.long_name, var_name=sentinel.var_name, units=sentinel.units, - attributes=dict(), # MUST be a dict + attributes=sentinel.attributes, cell_methods=sentinel.cell_methods, ) - # Make a copy with all-different objects in it. - self.rvalues = deepcopy(self.lvalues) self.dummy = sentinel.dummy self.cls = CubeMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - assert self.cls.difference.__doc__ == BaseMetadata.difference.__doc__ + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__ + ) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) - assert qualname_difference in _LENIENT - assert _LENIENT[qualname_difference] - assert _LENIENT[self.cls.difference] + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) def test_lenient_default(self): other = sentinel.other @@ -835,8 +457,11 @@ def test_lenient_default(self): ) as mocker: result = self.none.difference(other) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=None)] + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) def test_lenient(self): other = sentinel.other @@ -847,149 +472,178 @@ def test_lenient(self): ) as mocker: result = self.none.difference(other, lenient=lenient) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=lenient)] - - def test_op_same(self, op_leniency): - is_lenient = op_leniency == "lenient" - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - assert lmetadata.difference(rmetadata) is None - assert rmetadata.difference(lmetadata) is None - - def test_op_different__none(self, fieldname, op_leniency): - # One side has field=value, and the other field=None, both strict + lenient. - if fieldname in ("attributes",): - # These cannot properly be set to 'None'. Tested elsewhere. 
- pytest.skip() - - is_lenient = op_leniency == "lenient" - - lmetadata = self.cls(**self.lvalues) - self.rvalues[fieldname] = None - rmetadata = self.cls(**self.rvalues) - - if fieldname in ("units", "cell_methods"): - # These ones are always "strict" - strict_result = True - elif fieldname in ("standard_name", "long_name", "var_name"): - strict_result = not is_lenient - else: - # Ensure we are handling all the different field cases - raise ValueError( - f"{self.__name__} unhandled fieldname : {fieldname}" + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_cell_methods_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = (sentinel.cell_methods, None) + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = (None, sentinel.cell_methods) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = (left["units"], right["units"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_cell_methods(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = ( + left["cell_methods"], + right["cell_methods"], + ) + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = lexpected["cell_methods"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + 
+ def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() ) - if strict_result: - diffentry = tuple( - [getattr(mm, fieldname) for mm in (lmetadata, rmetadata)] + def test_op_strict_different_cell_methods(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["cell_methods"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = ( + left["cell_methods"], + right["cell_methods"], + ) + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = lexpected["cell_methods"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() ) - # NOTE: in these cases, the difference metadata will fail an == operation, - # because of the 'None' entries. - # But we can use metadata._asdict() and test that. - lexpected = self.none._asdict() - lexpected[fieldname] = diffentry - rexpected = lexpected.copy() - rexpected[fieldname] = diffentry[::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - if strict_result: - assert lmetadata.difference(rmetadata)._asdict() == lexpected - assert rmetadata.difference(lmetadata)._asdict() == rexpected - else: - # Expect NO differences - assert lmetadata.difference(rmetadata) is None - assert rmetadata.difference(lmetadata) is None - - def test_op_different__value(self, fieldname, op_leniency): - # One field has different value for lhs/rhs, both strict + lenient. 
- if fieldname == "attributes": - # Attribute behaviours are tested separately - pytest.skip() - - self.lvalues[fieldname] = mock.sentinel.value1 - self.rvalues[fieldname] = mock.sentinel.value2 - lmetadata = self.cls(**self.lvalues) - rmetadata = self.cls(**self.rvalues) - - # In all cases, this field should show a difference : leniency has no effect - ldiff_values = (mock.sentinel.value1, mock.sentinel.value2) - ldiff_metadata = self.none._asdict() - ldiff_metadata[fieldname] = ldiff_values - rdiff_metadata = self.none._asdict() - rdiff_metadata[fieldname] = ldiff_values[::-1] - - # Check both l+r and r+l - assert lmetadata.difference(rmetadata)._asdict() == ldiff_metadata - assert rmetadata.difference(lmetadata)._asdict() == rdiff_metadata - - def test_op_different__attribute_extra(self, op_leniency): - # One field has an extra attribute, both strict + lenient. - is_lenient = op_leniency == "lenient" - self.lvalues["attributes"] = {"_a_common_": self.dummy} - lmetadata = self.cls(**self.lvalues) - rvalues = deepcopy(self.lvalues) - rvalues["attributes"]["_b_extra_"] = mock.sentinel.extra - rmetadata = self.cls(**rvalues) - - if not is_lenient: - # In this case, attributes returns a "difference dictionary" - diffentry = tuple([{}, {"_b_extra_": mock.sentinel.extra}]) - lexpected = self.none._asdict() - lexpected["attributes"] = diffentry - rexpected = lexpected.copy() - rexpected["attributes"] = diffentry[::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - if is_lenient: - # It recognises no difference - assert lmetadata.difference(rmetadata) is None - assert rmetadata.difference(lmetadata) is None - else: - # As calculated above - assert lmetadata.difference(rmetadata)._asdict() == lexpected - assert rmetadata.difference(lmetadata)._asdict() == rexpected - - def test_op_different__attribute_value(self, op_leniency): - # lhs and rhs have different values for an attribute, both strict + lenient. 
- is_lenient = op_leniency == "lenient" - self.lvalues["attributes"] = { - "_a_common_": self.dummy, - "_b_extra_": mock.sentinel.value1, - } - lmetadata = self.cls(**self.lvalues) - self.rvalues["attributes"] = { - "_a_common_": self.dummy, - "_b_extra_": mock.sentinel.value2, - } - rmetadata = self.cls(**self.rvalues) - - # In this case, attributes returns a "difference dictionary" - diffentry = tuple( - [ - {"_b_extra_": mock.sentinel.value1}, - {"_b_extra_": mock.sentinel.value2}, - ] + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_measure_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["cell_methods"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["cell_methods"] = ( + left["cell_methods"], + right["cell_methods"], ) - lexpected = self.none._asdict() - lexpected["attributes"] = diffentry - rexpected = lexpected.copy() - rexpected["attributes"] = diffentry[::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=is_lenient - ): - # As calculated above -- same for both strict + lenient - assert lmetadata.difference(rmetadata)._asdict() == lexpected - assert rmetadata.difference(lmetadata)._asdict() == rexpected + rexpected = deepcopy(self.none)._asdict() + rexpected["cell_methods"] = lexpected["cell_methods"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) class Test_equal(tests.IrisTest): diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index 1608b1c42e..155c4f99b8 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`. diff --git a/lib/iris/tests/unit/common/metadata/test_hexdigest.py b/lib/iris/tests/unit/common/metadata/test_hexdigest.py index 9a16d9252b..949002af89 100644 --- a/lib/iris/tests/unit/common/metadata/test_hexdigest.py +++ b/lib/iris/tests/unit/common/metadata/test_hexdigest.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.common.metadata.hexdigest`. diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py index 340b6a5355..9c5987f235 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.common.metadata_filter`. diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index 1bf342004d..cbb29b7161 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`. diff --git a/lib/iris/tests/unit/common/mixin/__init__.py b/lib/iris/tests/unit/common/mixin/__init__.py index 3c1f5bbc9d..493e140626 100644 --- a/lib/iris/tests/unit/common/mixin/__init__.py +++ b/lib/iris/tests/unit/common/mixin/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.common.mixin` package.""" diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 55d2ca5d79..88a88be567 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.mixin.CFVariableMixin`. diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py index d29a120f35..32c78b6697 100644 --- a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py +++ b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`. 
@@ -20,7 +21,7 @@ class Test(tests.IrisTest): def setUp(self): - self.forbidden_keys = LimitedAttributeDict.CF_ATTRS_FORBIDDEN + self.forbidden_keys = LimitedAttributeDict._forbidden_keys self.emsg = "{!r} is not a permitted attribute" def test__invalid_keys(self): diff --git a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py index 634eae4cf3..8fc21f2965 100644 --- a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py +++ b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`. diff --git a/lib/iris/tests/unit/common/resolve/__init__.py b/lib/iris/tests/unit/common/resolve/__init__.py index 8bfbe20970..d0b189e59d 100644 --- a/lib/iris/tests/unit/common/resolve/__init__.py +++ b/lib/iris/tests/unit/common/resolve/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.common.resolve` package.""" diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 182cbbd61c..db1759c5fc 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.common.resolve.Resolve`. diff --git a/lib/iris/tests/unit/concatenate/__init__.py b/lib/iris/tests/unit/concatenate/__init__.py index 6deaf26aa0..229476f3a6 100644 --- a/lib/iris/tests/unit/concatenate/__init__.py +++ b/lib/iris/tests/unit/concatenate/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit-test infrastructure for the :mod:`iris._concatenate` package.""" from __future__ import annotations diff --git a/lib/iris/tests/unit/concatenate/test__CoordMetaData.py b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py index 35c3cfd17b..6f29e1f65f 100644 --- a/lib/iris/tests/unit/concatenate/test__CoordMetaData.py +++ b/lib/iris/tests/unit/concatenate/test__CoordMetaData.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit-tests for :class:`iris._concatenate._CoordMetaData`.""" from __future__ import annotations diff --git a/lib/iris/tests/unit/concatenate/test__CoordSignature.py b/lib/iris/tests/unit/concatenate/test__CoordSignature.py index c5e4850170..eb62c5ec64 100644 --- a/lib/iris/tests/unit/concatenate/test__CoordSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CoordSignature.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit-tests for :class:`iris._concatenate._CoordSignature`.""" from __future__ import annotations diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py b/lib/iris/tests/unit/concatenate/test__CubeSignature.py index 64a25a2fad..cc20cdfa1f 100644 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CubeSignature.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test class :class:`iris._concatenate._CubeSignature`.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index 96932e11d4..c2ca01f781 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._concatenate.concatenate.py`.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/config/__init__.py b/lib/iris/tests/unit/config/__init__.py index 07805d4bd7..38806c7db8 100644 --- a/lib/iris/tests/unit/config/__init__.py +++ b/lib/iris/tests/unit/config/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.config` module.""" diff --git a/lib/iris/tests/unit/config/test_NetCDF.py b/lib/iris/tests/unit/config/test_NetCDF.py index 5b691a1dc3..c7f7564e4e 100644 --- a/lib/iris/tests/unit/config/test_NetCDF.py +++ b/lib/iris/tests/unit/config/test_NetCDF.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.config.NetCDF` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/conftest.py b/lib/iris/tests/unit/conftest.py deleted file mode 100644 index a4ddb89294..0000000000 --- a/lib/iris/tests/unit/conftest.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -"""Unit tests fixture infra-structure.""" -import pytest - -import iris - - -@pytest.fixture -def sample_coord(): - sample_coord = iris.coords.DimCoord(points=(1, 2, 3, 4, 5)) - return sample_coord diff --git a/lib/iris/tests/unit/constraints/__init__.py b/lib/iris/tests/unit/constraints/__init__.py index 987e88c6e7..03a987b1a1 100644 --- a/lib/iris/tests/unit/constraints/__init__.py +++ b/lib/iris/tests/unit/constraints/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris._constraints` module.""" diff --git a/lib/iris/tests/unit/constraints/test_Constraint_equality.py b/lib/iris/tests/unit/constraints/test_Constraint_equality.py index 6e0b37c3f4..01e61b70a7 100644 --- a/lib/iris/tests/unit/constraints/test_Constraint_equality.py +++ b/lib/iris/tests/unit/constraints/test_Constraint_equality.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for equality testing of different constraint types.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/constraints/test_NameConstraint.py b/lib/iris/tests/unit/constraints/test_NameConstraint.py index b959b82434..46aea25331 100644 --- a/lib/iris/tests/unit/constraints/test_NameConstraint.py +++ b/lib/iris/tests/unit/constraints/test_NameConstraint.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris._constraints.NameConstraint` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_categorisation/__init__.py b/lib/iris/tests/unit/coord_categorisation/__init__.py index 9c60613915..18fe8f2482 100644 --- a/lib/iris/tests/unit/coord_categorisation/__init__.py +++ b/lib/iris/tests/unit/coord_categorisation/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.coord_categorisation` module.""" diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py index 2291e677bc..0c20f16f5a 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.coord_categorisation.add_categorised_coord`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py index caf52e9c84..418ac72557 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test coordinate categorisation function add_hour. """ diff --git a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py deleted file mode 100644 index 6560f65a32..0000000000 --- a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py +++ /dev/null @@ -1,251 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -""" -Test the coordinate categorisation functions. 
-""" - -import warnings - -import cf_units -import numpy as np -import pytest - -import iris -import iris.coord_categorisation as ccat -import iris.coords -import iris.cube -import iris.exceptions -from iris.tests import IrisTest - - -@pytest.fixture( - scope="module", - params=( - ccat.add_day_of_month, - ccat.add_day_of_year, - ccat.add_weekday, - ccat.add_weekday_fullname, - ccat.add_weekday_number, - ccat.add_month, - ccat.add_month_fullname, - ccat.add_month_number, - ccat.add_year, - ccat.add_season, - ccat.add_season_number, - ccat.add_season_year, - ccat.add_season_membership, - ), -) -def categorisation_func(request): - return request.param - - -@pytest.fixture( - scope="module", - params=( - ccat.add_season, - ccat.add_season_number, - ccat.add_season_year, - ), -) -def season_cat_func(request): - return request.param - - -@pytest.fixture(scope="module") -def day_numbers(): - # make a series of 'day numbers' for the time, that slide across month - # boundaries - return np.arange(0, 600, 27, dtype=np.int32) - - -@pytest.fixture -def time_coord(day_numbers): - return iris.coords.DimCoord( - day_numbers, - standard_name="time", - units=cf_units.Unit("days since epoch", "standard"), - ) - - -@pytest.fixture -def cube(day_numbers, time_coord): - _cube = iris.cube.Cube(day_numbers, long_name="test cube", units="metres") - # use day numbers as data values also (don't actually use this for - # anything) - _cube.data = day_numbers - _cube.add_dim_coord(time_coord, 0) - return _cube - - -def test_bad_coord(cube, categorisation_func): - kwargs = {"name": "my_category"} - if categorisation_func is ccat.add_season_membership: - kwargs["season"] = "djf" - with pytest.raises(iris.exceptions.CoordinateNotFoundError): - categorisation_func(cube, "DOES NOT EXIST", **kwargs) - - -def test_explicit_result_names(cube, categorisation_func): - result_name = "my_category" - fmt = "Missing/incorrectly named result for {0!r}" - # Specify source coordinate by name - new_cube = cube.copy() - kwargs = {"name": result_name} - if categorisation_func is ccat.add_season_membership: - kwargs["season"] = "djf" - with warnings.catch_warnings(record=True): - categorisation_func(new_cube, "time", **kwargs) - result_coords = new_cube.coords(result_name) - assert len(result_coords) == 1, fmt.format(categorisation_func.__name__) - # Specify source coordinate by coordinate reference - new_cube = cube.copy() - time = new_cube.coord("time") - with warnings.catch_warnings(record=True): - categorisation_func(new_cube, time, **kwargs) - result_coords = new_cube.coords(result_name) - assert len(result_coords) == 1, fmt.format(categorisation_func.__name__) - - -def test_basic(cube, time_coord): - ccat.add_year(cube, time_coord, "my_year") - ccat.add_day_of_month(cube, time_coord, "my_day_of_month") - ccat.add_day_of_year(cube, time_coord, "my_day_of_year") - - ccat.add_month(cube, time_coord, "my_month") - ccat.add_month_fullname(cube, time_coord, "my_month_fullname") - ccat.add_month_number(cube, time_coord, "my_month_number") - - ccat.add_weekday(cube, time_coord, "my_weekday") - ccat.add_weekday_number(cube, time_coord, "my_weekday_number") - ccat.add_weekday_fullname(cube, time_coord, "my_weekday_fullname") - - ccat.add_season(cube, time_coord, "my_season") - ccat.add_season_number(cube, time_coord, "my_season_number") - ccat.add_season_year(cube, time_coord, "my_season_year") - - # also test 'generic' categorisation interface - def _month_in_quarter(coord, pt_value): - date = coord.units.num2date(pt_value) - return 
(date.month - 1) % 3 - - ccat.add_categorised_coord( - cube, "my_month_in_quarter", time_coord, _month_in_quarter - ) - - # To ensure consistent results between 32-bit and 64-bit - # platforms, ensure all the numeric categorisation coordinates - # are always stored as int64. - for coord in cube.coords(): - if coord.long_name is not None and coord.points.dtype.kind == "i": - coord.points = coord.points.astype(np.int64) - - # check values - IrisTest.assertCML(IrisTest(), cube, ("categorisation", "quickcheck.cml")) - - -def test_add_season_nonstandard(cube, time_coord): - # season categorisations work for non-standard seasons? - seasons = ["djfm", "amjj", "ason"] - ccat.add_season(cube, time_coord, name="seasons", seasons=seasons) - ccat.add_season_number( - cube, time_coord, name="season_numbers", seasons=seasons - ) - ccat.add_season_year( - cube, time_coord, name="season_years", seasons=seasons - ) - IrisTest.assertCML(IrisTest(), cube, ("categorisation", "customcheck.cml")) - - -@pytest.mark.parametrize("backwards", [None, False, True]) -@pytest.mark.parametrize( - "nonstandard", - [False, True], - ids=["standard_seasons", "nonstandard_seasons"], -) -def test_add_season_year(cube, time_coord, backwards, nonstandard): - """Specific test to account for the extra use_year_at_season_start argument.""" - - kwargs = dict( - cube=cube, - coord=time_coord, - name="season_years", - use_year_at_season_start=backwards, - ) - if nonstandard: - kwargs["seasons"] = ["ndjfm", "amjj", "aso"] - - # Based on the actual years of each date. - expected_years = np.array(([1970] * 14) + ([1971] * 9)) - # Subset to just the 'season' of interest. - season_slice = np.s_[12:17] - expected_years = expected_years[season_slice] - - # Single indices to examine to test the handling of specific months. - nov = 0 - dec = 1 - jan = 2 - feb = 3 - mar = 4 - - # Set the expected deviations from the actual date years. - if backwards is True: - expected_years[jan] = 1970 - expected_years[feb] = 1970 - if nonstandard: - expected_years[mar] = 1970 - else: - # Either False or None - False being the default behaviour. - expected_years[dec] = 1971 - if nonstandard: - expected_years[nov] = 1971 - - ccat.add_season_year(**kwargs) - actual_years = cube.coord(kwargs["name"]).points - # Subset to just the 'season' of interest. - actual_years = actual_years[season_slice] - - np.testing.assert_array_almost_equal(actual_years, expected_years) - - -def test_add_season_membership(cube): - # season membership identifies correct seasons? - season = "djf" - ccat.add_season_membership(cube, "time", season, name="in_season") - ccat.add_season(cube, "time") - coord_season = cube.coord("season") - coord_membership = cube.coord("in_season") - season_locations = np.where(coord_season.points == season)[0] - membership_locations = np.where(coord_membership.points)[0] - np.testing.assert_array_almost_equal( - membership_locations, season_locations - ) - - -def test_add_season_invalid_spec(cube, season_cat_func): - # custom seasons with an invalid season raises an error? - seasons = ("djf", "maj", "jja", "son") # MAJ not a season! - with pytest.raises(ValueError): - season_cat_func(cube, "time", name="my_category", seasons=seasons) - - -def test_add_season_repeated_months(cube, season_cat_func): - # custom seasons with repeated months raises an error? 
- seasons = ("djfm", "mam", "jja", "son") - with pytest.raises(ValueError): - season_cat_func(cube, "time", name="my_category", seasons=seasons) - - -def test_add_season_missing_months(cube, season_cat_func): - # custom seasons with missing months raises an error? - seasons = ("djfm", "amjj") - with pytest.raises(ValueError): - season_cat_func(cube, "time", name="my_category", seasons=seasons) - - -def test_add_season_membership_invalid_spec(cube): - season = "maj" # not a season! - with pytest.raises(ValueError): - ccat.add_season_membership(cube, "time", season, name="maj_season") diff --git a/lib/iris/tests/unit/coord_systems/__init__.py b/lib/iris/tests/unit/coord_systems/__init__.py index 21f703eed0..39d4d25f73 100644 --- a/lib/iris/tests/unit/coord_systems/__init__.py +++ b/lib/iris/tests/unit/coord_systems/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.coord_systems` module.""" diff --git a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py index 26aa79ac47..99a7c9f59b 100644 --- a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.coord_systems.AlbersEqualArea` class. diff --git a/lib/iris/tests/unit/coord_systems/test_GeogCS.py b/lib/iris/tests/unit/coord_systems/test_GeogCS.py index acb9029b1e..f3f9531dbb 100644 --- a/lib/iris/tests/unit/coord_systems/test_GeogCS.py +++ b/lib/iris/tests/unit/coord_systems/test_GeogCS.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.GeogCS` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_Geostationary.py b/lib/iris/tests/unit/coord_systems/test_Geostationary.py index f144dca190..cc3c8384db 100644 --- a/lib/iris/tests/unit/coord_systems/test_Geostationary.py +++ b/lib/iris/tests/unit/coord_systems/test_Geostationary.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :class:`iris.coord_systems.Geostationary` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py index b2d0c576bb..971ee06293 100644 --- a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.coord_systems.LambertAzimuthalEqualArea` class. diff --git a/lib/iris/tests/unit/coord_systems/test_LambertConformal.py b/lib/iris/tests/unit/coord_systems/test_LambertConformal.py index a9bf70ef94..7ba89208b1 100644 --- a/lib/iris/tests/unit/coord_systems/test_LambertConformal.py +++ b/lib/iris/tests/unit/coord_systems/test_LambertConformal.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.LambertConformal` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_Mercator.py b/lib/iris/tests/unit/coord_systems/test_Mercator.py index dd2f42bb2f..ba04c77d57 100644 --- a/lib/iris/tests/unit/coord_systems/test_Mercator.py +++ b/lib/iris/tests/unit/coord_systems/test_Mercator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.Mercator` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py deleted file mode 100644 index b17c1cc788..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.coord_systems.ObliqueMercator` class.""" - -from typing import List, NamedTuple -from unittest.mock import Mock - -from cartopy import crs as ccrs -import pytest - -from iris.coord_systems import GeogCS, ObliqueMercator - -#### -# ALL TESTS MUST BE CONTAINED IN CLASSES, TO ENABLE INHERITANCE BY -# test_RotatedMercator.py . 
-#### - - -class GlobeWithEq(ccrs.Globe): - def __eq__(self, other): - """Need eq to enable comparison with expected arguments.""" - result = NotImplemented - if isinstance(other, ccrs.Globe): - result = other.__dict__ == self.__dict__ - return result - - -class ParamTuple(NamedTuple): - """Used for easy coupling of test parameters.""" - - id: str - class_kwargs: dict - cartopy_kwargs: dict - - -kwarg_permutations: List[ParamTuple] = [ - ParamTuple( - "default", - dict(), - dict(), - ), - ParamTuple( - "azimuth", - dict(azimuth_of_central_line=90), - dict(azimuth=90), - ), - ParamTuple( - "central_longitude", - dict(longitude_of_projection_origin=90), - dict(central_longitude=90), - ), - ParamTuple( - "central_latitude", - dict(latitude_of_projection_origin=45), - dict(central_latitude=45), - ), - ParamTuple( - "false_easting_northing", - dict(false_easting=1000000, false_northing=-2000000), - dict(false_easting=1000000, false_northing=-2000000), - ), - ParamTuple( - "scale_factor", - # Number inherited from Cartopy's test_mercator.py . - dict(scale_factor_at_projection_origin=0.939692620786), - dict(scale_factor=0.939692620786), - ), - ParamTuple( - "globe", - dict(ellipsoid=GeogCS(1)), - dict( - globe=GlobeWithEq(semimajor_axis=1, semiminor_axis=1, ellipse=None) - ), - ), - ParamTuple( - "combo", - dict( - azimuth_of_central_line=90, - longitude_of_projection_origin=90, - latitude_of_projection_origin=45, - false_easting=1000000, - false_northing=-2000000, - scale_factor_at_projection_origin=0.939692620786, - ellipsoid=GeogCS(1), - ), - dict( - azimuth=90.0, - central_longitude=90.0, - central_latitude=45.0, - false_easting=1000000, - false_northing=-2000000, - scale_factor=0.939692620786, - globe=GlobeWithEq( - semimajor_axis=1, semiminor_axis=1, ellipse=None - ), - ), - ), -] -permutation_ids: List[str] = [p.id for p in kwarg_permutations] - - -class TestArgs: - GeogCS = GeogCS - class_kwargs_default = dict( - azimuth_of_central_line=0.0, - latitude_of_projection_origin=0.0, - longitude_of_projection_origin=0.0, - ) - cartopy_kwargs_default = dict( - central_longitude=0.0, - central_latitude=0.0, - false_easting=0.0, - false_northing=0.0, - scale_factor=1.0, - azimuth=0.0, - globe=None, - ) - - @pytest.fixture( - autouse=True, params=kwarg_permutations, ids=permutation_ids - ) - def make_variant_inputs(self, request) -> None: - """Parse a ParamTuple into usable test information.""" - inputs: ParamTuple = request.param - self.class_kwargs = dict( - self.class_kwargs_default, **inputs.class_kwargs - ) - self.cartopy_kwargs_expected = dict( - self.cartopy_kwargs_default, **inputs.cartopy_kwargs - ) - - def make_instance(self) -> ObliqueMercator: - return ObliqueMercator(**self.class_kwargs) - - @pytest.fixture() - def instance(self): - return self.make_instance() - - def test_instantiate(self): - _ = self.make_instance() - - def test_cartopy_crs(self, instance): - ccrs.ObliqueMercator = Mock() - instance.as_cartopy_crs() - ccrs.ObliqueMercator.assert_called_with(**self.cartopy_kwargs_expected) - - def test_cartopy_projection(self, instance): - ccrs.ObliqueMercator = Mock() - instance.as_cartopy_projection() - ccrs.ObliqueMercator.assert_called_with(**self.cartopy_kwargs_expected) - - @pytest.fixture() - def label_class(self, instance): - """Make the tested coordinate system available, even for subclasses.""" - from iris import coord_systems - - instance_class = "{!s}".format(instance.__class__.__name__) - globals()[instance_class] = getattr(coord_systems, instance_class) - - def 
test_repr(self, instance, label_class): - """Test that the repr can be used to regenerate an identical object.""" - assert eval(repr(instance)) == instance diff --git a/lib/iris/tests/unit/coord_systems/test_Orthographic.py b/lib/iris/tests/unit/coord_systems/test_Orthographic.py index a2b63ad5fe..ffcbecf55c 100644 --- a/lib/iris/tests/unit/coord_systems/test_Orthographic.py +++ b/lib/iris/tests/unit/coord_systems/test_Orthographic.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.Orthographic` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py index 16f3ef2e7d..25f5d24800 100755 --- a/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py +++ b/lib/iris/tests/unit/coord_systems/test_PolarStereographic.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.PolarStereographic` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py b/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py deleted file mode 100644 index 01a0640d3b..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_RotatedMercator.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.coord_systems.RotatedMercator` class.""" - -import pytest - -from iris._deprecation import IrisDeprecation -from iris.coord_systems import RotatedMercator - -from . import test_ObliqueMercator - - -class TestArgs(test_ObliqueMercator.TestArgs): - class_kwargs_default = dict( - latitude_of_projection_origin=0.0, - longitude_of_projection_origin=0.0, - ) - cartopy_kwargs_default = dict( - central_longitude=0.0, - central_latitude=0.0, - false_easting=0.0, - false_northing=0.0, - scale_factor=1.0, - azimuth=90.0, - globe=None, - ) - - def make_instance(self) -> RotatedMercator: - kwargs = self.class_kwargs - kwargs.pop("azimuth_of_central_line", None) - return RotatedMercator(**kwargs) - - -def test_deprecated(): - with pytest.warns(IrisDeprecation, match="azimuth_of_central_line=90"): - _ = RotatedMercator(0, 0) diff --git a/lib/iris/tests/unit/coord_systems/test_RotatedPole.py b/lib/iris/tests/unit/coord_systems/test_RotatedPole.py index 69408a96bc..dbb7a05bca 100644 --- a/lib/iris/tests/unit/coord_systems/test_RotatedPole.py +++ b/lib/iris/tests/unit/coord_systems/test_RotatedPole.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.RotatedPole` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_Stereographic.py b/lib/iris/tests/unit/coord_systems/test_Stereographic.py index cdc2fee581..acd77112c1 100644 --- a/lib/iris/tests/unit/coord_systems/test_Stereographic.py +++ b/lib/iris/tests/unit/coord_systems/test_Stereographic.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.Stereographic` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py b/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py index 526985e20d..95b80333c2 100644 --- a/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py +++ b/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.TransverseMercator` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py index 4cd5f215a9..56498e40fa 100644 --- a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py +++ b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coord_systems.VerticalPerspective` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/__init__.py b/lib/iris/tests/unit/coords/__init__.py index a99795d4da..10cee9db8b 100644 --- a/lib/iris/tests/unit/coords/__init__.py +++ b/lib/iris/tests/unit/coords/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :mod:`iris.coords` module. 
diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py index 0177bcafc4..e5fc8fd28a 100644 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ b/lib/iris/tests/unit/coords/test_AncillaryVariable.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coords.AncillaryVariable` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_AuxCoord.py b/lib/iris/tests/unit/coords/test_AuxCoord.py index 31bd54eb12..e5147659fc 100644 --- a/lib/iris/tests/unit/coords/test_AuxCoord.py +++ b/lib/iris/tests/unit/coords/test_AuxCoord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.coords.AuxCoord` class. diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py index b34ffdfb91..2408ec9f36 100644 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ b/lib/iris/tests/unit/coords/test_Cell.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coords.Cell` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py index c667e012ef..0bd66c6e98 100644 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ b/lib/iris/tests/unit/coords/test_CellMeasure.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coords.CellMeasure` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py index 274606510a..21b309a32b 100644 --- a/lib/iris/tests/unit/coords/test_CellMethod.py +++ b/lib/iris/tests/unit/coords/test_CellMethod.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.coords.CellMethod`. 
""" diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 14dcdf7ca0..69b6b70c96 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coords.Coord` class.""" # Import iris.tests first so that some things can be initialised before @@ -14,12 +15,11 @@ import dask.array as da import numpy as np -import pytest import iris from iris.coords import AuxCoord, Coord, DimCoord from iris.cube import Cube -from iris.exceptions import IrisVagueMetadataWarning, UnitConversionError +from iris.exceptions import UnitConversionError from iris.tests.unit.coords import CoordTestMixin Pair = collections.namedtuple("Pair", "points bounds") @@ -482,7 +482,7 @@ def test_numeric_nd_multidim_bounds_warning(self): "Collapsing a multi-dimensional coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): coord.collapsed() def test_lazy_nd_multidim_bounds_warning(self): @@ -493,7 +493,7 @@ def test_lazy_nd_multidim_bounds_warning(self): "Collapsing a multi-dimensional coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): coord.collapsed() def test_numeric_nd_noncontiguous_bounds_warning(self): @@ -504,7 +504,7 @@ def test_numeric_nd_noncontiguous_bounds_warning(self): "Collapsing a non-contiguous coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): coord.collapsed() def test_lazy_nd_noncontiguous_bounds_warning(self): @@ -515,7 +515,7 @@ def test_lazy_nd_noncontiguous_bounds_warning(self): "Collapsing a non-contiguous coordinate. " "Metadata may not be fully descriptive for 'y'." ) - with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): coord.collapsed() def test_numeric_3_bounds(self): @@ -530,7 +530,7 @@ def test_numeric_3_bounds(self): r"1D coordinates with 2 bounds. Metadata may not be fully " r"descriptive for 'x'. Ignoring bounds." ) - with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): collapsed_coord = coord.collapsed() self.assertFalse(collapsed_coord.has_lazy_points()) @@ -553,7 +553,7 @@ def test_lazy_3_bounds(self): r"1D coordinates with 2 bounds. Metadata may not be fully " r"descriptive for 'x'. Ignoring bounds." 
) - with self.assertWarnsRegex(IrisVagueMetadataWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): collapsed_coord = coord.collapsed() self.assertTrue(collapsed_coord.has_lazy_points()) @@ -1150,39 +1150,6 @@ def test_change_units(self): self.assertFalse(coord.climatological) -class TestIgnoreAxis: - def test_default(self, sample_coord): - assert sample_coord.ignore_axis is False - - def test_set_true(self, sample_coord): - sample_coord.ignore_axis = True - assert sample_coord.ignore_axis is True - - def test_set_random_value(self, sample_coord): - with pytest.raises( - ValueError, - match=r"'ignore_axis' can only be set to 'True' or 'False'", - ): - sample_coord.ignore_axis = "foo" - - @pytest.mark.parametrize( - "ignore_axis, copy_or_from, result", - [ - (True, "copy", True), - (True, "from_coord", True), - (False, "copy", False), - (False, "from_coord", False), - ], - ) - def test_copy_coord(self, ignore_axis, copy_or_from, result, sample_coord): - sample_coord.ignore_axis = ignore_axis - if copy_or_from == "copy": - new_coord = sample_coord.copy() - elif copy_or_from == "from_coord": - new_coord = sample_coord.from_coord(sample_coord) - assert new_coord.ignore_axis is result - - class Test___init____abstractmethod(tests.IrisTest): def test(self): emsg = ( diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py index 2c8ab3a7ba..dd0ba48f3d 100644 --- a/lib/iris/tests/unit/coords/test_DimCoord.py +++ b/lib/iris/tests/unit/coords/test_DimCoord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.coords.DimCoord` class. diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 91a50a9a1c..83fcbc4512 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.coords._DimensionalMetadata` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/cube/__init__.py b/lib/iris/tests/unit/cube/__init__.py index 8c72b2af8c..7852593e21 100644 --- a/lib/iris/tests/unit/cube/__init__.py +++ b/lib/iris/tests/unit/cube/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :mod:`iris.cube` module.""" diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 5e513c2bd0..8084ab31fa 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.cube.Cube` class.""" # Import iris.tests first so that some things can be initialised before @@ -33,14 +34,12 @@ CellMethod, DimCoord, ) -from iris.cube import Cube, CubeAttrsDict +from iris.cube import Cube import iris.exceptions from iris.exceptions import ( AncillaryVariableNotFoundError, CellMeasureNotFoundError, CoordinateNotFoundError, - IrisUserWarning, - IrisVagueMetadataWarning, UnitConversionError, ) import iris.tests.stock as stock @@ -677,10 +676,7 @@ def _assert_warn_collapse_without_weight(self, coords, warn): # Ensure that warning is raised. msg = "Collapsing spatial coordinate {!r} without weighting" for coord in coords: - self.assertIn( - mock.call(msg.format(coord), category=IrisUserWarning), - warn.call_args_list, - ) + self.assertIn(mock.call(msg.format(coord)), warn.call_args_list) def _assert_nowarn_collapse_without_weight(self, coords, warn): # Ensure that warning is not raised. @@ -769,10 +765,7 @@ def _assert_warn_cannot_check_contiguity(self, warn): f"bounds. Metadata may not be fully descriptive for " f"'{coord}'. Ignoring bounds." ) - self.assertIn( - mock.call(msg, category=IrisVagueMetadataWarning), - warn.call_args_list, - ) + self.assertIn(mock.call(msg), warn.call_args_list) def _assert_cube_as_expected(self, cube): """Ensure that cube data and coordinates are as expected.""" @@ -2761,13 +2754,6 @@ def test_bad_coord(self): _ = self.cube.coord(bad_coord) -class Test_coord_division_units(tests.IrisTest): - def test(self): - aux = AuxCoord(1, long_name="length", units="metres") - cube = Cube(1, units="seconds") - self.assertEqual((aux / cube).units, "m.s-1") - - class Test__getitem_CellMeasure(tests.IrisTest): def setUp(self): cube = Cube(np.arange(6).reshape(2, 3)) @@ -3436,31 +3422,5 @@ def test_fail_assign_duckcellmethod(self): self.cube.cell_methods = (test_object,) -class TestAttributesProperty: - def test_attrs_type(self): - # Cube attributes are always of a special dictionary type. 
- cube = Cube([0], attributes={"a": 1}) - assert type(cube.attributes) is CubeAttrsDict - assert cube.attributes == {"a": 1} - - def test_attrs_remove(self): - # Wiping attributes replaces the stored object - cube = Cube([0], attributes={"a": 1}) - attrs = cube.attributes - cube.attributes = None - assert cube.attributes is not attrs - assert type(cube.attributes) is CubeAttrsDict - assert cube.attributes == {} - - def test_attrs_clear(self): - # Clearing attributes leaves the same object - cube = Cube([0], attributes={"a": 1}) - attrs = cube.attributes - cube.attributes.clear() - assert cube.attributes is attrs - assert type(cube.attributes) is CubeAttrsDict - assert cube.attributes == {} - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py deleted file mode 100644 index 615de7b8e6..0000000000 --- a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py +++ /dev/null @@ -1,407 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the `iris.cube.CubeAttrsDict` class.""" - -import pickle - -import numpy as np -import pytest - -from iris.common.mixin import LimitedAttributeDict -from iris.cube import CubeAttrsDict -from iris.fileformats.netcdf.saver import _CF_GLOBAL_ATTRS - - -@pytest.fixture -def sample_attrs() -> CubeAttrsDict: - return CubeAttrsDict( - locals={"a": 1, "z": "this"}, globals={"b": 2, "z": "that"} - ) - - -def check_content(attrs, locals=None, globals=None, matches=None): - """ - Check a CubeAttrsDict for expected properties. - - Its ".globals" and ".locals" must match 'locals' and 'globals' args - -- except that, if 'matches' is provided, it is a CubeAttrsDict, whose - locals/globals *replace* the 'locals'/'globals' arguments. - - Check that the result is a CubeAttrsDict and, for both local + global parts, - * parts match for *equality* (==) but are *non-identical* (is not) - * order of keys matches expected (N.B. which is *not* required for equality) - """ - assert isinstance(attrs, CubeAttrsDict) - attr_locals, attr_globals = attrs.locals, attrs.globals - assert type(attr_locals) is LimitedAttributeDict - assert type(attr_globals) is LimitedAttributeDict - if matches: - locals, globals = matches.locals, matches.globals - - def check(arg, content): - if not arg: - arg = {} - if not isinstance(arg, LimitedAttributeDict): - arg = LimitedAttributeDict(arg) - # N.B. if 'arg' is an actual given LimitedAttributeDict, it is not changed.. - # .. 
we proceed to ensure that the stored content is equal but NOT the same - assert content == arg - assert content is not arg - assert list(content.keys()) == list(arg.keys()) - - check(locals, attr_locals) - check(globals, attr_globals) - - -class Test___init__: - def test_empty(self): - attrs = CubeAttrsDict() - check_content(attrs, None, None) - - def test_from_combined_dict(self): - attrs = CubeAttrsDict({"q": 3, "history": "something"}) - check_content(attrs, locals={"q": 3}, globals={"history": "something"}) - - def test_from_separate_dicts(self): - locals = {"q": 3} - globals = {"history": "something"} - attrs = CubeAttrsDict(locals=locals, globals=globals) - check_content(attrs, locals=locals, globals=globals) - - def test_from_cubeattrsdict(self, sample_attrs): - result = CubeAttrsDict(sample_attrs) - check_content(result, matches=sample_attrs) - - def test_from_cubeattrsdict_like(self): - class MyDict: - pass - - mydict = MyDict() - locals, globals = {"a": 1}, {"b": 2} - mydict.locals = locals - mydict.globals = globals - attrs = CubeAttrsDict(mydict) - check_content(attrs, locals=locals, globals=globals) - - -class Test_OddMethods: - def test_pickle(self, sample_attrs): - bytes = pickle.dumps(sample_attrs) - result = pickle.loads(bytes) - check_content(result, matches=sample_attrs) - - def test_clear(self, sample_attrs): - sample_attrs.clear() - check_content(sample_attrs, {}, {}) - - def test_del(self, sample_attrs): - # 'z' is in both locals+globals. Delete removes both. - assert "z" in sample_attrs.keys() - del sample_attrs["z"] - assert "z" not in sample_attrs.keys() - - def test_copy(self, sample_attrs): - copy = sample_attrs.copy() - assert copy is not sample_attrs - check_content(copy, matches=sample_attrs) - - @pytest.fixture(params=["regular_arg", "split_arg"]) - def update_testcase(self, request): - lhs = CubeAttrsDict(globals={"a": 1, "b": 2}, locals={"b": 3, "c": 4}) - if request.param == "split_arg": - # A set of "update settings", with global/local-specific keys. 
- rhs = CubeAttrsDict( - globals={"a": 1001, "x": 1007}, - # NOTE: use a global-default key here, to check that type is preserved - locals={"b": 1003, "history": 1099}, - ) - expected_result = CubeAttrsDict( - globals={"a": 1001, "b": 2, "x": 1007}, - locals={"b": 1003, "c": 4, "history": 1099}, - ) - else: - assert request.param == "regular_arg" - # A similar set of update values in a regular dict (so not local/global) - rhs = {"a": 1001, "x": 1007, "b": 1003, "history": 1099} - expected_result = CubeAttrsDict( - globals={"a": 1001, "b": 2, "history": 1099}, - locals={"b": 1003, "c": 4, "x": 1007}, - ) - return lhs, rhs, expected_result - - def test_update(self, update_testcase): - testval, updater, expected = update_testcase - testval.update(updater) - check_content(testval, matches=expected) - - def test___or__(self, update_testcase): - testval, updater, expected = update_testcase - original = testval.copy() - result = testval | updater - assert result is not testval - assert testval == original - check_content(result, matches=expected) - - def test___ior__(self, update_testcase): - testval, updater, expected = update_testcase - testval |= updater - check_content(testval, matches=expected) - - def test___ror__(self): - # Check the "or" operation, when lhs is a regular dictionary - lhs = {"a": 1, "b": 2, "history": 3} - rhs = CubeAttrsDict( - globals={"a": 1001, "x": 1007}, - # NOTE: use a global-default key here, to check that type is preserved - locals={"b": 1003, "history": 1099}, - ) - # The lhs should be promoted to a CubeAttrsDict, and then combined. - expected = CubeAttrsDict( - globals={"history": 3, "a": 1001, "x": 1007}, - locals={"a": 1, "b": 1003, "history": 1099}, - ) - result = lhs | rhs - check_content(result, matches=expected) - - @pytest.mark.parametrize("value", [1, None]) - @pytest.mark.parametrize("inputtype", ["regular_arg", "split_arg"]) - def test__fromkeys(self, value, inputtype): - if inputtype == "regular_arg": - # Check when input is a plain iterable of key-names - keys = ["a", "b", "history"] - # Result has keys assigned local/global via default mechanism. - expected = CubeAttrsDict( - globals={"history": value}, - locals={"a": value, "b": value}, - ) - else: - assert inputtype == "split_arg" - # Check when input is a CubeAttrsDict - keys = CubeAttrsDict( - globals={"a": 1}, locals={"b": 2, "history": 3} - ) - # The result preserves the input keys' local/global identity - # N.B. "history" would be global by default (cf. 
"regular_arg" case) - expected = CubeAttrsDict( - globals={"a": value}, - locals={"b": value, "history": value}, - ) - result = CubeAttrsDict.fromkeys(keys, value) - check_content(result, matches=expected) - - def test_to_dict(self, sample_attrs): - result = dict(sample_attrs) - expected = sample_attrs.globals.copy() - expected.update(sample_attrs.locals) - assert result == expected - - def test_array_copies(self): - array = np.array([3, 2, 1, 4]) - map = {"array": array} - attrs = CubeAttrsDict(map) - check_content(attrs, globals=None, locals=map) - attrs_array = attrs["array"] - assert np.all(attrs_array == array) - assert attrs_array is not array - - def test__str__(self, sample_attrs): - result = str(sample_attrs) - assert result == "{'b': 2, 'z': 'this', 'a': 1}" - - def test__repr__(self, sample_attrs): - result = repr(sample_attrs) - expected = ( - "CubeAttrsDict(" - "globals={'b': 2, 'z': 'that'}, " - "locals={'a': 1, 'z': 'this'})" - ) - assert result == expected - - -class TestEq: - def test_eq_empty(self): - attrs_1 = CubeAttrsDict() - attrs_2 = CubeAttrsDict() - assert attrs_1 == attrs_2 - - def test_eq_nonempty(self, sample_attrs): - attrs_1 = sample_attrs - attrs_2 = sample_attrs.copy() - assert attrs_1 == attrs_2 - - @pytest.mark.parametrize("aspect", ["locals", "globals"]) - def test_ne_missing(self, sample_attrs, aspect): - attrs_1 = sample_attrs - attrs_2 = sample_attrs.copy() - del getattr(attrs_2, aspect)["z"] - assert attrs_1 != attrs_2 - assert attrs_2 != attrs_1 - - @pytest.mark.parametrize("aspect", ["locals", "globals"]) - def test_ne_different(self, sample_attrs, aspect): - attrs_1 = sample_attrs - attrs_2 = sample_attrs.copy() - getattr(attrs_2, aspect)["z"] = 99 - assert attrs_1 != attrs_2 - assert attrs_2 != attrs_1 - - def test_ne_locals_vs_globals(self): - attrs_1 = CubeAttrsDict(locals={"a": 1}) - attrs_2 = CubeAttrsDict(globals={"a": 1}) - assert attrs_1 != attrs_2 - assert attrs_2 != attrs_1 - - def test_eq_dict(self): - # A CubeAttrsDict can be equal to a plain dictionary (which would create it) - vals_dict = {"a": 1, "b": 2, "history": "this"} - attrs = CubeAttrsDict(vals_dict) - assert attrs == vals_dict - assert vals_dict == attrs - - def test_ne_dict_local_global(self): - # Dictionary equivalence fails if the local/global assignments are wrong. - # sample dictionary - vals_dict = {"title": "b"} - # these attrs are *not* the same, because 'title' is global by default - attrs = CubeAttrsDict(locals={"title": "b"}) - assert attrs != vals_dict - assert vals_dict != attrs - - def test_empty_not_none(self): - # An empty CubeAttrsDict is not None, and does not compare to 'None' - # N.B. this for compatibility with the LimitedAttributeDict - attrs = CubeAttrsDict() - assert attrs is not None - with pytest.raises(TypeError, match="iterable"): - # Cannot *compare* to None (or anything non-iterable) - # N.B. not actually testing against None, as it upsets black (!) - attrs == 0 - - def test_empty_eq_iterables(self): - # An empty CubeAttrsDict is "equal" to various empty containers - attrs = CubeAttrsDict() - assert attrs == {} - assert attrs == [] - assert attrs == () - - -class TestDictOrderBehaviour: - def test_ordering(self): - attrs = CubeAttrsDict({"a": 1, "b": 2}) - assert list(attrs.keys()) == ["a", "b"] - # Remove, then reinstate 'a' : it will go to the back - del attrs["a"] - attrs["a"] = 1 - assert list(attrs.keys()) == ["b", "a"] - - def test_globals_locals_ordering(self): - # create attrs with a global attribute set *before* a local one .. 
- attrs = CubeAttrsDict() - attrs.globals.update(dict(a=1, m=3)) - attrs.locals.update(dict(f=7, z=4)) - # .. and check key order of combined attrs - assert list(attrs.keys()) == ["a", "m", "f", "z"] - - def test_locals_globals_nonalphabetic_order(self): - # create the "same" thing with locals before globals, *and* different key order - attrs = CubeAttrsDict() - attrs.locals.update(dict(z=4, f=7)) - attrs.globals.update(dict(m=3, a=1)) - # .. this shows that the result is not affected either by alphabetical key - # order, or the order of adding locals/globals - # I.E. result is globals-in-create-order, then locals-in-create-order - assert list(attrs.keys()) == ["m", "a", "z", "f"] - - -class TestSettingBehaviours: - def test_add_localtype(self): - attrs = CubeAttrsDict() - # Any attribute not recognised as global should go into 'locals' - attrs["z"] = 3 - check_content(attrs, locals={"z": 3}) - - @pytest.mark.parametrize("attrname", _CF_GLOBAL_ATTRS) - def test_add_globaltype(self, attrname): - # These specific attributes are recognised as belonging in 'globals' - attrs = CubeAttrsDict() - attrs[attrname] = "this" - check_content(attrs, globals={attrname: "this"}) - - def test_overwrite_local(self): - attrs = CubeAttrsDict({"a": 1}) - attrs["a"] = 2 - check_content(attrs, locals={"a": 2}) - - @pytest.mark.parametrize("attrname", _CF_GLOBAL_ATTRS) - def test_overwrite_global(self, attrname): - attrs = CubeAttrsDict({attrname: 1}) - attrs[attrname] = 2 - check_content(attrs, globals={attrname: 2}) - - @pytest.mark.parametrize("global_attrname", _CF_GLOBAL_ATTRS) - def test_overwrite_forced_local(self, global_attrname): - attrs = CubeAttrsDict(locals={global_attrname: 1}) - # The attr *remains* local, even though it would be created global by default - attrs[global_attrname] = 2 - check_content(attrs, locals={global_attrname: 2}) - - def test_overwrite_forced_global(self): - attrs = CubeAttrsDict(globals={"data": 1}) - # The attr remains global, even though it would be created local by default - attrs["data"] = 2 - check_content(attrs, globals={"data": 2}) - - def test_overwrite_both(self): - attrs = CubeAttrsDict(locals={"z": 1}, globals={"z": 1}) - # Where both exist, it will always update the local one - attrs["z"] = 2 - check_content(attrs, locals={"z": 2}, globals={"z": 1}) - - def test_local_global_masking(self, sample_attrs): - # initially, local 'z' masks the global one - assert sample_attrs["z"] == sample_attrs.locals["z"] - # remove local, global will show - del sample_attrs.locals["z"] - assert sample_attrs["z"] == sample_attrs.globals["z"] - # re-set local - sample_attrs.locals["z"] = "new" - assert sample_attrs["z"] == "new" - # change the global, makes no difference - sample_attrs.globals["z"] == "other" - assert sample_attrs["z"] == "new" - - @pytest.mark.parametrize("globals_or_locals", ("globals", "locals")) - @pytest.mark.parametrize( - "value_type", - ("replace", "emptylist", "emptytuple", "none", "zero", "false"), - ) - def test_replace_subdict(self, globals_or_locals, value_type): - # Writing to attrs.xx always replaces content with a *new* LimitedAttributeDict - locals, globals = {"a": 1}, {"b": 2} - attrs = CubeAttrsDict(locals=locals, globals=globals) - # Snapshot old + write new value, of either locals or globals - old_content = getattr(attrs, globals_or_locals) - value = { - "replace": {"qq": 77}, - "emptytuple": (), - "emptylist": [], - "none": None, - "zero": 0, - "false": False, - }[value_type] - setattr(attrs, globals_or_locals, value) - # check new content is 
expected type and value - new_content = getattr(attrs, globals_or_locals) - assert isinstance(new_content, LimitedAttributeDict) - assert new_content is not old_content - if value_type != "replace": - value = {} - assert new_content == value - # Check expected whole: i.e. either globals or locals was replaced with value - if globals_or_locals == "globals": - globals = value - else: - locals = value - check_content(attrs, locals=locals, globals=globals) diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 386df39b66..86457d3888 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.cube.CubeList` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index 854a0d431a..c591e45f63 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.cube.Cube` class aggregated_by method.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/cube/test_Cube__operators.py b/lib/iris/tests/unit/cube/test_Cube__operators.py index 0afd5a9d70..e860c57636 100644 --- a/lib/iris/tests/unit/cube/test_Cube__operators.py +++ b/lib/iris/tests/unit/cube/test_Cube__operators.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.cube.Cube` class operators.""" # import iris tests first so that some things can be initialised diff --git a/lib/iris/tests/unit/data_manager/__init__.py b/lib/iris/tests/unit/data_manager/__init__.py index 1a2ebdc944..41dcc0adf3 100644 --- a/lib/iris/tests/unit/data_manager/__init__.py +++ b/lib/iris/tests/unit/data_manager/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
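
The CubeAttrsDict tests removed above (update / __or__ / fromkeys and the setting-behaviour cases) all turn on one routing rule: attribute names that CF treats as dataset-level (for example "history" or "title") default to the globals dictionary, everything else defaults to locals, an existing entry stays on whichever side it already lives, and a local entry masks a global one of the same name. A minimal standalone sketch of that rule follows; it is an illustration only, not the iris implementation, and the SplitAttrsDict / _GLOBAL_DEFAULT_KEYS names are invented for the example.

# Editorial sketch only -- not the iris CubeAttrsDict implementation.
_GLOBAL_DEFAULT_KEYS = {"history", "title"}  # illustrative subset of the CF set


class SplitAttrsDict:
    """Toy stand-in for the split global/local attribute behaviour above."""

    def __init__(self, globals=None, locals=None):
        self.globals = dict(globals or {})
        self.locals = dict(locals or {})

    def __setitem__(self, key, value):
        # An existing local entry, or a name with no global default, goes local;
        # otherwise the value goes to (or stays in) globals.
        if key in self.locals or (
            key not in self.globals and key not in _GLOBAL_DEFAULT_KEYS
        ):
            self.locals[key] = value
        else:
            self.globals[key] = value

    def __getitem__(self, key):
        # A local entry masks a global entry of the same name.
        return self.locals[key] if key in self.locals else self.globals[key]


attrs = SplitAttrsDict()
attrs["history"] = "processed"   # global by default
attrs["z"] = 3                   # local by default
assert attrs.globals == {"history": "processed"}
assert attrs.locals == {"z": 3}

attrs = SplitAttrsDict(locals={"title": "forced local"})
attrs["title"] = "still local"   # stays local despite the global default
assert attrs.locals == {"title": "still local"} and attrs.globals == {}
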
"""Unit tests for the :mod:`iris._data_manager` module.""" diff --git a/lib/iris/tests/unit/data_manager/test_DataManager.py b/lib/iris/tests/unit/data_manager/test_DataManager.py index 1b91e256f4..e73714730f 100644 --- a/lib/iris/tests/unit/data_manager/test_DataManager.py +++ b/lib/iris/tests/unit/data_manager/test_DataManager.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris._data_manager.DataManager`. diff --git a/lib/iris/tests/unit/experimental/__init__.py b/lib/iris/tests/unit/experimental/__init__.py index 38af9c7a97..438827bab2 100644 --- a/lib/iris/tests/unit/experimental/__init__.py +++ b/lib/iris/tests/unit/experimental/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental` package.""" diff --git a/lib/iris/tests/unit/experimental/raster/__init__.py b/lib/iris/tests/unit/experimental/raster/__init__.py index 408926e2d9..5f85d810c9 100644 --- a/lib/iris/tests/unit/experimental/raster/__init__.py +++ b/lib/iris/tests/unit/experimental/raster/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.raster` module.""" diff --git a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py b/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py index af726c0fa9..a3b68ef761 100644 --- a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py +++ b/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.experimental.raster.export_geotiff` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/regrid/__init__.py b/lib/iris/tests/unit/experimental/regrid/__init__.py index f001cccada..578c15f11c 100644 --- a/lib/iris/tests/unit/experimental/regrid/__init__.py +++ b/lib/iris/tests/unit/experimental/regrid/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :mod:`iris.experimental.regrid` package.""" diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index 3cec1f8569..5ec3c956b9 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`. diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py index f9397da219..b0908dd2e4 100644 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py +++ b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.experimental.regrid.regrid_weighted_curvilinear_to_rectilinear`. diff --git a/lib/iris/tests/unit/experimental/representation/__init__.py b/lib/iris/tests/unit/experimental/representation/__init__.py index 764b46ef58..c856263a5c 100644 --- a/lib/iris/tests/unit/experimental/representation/__init__.py +++ b/lib/iris/tests/unit/experimental/representation/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.representation` package.""" diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py index 678eaa5fea..8dc3cd7849 100644 --- a/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py +++ b/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the `iris.cube.CubeRepresentation` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py index aadb07c882..e6b1425110 100644 --- a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py +++ b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.cube.CubeRepresentation` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/experimental/stratify/__init__.py b/lib/iris/tests/unit/experimental/stratify/__init__.py index 41663ee7a4..7218455e76 100644 --- a/lib/iris/tests/unit/experimental/stratify/__init__.py +++ b/lib/iris/tests/unit/experimental/stratify/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.stratify` package.""" diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index a0db398257..6958fa9a2f 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.experimental.stratify.relevel` function. diff --git a/lib/iris/tests/unit/experimental/ugrid/__init__.py b/lib/iris/tests/unit/experimental/ugrid/__init__.py index 27d7921e5f..7f55678f06 100644 --- a/lib/iris/tests/unit/experimental/ugrid/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.ugrid` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py b/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py index 19507555c7..2e70f2cd5d 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.cf` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py index 22914215b7..bdf1d5e03b 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable` class. @@ -13,14 +14,12 @@ # importing anything else. import iris.tests as tests # isort:skip -import re -import warnings - import numpy as np -import pytest -import iris.exceptions -from iris.experimental.ugrid.cf import CFUGridAuxiliaryCoordinateVariable +from iris.experimental.ugrid.cf import ( + CFUGridAuxiliaryCoordinateVariable, + logger, +) from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, ) @@ -214,37 +213,26 @@ def test_warn(self): "ref_source": ref_source, } - def operation(warn: bool): - warnings.warn( - "emit at least 1 warning", - category=iris.exceptions.IrisUserWarning, - ) - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) + # The warn kwarg and expected corresponding log level. + warn_and_level = {True: "WARNING", False: "DEBUG"} # Missing warning. - warn_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" - with pytest.warns( - iris.exceptions.IrisCfMissingVarWarning, match=warn_regex - ): - operation(warn=True) - with pytest.warns() as record: - operation(warn=False) - warn_list = [str(w.message) for w in record] - assert list(filter(re.compile(warn_regex).match, warn_list)) == [] + log_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + result = CFUGridAuxiliaryCoordinateVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) # String variable warning. 
- warn_regex = r".*is a CF-netCDF label variable.*" - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - with pytest.warns( - iris.exceptions.IrisCfLabelVarWarning, match=warn_regex - ): - operation(warn=True) - with pytest.warns() as record: - operation(warn=False) - warn_list = [str(w.message) for w in record] - assert list(filter(re.compile(warn_regex).match, warn_list)) == [] + log_regex = r".*is a CF-netCDF label variable.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + result = CFUGridAuxiliaryCoordinateVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py index 5fae20e6fc..7d461b324a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridConnectivityVariable` class. @@ -13,14 +14,9 @@ # importing anything else. import iris.tests as tests # isort:skip -import re -import warnings - import numpy as np -import pytest -import iris.exceptions -from iris.experimental.ugrid.cf import CFUGridConnectivityVariable +from iris.experimental.ugrid.cf import CFUGridConnectivityVariable, logger from iris.experimental.ugrid.mesh import Connectivity from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, @@ -203,37 +199,26 @@ def test_warn(self): "ref_source": ref_source, } - def operation(warn: bool): - warnings.warn( - "emit at least 1 warning", - category=iris.exceptions.IrisUserWarning, - ) - result = CFUGridConnectivityVariable.identify(vars_all, warn=warn) - self.assertDictEqual({}, result) + # The warn kwarg and expected corresponding log level. + warn_and_level = {True: "WARNING", False: "DEBUG"} # Missing warning. - warn_regex = ( - rf"Missing CF-UGRID connectivity variable {subject_name}.*" - ) - with pytest.warns( - iris.exceptions.IrisCfMissingVarWarning, match=warn_regex - ): - operation(warn=True) - with pytest.warns() as record: - operation(warn=False) - warn_list = [str(w.message) for w in record] - assert list(filter(re.compile(warn_regex).match, warn_list)) == [] + log_regex = rf"Missing CF-UGRID connectivity variable {subject_name}.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + result = CFUGridConnectivityVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) # String variable warning. 
- warn_regex = r".*is a CF-netCDF label variable.*" - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - with pytest.warns( - iris.exceptions.IrisCfLabelVarWarning, match=warn_regex - ): - operation(warn=True) - with pytest.warns() as record: - operation(warn=False) - warn_list = [str(w.message) for w in record] - assert list(filter(re.compile(warn_regex).match, warn_list)) == [] + log_regex = r".*is a CF-netCDF label variable.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + result = CFUGridConnectivityVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py index 1e707d9550..a3a0e665bb 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py index 59d3a8aad9..08915f7cff 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridMeshVariable` class. @@ -13,14 +14,9 @@ # importing anything else. import iris.tests as tests # isort:skip -import re -import warnings - import numpy as np -import pytest -import iris.exceptions -from iris.experimental.ugrid.cf import CFUGridMeshVariable +from iris.experimental.ugrid.cf import CFUGridMeshVariable, logger from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, ) @@ -246,35 +242,22 @@ def test_warn(self): "ref_source": ref_source, } - def operation(warn: bool): - warnings.warn( - "emit at least 1 warning", - category=iris.exceptions.IrisUserWarning, - ) - result = CFUGridMeshVariable.identify(vars_all, warn=warn) - self.assertDictEqual({}, result) + # The warn kwarg and expected corresponding log level. + warn_and_level = {True: "WARNING", False: "DEBUG"} # Missing warning. 
- warn_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" - with pytest.warns( - iris.exceptions.IrisCfMissingVarWarning, match=warn_regex - ): - operation(warn=True) - with pytest.warns() as record: - operation(warn=False) - warn_list = [str(w.message) for w in record] - assert list(filter(re.compile(warn_regex).match, warn_list)) == [] + log_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + result = CFUGridMeshVariable.identify(vars_all, warn=warn) + self.assertDictEqual({}, result) # String variable warning. - warn_regex = r".*is a CF-netCDF label variable.*" - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - with pytest.warns( - iris.exceptions.IrisCfLabelVarWarning, match=warn_regex - ): - operation(warn=True) - with pytest.warns() as record: - operation(warn=False) - warn_list = [str(w.message) for w in record] - assert list(filter(re.compile(warn_regex).match, warn_list)) == [] + log_regex = r".*is a CF-netCDF label variable.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + result = CFUGridMeshVariable.identify(vars_all, warn=warn) + self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py index 52eb569b43..d9de814b05 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/__init__.py b/lib/iris/tests/unit/experimental/ugrid/load/__init__.py index 3248db6e41..36c9108dc2 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.load` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py b/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py index 5c33b27d3e..1203633297 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.load.ParseUgridOnLoad` class. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py index 8dab48ae9c..4de11d5610 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.experimental.ugrid.load.load_mesh` function. diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py index 1ec3e65a97..310e68248a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.experimental.ugrid.load.load_meshes` function. diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py b/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py index d485782c11..4ce979d845 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.mesh` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index 7251597006..f343f4be24 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
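
The CFUGridAuxiliaryCoordinateVariable / CFUGridConnectivityVariable / CFUGridMeshVariable hunks above move the identify() tests from a pytest.warns idiom (removed lines) to a logger idiom, self.assertLogs(logger, level=..., msg_regex=...) (restored lines). A rough standalone sketch of the two idioms follows; the identify() stub and logger name are invented for the example, and since the msg_regex keyword appears to come from iris's own test base class (stock unittest.TestCase.assertLogs accepts only logger and level), the regex is checked against the captured output directly here.

# Editorial sketch only -- stdlib stand-ins, not the real iris objects.
import logging
import re
import unittest
import warnings

logger = logging.getLogger("sketch.ugrid.cf")


def identify(warn=True):
    # Stand-in for CFUGrid*Variable.identify(): report a "missing variable"
    # message at WARNING when warn=True, at DEBUG otherwise, and return {}.
    level = logging.WARNING if warn else logging.DEBUG
    logger.log(level, "Missing CF-UGRID mesh variable mesh_var")
    return {}


class TestIdentifySketch(unittest.TestCase):
    def test_logger_idiom(self):
        # Logger-based check, as in the restored lines above.
        for warn, level in {True: "WARNING", False: "DEBUG"}.items():
            with self.assertLogs(logger, level=level) as cm:
                result = identify(warn=warn)
            self.assertEqual({}, result)
            self.assertTrue(
                any(re.search(r"Missing CF-UGRID mesh variable", line) for line in cm.output)
            )

    def test_warning_idiom(self):
        # Warning-based check, as in the removed lines above (shown with
        # stdlib assertWarnsRegex rather than pytest.warns).
        with self.assertWarnsRegex(UserWarning, r"Missing CF-UGRID mesh variable"):
            warnings.warn("Missing CF-UGRID mesh variable mesh_var", UserWarning)


if __name__ == "__main__":
    unittest.main()
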
"""Unit tests for the :class:`iris.experimental.ugrid.mesh.Connectivity` class.""" # Import iris.tests first so that some things can be initialised before @@ -13,7 +14,7 @@ import numpy as np from numpy import ma -from packaging import version +from pkg_resources import parse_version from iris._lazy_data import as_lazy_data, is_lazy_data from iris.experimental.ugrid.mesh import Connectivity @@ -62,7 +63,7 @@ def test_indices(self): def test_read_only(self): attributes = ("indices", "cf_role", "start_index", "location_axis") - if version.parse(python_version()) >= version.parse("3.11"): + if parse_version(python_version()) >= parse_version("3.11"): msg = "object has no setter" else: msg = "can't set attribute" diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index 6784bb6e50..f39f3706ee 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`mesh` class.""" # Import iris.tests first so that some things can be initialised before @@ -71,7 +72,7 @@ def setUpClass(cls): class TestProperties1D(TestMeshCommon): - # Tests that can reuse a single instance for greater efficiency. + # Tests that can re-use a single instance for greater efficiency. @classmethod def setUpClass(cls): super().setUpClass() @@ -736,7 +737,7 @@ def test___str__units_stdname(self): class TestOperations1D(TestMeshCommon): - # Tests that cannot reuse an existing Mesh instance, instead need a new + # Tests that cannot re-use an existing Mesh instance, instead need a new # one each time. def setUp(self): self.mesh = mesh.Mesh( diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index a023762d10..cb90c176b6 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.mesh.MeshCoord`. @@ -16,7 +17,7 @@ import dask.array as da import numpy as np -from packaging import version +from pkg_resources import parse_version import pytest from iris._lazy_data import as_lazy_data, is_lazy_data @@ -78,7 +79,7 @@ def setUp(self): def test_fixed_metadata(self): # Check that you cannot set any of these on an existing MeshCoord. 
meshcoord = self.meshcoord - if version.parse(python_version()) >= version.parse("3.11"): + if parse_version(python_version()) >= parse_version("3.11"): msg = "object has no setter" else: msg = "can't set attribute" @@ -577,7 +578,7 @@ def _make_test_meshcoord( edge_xs = self.EDGECOORDS_BASENUM + np.arange(n_edges) face_xs = self.FACECOORDS_BASENUM + np.arange(n_faces) - # Record all these for reuse in tests + # Record all these for re-use in tests self.n_faces = n_faces self.n_nodes = n_nodes self.face_xs = face_xs diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py index 2581bf106a..edd34f94a1 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`. diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py b/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py index a8ad2bc014..2d2d040c1d 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.metadata` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py index 3b8e5ded9f..af92e69b08 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.metadata.ConnectivityMetadata`. diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py index 0786c52934..5c96fb7856 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
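
The test_Connectivity and test_MeshCoord hunks above swap the version check from packaging.version (removed) back to pkg_resources.parse_version (restored). Both gate the expected AttributeError wording on whether the tests run under Python 3.11 or later. A short sketch of the two equivalent spellings; pkg_resources ships with setuptools and is nowadays deprecated, which is presumably why the removed lines had moved to packaging.

# Editorial sketch only -- both comparisons behave the same for this check.
from platform import python_version

from packaging import version            # style in the removed lines
from pkg_resources import parse_version  # style in the restored lines (setuptools)

on_new_python = version.parse(python_version()) >= version.parse("3.11")
assert on_new_python == (parse_version(python_version()) >= parse_version("3.11"))

# The result selects the expected message for assigning to a read-only
# property, whose wording changed in Python 3.11:
msg = "object has no setter" if on_new_python else "can't set attribute"
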
""" Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshCoordMetadata`. diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py index ba7199b777..a8b25dc2e7 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshMetadata`. diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py b/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py index ea8202f8fb..135d7ee49c 100644 --- a/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py +++ b/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.experimental.ugrid.utils` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py index a3cd91815f..4face700ad 100644 --- a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.experimental.ugrid.utils.recombine_submeshes`. diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py index 4c0bca25c1..fa31283c87 100644 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ b/lib/iris/tests/unit/fileformats/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats` package.""" import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/fileformats/abf/__init__.py b/lib/iris/tests/unit/fileformats/abf/__init__.py index 5ddf017c42..aaddf427c5 100644 --- a/lib/iris/tests/unit/fileformats/abf/__init__.py +++ b/lib/iris/tests/unit/fileformats/abf/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.abf` module.""" diff --git a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py b/lib/iris/tests/unit/fileformats/abf/test_ABFField.py index b67e02ec06..98db52d3e9 100644 --- a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py +++ b/lib/iris/tests/unit/fileformats/abf/test_ABFField.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.abf.ABFField` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/cf/__init__.py b/lib/iris/tests/unit/fileformats/cf/__init__.py index 6bc562f922..1bff79368b 100644 --- a/lib/iris/tests/unit/fileformats/cf/__init__.py +++ b/lib/iris/tests/unit/fileformats/cf/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.cf` module.""" diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py index 48f383d7f7..bfc2d586ef 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.fileformats.cf.CFGroup` class.""" from unittest.mock import MagicMock diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 4829d03dbb..9e5cf9b7a5 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.fileformats.cf.CFReader` class. diff --git a/lib/iris/tests/unit/fileformats/dot/__init__.py b/lib/iris/tests/unit/fileformats/dot/__init__.py index afbfed17d8..0dbc3ad4c6 100644 --- a/lib/iris/tests/unit/fileformats/dot/__init__.py +++ b/lib/iris/tests/unit/fileformats/dot/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :mod:`iris.fileformats.dot`.""" diff --git a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py index a29eb625d0..1111e8bc83 100644 --- a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py +++ b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :func:`iris.fileformats.dot._dot_path`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/__init__.py b/lib/iris/tests/unit/fileformats/ff/__init__.py index 945b4f46b5..4d13a18520 100644 --- a/lib/iris/tests/unit/fileformats/ff/__init__.py +++ b/lib/iris/tests/unit/fileformats/ff/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.ff` module.""" diff --git a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py index 5e731632c6..d37b854405 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py +++ b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.fileformat.ff.ArakawaC`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py index 98bc42ddf3..696dacd672 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py +++ b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.fileformat.ff.ENDGame`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index 15bb61e230..cec4f53bc3 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.fileformat.ff.FF2PP` class.""" # Import iris.tests first so that some things can be initialised before @@ -14,7 +15,7 @@ import numpy as np -from iris.exceptions import IrisLoadWarning, NotYetImplementedError +from iris.exceptions import NotYetImplementedError import iris.fileformats._ff as ff from iris.fileformats._ff import FF2PP import iris.fileformats.pp as pp @@ -466,7 +467,7 @@ def test_unequal_spacing_eitherside(self): with mock.patch("warnings.warn") as warn: result = ff2pp._det_border(field_x, None) - warn.assert_called_with(msg, category=IrisLoadWarning) + warn.assert_called_with(msg) self.assertIs(result, field_x) def test_increasing_field_values(self): diff --git a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py index cbbc81dd4b..6a65397086 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.fileformat.ff.FFHeader`.""" # Import iris.tests first so that some things can be initialised before @@ -13,7 +14,7 @@ import numpy as np -from iris.fileformats._ff import FFHeader, _WarnComboLoadingDefaulting +from iris.fileformats._ff import FFHeader MyGrid = collections.namedtuple("MyGrid", "column row real horiz_grid_type") @@ -59,8 +60,7 @@ def test_unknown(self): grid = header.grid() warn.assert_called_with( "Staggered grid type: 0 not currently" - " interpreted, assuming standard C-grid", - category=_WarnComboLoadingDefaulting, + " interpreted, assuming standard C-grid" ) self.assertIs(grid, mock.sentinel.grid) diff --git a/lib/iris/tests/unit/fileformats/ff/test_Grid.py b/lib/iris/tests/unit/fileformats/ff/test_Grid.py index 1bb9688c1a..b20c85b9a8 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_Grid.py +++ b/lib/iris/tests/unit/fileformats/ff/test_Grid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.fileformat.ff.Grid`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py index f3cc41aa82..5f0d64da71 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py +++ b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.fileformat.ff.NewDynamics`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/name_loaders/__init__.py b/lib/iris/tests/unit/fileformats/name_loaders/__init__.py index e9af5168b8..751801a176 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/__init__.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.name_loaders` package.""" diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py index 2ebde5782f..ded635984c 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`. @@ -14,7 +15,6 @@ from unittest import mock import iris.coords -from iris.exceptions import IrisLoadWarning from iris.fileformats.name_loaders import _build_cell_methods @@ -104,7 +104,7 @@ def test_unrecognised(self): "Unknown {} statistic: {!r}. Unable to " "create cell method.".format(coord_name, unrecognised_heading) ) - warn.assert_called_with(expected_msg, category=IrisLoadWarning) + warn.assert_called_with(expected_msg) def test_unrecognised_similar_to_no_averaging(self): unrecognised_headings = [ @@ -129,7 +129,7 @@ def test_unrecognised_similar_to_no_averaging(self): "Unknown {} statistic: {!r}. Unable to " "create cell method.".format(coord_name, unrecognised_heading) ) - warn.assert_called_with(expected_msg, category=IrisLoadWarning) + warn.assert_called_with(expected_msg) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py index e71a31f10f..5954823c54 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`. 
diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py index 20ef79cec3..c4cbde8c14 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`. diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py index ea09d40acb..078f65d572 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.analysis.name_loaders._cf_height_from_name` function. diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py index 2eea25a26d..d50a7fdad1 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py index a29f504b7e..2ea22c420b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules` . diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index efb5e55be8..399a987f11 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. @@ -11,7 +12,6 @@ import tempfile import warnings -from iris.exceptions import IrisLoadWarning import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf @@ -138,7 +138,7 @@ def run_testcase(self, warning_regex=None, **testcase_kwargs): if warning_regex is None: context = self.assertNoWarningsRegexp() else: - context = self.assertWarnsRegex(IrisLoadWarning, warning_regex) + context = self.assertWarnsRegex(UserWarning, warning_regex) with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index 906ba33f9c..99a1b66ae4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index 582ab7e200..d962fc2758 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py index a8c7d2cc5f..dfa862c4d1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. 
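Aside, not part of the patch: the run_testcase change above reverts ``assertWarnsRegex(IrisLoadWarning, ...)`` back to the base ``UserWarning`` class. ``assertWarnsRegex`` checks that a warning of (or derived from) the given class is issued and that its message matches the regex. A minimal standalone sketch, with an illustrative message:

    import unittest
    import warnings


    class ExampleTest(unittest.TestCase):
        def test_warns(self):
            # Passes because the warning is a UserWarning and its message
            # matches the regular expression.
            with self.assertWarnsRegex(UserWarning, "untranslatable"):
                warnings.warn("Skipping untranslatable variable", UserWarning)


    if __name__ == "__main__":
        unittest.main()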
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py index a1a93056cb..ffe00c8c19 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index c27d2445e9..59ffa30684 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py index 127ebbf68b..e6508bea85 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules.engine` . diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index 994d2958c2..df5fbd4922 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py index 62bc3a6c9f..69a536b9ae 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the module :mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index b6e9ba954c..c040d43ca0 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_albers_equal_area_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py index 2d1010166f..87070e00ba 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.build_ancil_var`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index fd500b4831..369f92f238 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_auxilliary_coordinate`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py index ee66f8b267..d0421186b4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.build_cell_measure`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py index 973e10217b..a13fa6cca0 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers\ build_cube_metadata`. @@ -41,7 +42,7 @@ def _make_engine(global_attributes=None, standard_name=None, long_name=None): return engine -class TestGlobalAttributes(tests.IrisTest): +class TestInvalidGlobalAttributes(tests.IrisTest): def test_valid(self): global_attributes = { "Conventions": "CF-1.5", @@ -50,7 +51,7 @@ def test_valid(self): engine = _make_engine(global_attributes) build_cube_metadata(engine) expected = global_attributes - self.assertEqual(engine.cube.attributes.globals, expected) + self.assertEqual(engine.cube.attributes, expected) def test_invalid(self): global_attributes = { @@ -64,14 +65,13 @@ def test_invalid(self): # Check for a warning. self.assertEqual(warn.call_count, 1) self.assertIn( - "Skipping disallowed global attribute 'calendar'", - warn.call_args[0][0], + "Skipping global attribute 'calendar'", warn.call_args[0][0] ) # Check resulting attributes. The invalid entry 'calendar' # should be filtered out. global_attributes.pop("calendar") expected = global_attributes - self.assertEqual(engine.cube.attributes.globals, expected) + self.assertEqual(engine.cube.attributes, expected) class TestCubeName(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py index 4f19d44a2a..bc13975441 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_dimension_coordinate`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index a3efcb0dc4..28b3d8ab9a 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_geostationary_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index 8d5b46c6bb..05185a4cf5 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_lambert_azimuthal_equal_area_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index 9232e146cb..22bb7149b1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_lambert_conformal_coordinate_system`. 
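Aside, not part of the patch: the TestGlobalAttributes revert further up (test_build_cube_metadata.py) switches the comparison from ``cube.attributes.globals``, the split attributes view on the reverted branch, back to the plain ``cube.attributes`` mapping used in 3.7.x. A minimal sketch of the 3.7.x-style access, using a hypothetical in-memory cube rather than the test's mocked engine:

    import numpy as np
    from iris.cube import Cube

    attrs = {"Conventions": "CF-1.5", "comment": "Mocked test object."}
    cube = Cube(np.zeros((2, 2)), attributes=attrs)

    # 3.7.x exposes a single flat attributes mapping on the cube.
    assert dict(cube.attributes) == attrs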
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index 4958eccbfd..ab61d3b1b2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_mercator_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py deleted file mode 100644 index c377cf7d1b..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`. - -""" -from typing import List, NamedTuple, Type -from unittest import mock - -import pytest - -from iris import coord_systems -from iris._deprecation import IrisDeprecation -from iris.coord_systems import ( - CoordSystem, - GeogCS, - ObliqueMercator, - RotatedMercator, -) -from iris.fileformats._nc_load_rules.helpers import ( - build_oblique_mercator_coordinate_system, -) - - -class ParamTuple(NamedTuple): - """Used for easy coupling of test parameters.""" - - id: str - nc_attributes: dict - expected_class: Type[CoordSystem] - coord_system_kwargs: dict - - -kwarg_permutations: List[ParamTuple] = [ - ParamTuple( - "default", - dict(), - ObliqueMercator, - dict(), - ), - ParamTuple( - "azimuth", - dict(azimuth_of_central_line=90), - ObliqueMercator, - dict(azimuth_of_central_line=90), - ), - ParamTuple( - "central_longitude", - dict(longitude_of_projection_origin=90), - ObliqueMercator, - dict(longitude_of_projection_origin=90), - ), - ParamTuple( - "central_latitude", - dict(latitude_of_projection_origin=45), - ObliqueMercator, - dict(latitude_of_projection_origin=45), - ), - ParamTuple( - "false_easting_northing", - dict(false_easting=1000000, false_northing=-2000000), - ObliqueMercator, - dict(false_easting=1000000, false_northing=-2000000), - ), - ParamTuple( - "scale_factor", - # Number inherited from Cartopy's test_mercator.py . 
- dict(scale_factor_at_projection_origin=0.939692620786), - ObliqueMercator, - dict(scale_factor_at_projection_origin=0.939692620786), - ), - ParamTuple( - "globe", - dict(semi_major_axis=1), - ObliqueMercator, - dict(ellipsoid=GeogCS(semi_major_axis=1, semi_minor_axis=1)), - ), - ParamTuple( - "combo", - dict( - azimuth_of_central_line=90, - longitude_of_projection_origin=90, - latitude_of_projection_origin=45, - false_easting=1000000, - false_northing=-2000000, - scale_factor_at_projection_origin=0.939692620786, - semi_major_axis=1, - ), - ObliqueMercator, - dict( - azimuth_of_central_line=90.0, - longitude_of_projection_origin=90.0, - latitude_of_projection_origin=45.0, - false_easting=1000000, - false_northing=-2000000, - scale_factor_at_projection_origin=0.939692620786, - ellipsoid=GeogCS(semi_major_axis=1, semi_minor_axis=1), - ), - ), - ParamTuple( - "rotated", - dict(grid_mapping_name="rotated_mercator"), - RotatedMercator, - dict(), - ), - ParamTuple( - "rotated_azimuth_ignored", - dict( - grid_mapping_name="rotated_mercator", - azimuth_of_central_line=45, - ), - RotatedMercator, - dict(), - ), -] -permutation_ids: List[str] = [p.id for p in kwarg_permutations] - - -class TestAttributes: - """Test that NetCDF attributes are correctly converted to class arguments.""" - - nc_attributes_default = dict( - grid_mapping_name="oblique_mercator", - azimuth_of_central_line=0.0, - latitude_of_projection_origin=0.0, - longitude_of_projection_origin=0.0, - scale_factor_at_projection_origin=1.0, - # Optional attributes not included. - ) - coord_system_kwargs_default = dict( - azimuth_of_central_line=0.0, - latitude_of_projection_origin=0.0, - longitude_of_projection_origin=0.0, - false_easting=None, - false_northing=None, - scale_factor_at_projection_origin=1.0, - ellipsoid=None, - ) - - @pytest.fixture( - autouse=True, params=kwarg_permutations, ids=permutation_ids - ) - def make_variant_inputs(self, request) -> None: - """Parse a ParamTuple into usable test information.""" - inputs: ParamTuple = request.param - - self.nc_attributes = dict( - self.nc_attributes_default, **inputs.nc_attributes - ) - self.expected_class = inputs.expected_class - coord_system_kwargs_expected = dict( - self.coord_system_kwargs_default, **inputs.coord_system_kwargs - ) - - if self.expected_class is RotatedMercator: - del coord_system_kwargs_expected["azimuth_of_central_line"] - - self.coord_system_args_expected = list( - coord_system_kwargs_expected.values() - ) - - def test_attributes(self): - cf_var_mock = mock.Mock(spec=[], **self.nc_attributes) - coord_system_mock = mock.Mock(spec=self.expected_class) - setattr(coord_systems, self.expected_class.__name__, coord_system_mock) - - _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) - coord_system_mock.assert_called_with(*self.coord_system_args_expected) - - -def test_deprecation(): - nc_attributes = dict( - grid_mapping_name="rotated_mercator", - latitude_of_projection_origin=0.0, - longitude_of_projection_origin=0.0, - scale_factor_at_projection_origin=1.0, - ) - cf_var_mock = mock.Mock(spec=[], **nc_attributes) - with pytest.warns(IrisDeprecation, match="azimuth_of_central_line = 90"): - _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py index a20443005c..09cfde9d5b 100755 --- 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_polar_stereographic_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index a483390e36..3796aeebab 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_sterographic_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index ae881259fe..0096c5df4b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_transverse_mercator_coordinate_system`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py index 21906ba644..f34992c2be 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ build_vertical_perspective_coordinate_system`. 
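Aside, not part of the patch: the deleted test_build_oblique_mercator_coordinate_system.py above exercised the ObliqueMercator and RotatedMercator coordinate systems added on main, which this revert removes. A minimal sketch of the constructor those tests drove (runnable only against the un-reverted main branch); the keyword values mirror the deleted test's "combo" case:

    from iris.coord_systems import GeogCS, ObliqueMercator

    cs = ObliqueMercator(
        azimuth_of_central_line=90.0,
        latitude_of_projection_origin=45.0,
        longitude_of_projection_origin=90.0,
        false_easting=1000000,
        false_northing=-2000000,
        scale_factor_at_projection_origin=0.939692620786,
        ellipsoid=GeogCS(semi_major_axis=1, semi_minor_axis=1),
    )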
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py index d80b33f002..a159ef81a8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_attr_units`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py index 7d0dc4952c..ff9c51f40b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_cf_bounds_var`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py index b6a0f3d3c1..3c7c496b54 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ get_names`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index 9dc31ba490..bb94adc72e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ has_supported_mercator_parameters`. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py index faffefd8f2..6e6d6e4e81 100755 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ has_supported_polar_stereographic_parameters`. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index a58413d399..729a2d8b14 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`. @@ -14,7 +15,6 @@ from unittest import mock from iris.coords import CellMethod -from iris.exceptions import IrisCfLoadWarning from iris.fileformats._nc_load_rules.helpers import parse_cell_methods @@ -123,7 +123,7 @@ def test_comment_bracket_mismatch_warning(self): ] for cell_method_str in cell_method_strings: with self.assertWarns( - IrisCfLoadWarning, + UserWarning, msg="Cell methods may be incorrectly parsed due to mismatched brackets", ): _ = parse_cell_methods(cell_method_str) @@ -139,7 +139,7 @@ def test_badly_formatted_warning(self): ] for cell_method_str in cell_method_strings: with self.assertWarns( - IrisCfLoadWarning, + UserWarning, msg=f"Failed to fully parse cell method string: {cell_method_str}", ): _ = parse_cell_methods(cell_method_str) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py index 1e9d13110e..1ee0cfbf2e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.fileformats._nc_load_rules.helpers.\ reorder_bounds_data`. 
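Aside, not part of the patch: in the test_parse_cell_methods.py changes above, the expected warning class reverts from ``IrisCfLoadWarning`` to ``UserWarning``. ``parse_cell_methods`` converts a CF ``cell_methods`` attribute string into CellMethod objects and warns when the string is malformed. A minimal sketch with an illustrative, well-formed string:

    from iris.fileformats._nc_load_rules.helpers import parse_cell_methods

    # Returns a tuple of iris.coords.CellMethod instances.
    cell_methods = parse_cell_methods("time: mean (interval: 1 hr)")
    print(cell_methods)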
diff --git a/lib/iris/tests/unit/fileformats/netcdf/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/__init__.py index 961f7779a8..732094f67a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.netcdf` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py index 67d3fe0fc6..7c2ae96158 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.netcdf.loader` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py deleted file mode 100644 index 7249c39829..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py +++ /dev/null @@ -1,216 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :class:`iris.fileformats.netcdf.loader.ChunkControl`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip -from unittest.mock import ANY, patch - -import dask -import numpy as np -import pytest - -import iris -from iris.cube import CubeList -from iris.fileformats.netcdf import loader -from iris.fileformats.netcdf.loader import CHUNK_CONTROL -import iris.tests.stock as istk - - -@pytest.fixture() -def save_cubelist_with_sigma(tmp_filepath): - cube = istk.simple_4d_with_hybrid_height() - cube_varname = "my_var" - sigma_varname = "my_sigma" - cube.var_name = cube_varname - cube.coord("sigma").var_name = sigma_varname - cube.coord("sigma").guess_bounds() - iris.save(cube, tmp_filepath) - return cube_varname, sigma_varname - - -@pytest.fixture -def save_cube_with_chunksize(tmp_filepath): - cube = istk.simple_3d() - # adding an aux coord allows us to test that - # iris.fileformats.netcdf.loader._get_cf_var_data() - # will only throw an error if from_file mode is - # True when the entire cube has no specified chunking - aux = iris.coords.AuxCoord( - points=np.zeros((3, 4)), - long_name="random", - units="1", - ) - cube.add_aux_coord(aux, [1, 2]) - iris.save(cube, tmp_filepath, chunksizes=(1, 3, 4)) - - -@pytest.fixture(scope="session") -def tmp_filepath(tmp_path_factory): - tmp_dir = tmp_path_factory.mktemp("data") - tmp_path = tmp_dir / "tmp.nc" - return str(tmp_path) - - -@pytest.fixture(autouse=True) -def remove_min_bytes(): - old_min_bytes = loader._LAZYVAR_MIN_BYTES - loader._LAZYVAR_MIN_BYTES = 0 - yield - loader._LAZYVAR_MIN_BYTES = old_min_bytes - - -def test_default(tmp_filepath, save_cubelist_with_sigma): - cube_varname, _ = save_cubelist_with_sigma - cubes = CubeList(loader.load_cubes(tmp_filepath)) - cube = cubes.extract_cube(cube_varname) - assert cube.shape == (3, 4, 5, 6) - assert cube.lazy_data().chunksize == (3, 4, 5, 6) - - sigma = cube.coord("sigma") - assert sigma.shape == (4,) - assert sigma.lazy_points().chunksize == (4,) - assert sigma.lazy_bounds().chunksize == (4, 2) - - -def test_control_global(tmp_filepath, save_cubelist_with_sigma): - cube_varname, _ = save_cubelist_with_sigma - with CHUNK_CONTROL.set(model_level_number=2): - cubes = CubeList(loader.load_cubes(tmp_filepath)) - cube = cubes.extract_cube(cube_varname) - assert cube.shape == (3, 4, 5, 6) - assert cube.lazy_data().chunksize == (3, 2, 5, 6) - - sigma = cube.coord("sigma") - assert sigma.shape == (4,) - assert sigma.lazy_points().chunksize == (2,) - assert sigma.lazy_bounds().chunksize == (2, 2) - - -def test_control_sigma_only(tmp_filepath, save_cubelist_with_sigma): - cube_varname, sigma_varname = save_cubelist_with_sigma - with CHUNK_CONTROL.set(sigma_varname, model_level_number=2): - cubes = CubeList(loader.load_cubes(tmp_filepath)) - cube = cubes.extract_cube(cube_varname) - assert cube.shape == (3, 4, 5, 6) - assert cube.lazy_data().chunksize == (3, 4, 5, 6) - - sigma = cube.coord("sigma") - assert sigma.shape == (4,) - assert sigma.lazy_points().chunksize == (2,) - # N.B. 
this does not apply to bounds array - assert sigma.lazy_bounds().chunksize == (4, 2) - - -def test_control_cube_var(tmp_filepath, save_cubelist_with_sigma): - cube_varname, _ = save_cubelist_with_sigma - with CHUNK_CONTROL.set(cube_varname, model_level_number=2): - cubes = CubeList(loader.load_cubes(tmp_filepath)) - cube = cubes.extract_cube(cube_varname) - assert cube.shape == (3, 4, 5, 6) - assert cube.lazy_data().chunksize == (3, 2, 5, 6) - - sigma = cube.coord("sigma") - assert sigma.shape == (4,) - assert sigma.lazy_points().chunksize == (2,) - assert sigma.lazy_bounds().chunksize == (2, 2) - - -def test_invalid_chunksize(tmp_filepath, save_cubelist_with_sigma): - with pytest.raises(ValueError): - with CHUNK_CONTROL.set(model_level_numer="2"): - CubeList(loader.load_cubes(tmp_filepath)) - - -def test_invalid_var_name(tmp_filepath, save_cubelist_with_sigma): - with pytest.raises(ValueError): - with CHUNK_CONTROL.set([1, 2], model_level_numer="2"): - CubeList(loader.load_cubes(tmp_filepath)) - - -def test_control_multiple(tmp_filepath, save_cubelist_with_sigma): - cube_varname, sigma_varname = save_cubelist_with_sigma - with CHUNK_CONTROL.set( - cube_varname, model_level_number=2 - ), CHUNK_CONTROL.set(sigma_varname, model_level_number=3): - cubes = CubeList(loader.load_cubes(tmp_filepath)) - cube = cubes.extract_cube(cube_varname) - assert cube.shape == (3, 4, 5, 6) - assert cube.lazy_data().chunksize == (3, 2, 5, 6) - - sigma = cube.coord("sigma") - assert sigma.shape == (4,) - assert sigma.lazy_points().chunksize == (3,) - assert sigma.lazy_bounds().chunksize == (2, 2) - - -def test_neg_one(tmp_filepath, save_cubelist_with_sigma): - cube_varname, _ = save_cubelist_with_sigma - with dask.config.set({"array.chunk-size": "50B"}): - with CHUNK_CONTROL.set(model_level_number=-1): - cubes = CubeList(loader.load_cubes(tmp_filepath)) - cube = cubes.extract_cube(cube_varname) - assert cube.shape == (3, 4, 5, 6) - # uses known good output - assert cube.lazy_data().chunksize == (1, 4, 1, 1) - - sigma = cube.coord("sigma") - assert sigma.shape == (4,) - assert sigma.lazy_points().chunksize == (4,) - assert sigma.lazy_bounds().chunksize == (4, 1) - - -def test_from_file(tmp_filepath, save_cube_with_chunksize): - with CHUNK_CONTROL.from_file(): - cube = next(loader.load_cubes(tmp_filepath)) - assert cube.shape == (2, 3, 4) - assert cube.lazy_data().chunksize == (1, 3, 4) - - -def test_no_chunks_from_file(tmp_filepath, save_cubelist_with_sigma): - cube_varname, _ = save_cubelist_with_sigma - with pytest.raises(KeyError): - with CHUNK_CONTROL.from_file(): - CubeList(loader.load_cubes(tmp_filepath)) - - -def test_as_dask(tmp_filepath, save_cubelist_with_sigma): - """ - This does not test return values, as we can't be sure - dask chunking behaviour won't change, or that it will differ - from our own chunking behaviour. 
- """ - message = "Mock called, rest of test unneeded" - with patch("iris.fileformats.netcdf.loader.as_lazy_data") as as_lazy_data: - as_lazy_data.side_effect = RuntimeError(message) - with CHUNK_CONTROL.as_dask(): - try: - CubeList(loader.load_cubes(tmp_filepath)) - except RuntimeError as e: - if str(e) != message: - raise e - as_lazy_data.assert_called_with(ANY, chunks=None, dask_chunking=True) - - -def test_pinned_optimisation(tmp_filepath, save_cubelist_with_sigma): - cube_varname, _ = save_cubelist_with_sigma - with dask.config.set({"array.chunk-size": "250B"}): - with CHUNK_CONTROL.set(model_level_number=2): - cubes = CubeList(loader.load_cubes(tmp_filepath)) - cube = cubes.extract_cube(cube_varname) - assert cube.shape == (3, 4, 5, 6) - # uses known good output - # known good output WITHOUT pinning: (1, 1, 5, 6) - assert cube.lazy_data().chunksize == (1, 2, 2, 6) - - sigma = cube.coord("sigma") - assert sigma.shape == (4,) - assert sigma.lazy_points().chunksize == (2,) - assert sigma.lazy_bounds().chunksize == (2, 2) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py index caece8b6bc..6c487d74e7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.netcdf._get_cf_var_data` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. @@ -14,7 +15,7 @@ from iris._lazy_data import _optimum_chunksize import iris.fileformats.cf -from iris.fileformats.netcdf.loader import CHUNK_CONTROL, _get_cf_var_data +from iris.fileformats.netcdf.loader import _get_cf_var_data class Test__get_cf_var_data(tests.IrisTest): @@ -29,7 +30,6 @@ def _make( cf_data = mock.MagicMock( _FillValue=None, __getitem__="", - dimensions=["dim_" + str(x) for x in range(len(shape or "1"))], ) cf_data.chunking = mock.MagicMock(return_value=chunksizes) if shape is None: @@ -61,16 +61,6 @@ def test_cf_data_chunks(self): expected_chunks = _optimum_chunksize(chunks, self.shape) self.assertArrayEqual(lazy_data_chunks, expected_chunks) - def test_cf_data_chunk_control(self): - # more thorough testing can be found at `test__chunk_control` - chunks = [2500, 240, 200] - cf_var = self._make(shape=(2500, 240, 200), chunksizes=chunks) - with CHUNK_CONTROL.set(dim_0=25, dim_1=24, dim_2=20): - lazy_data = _get_cf_var_data(cf_var, self.filename) - lazy_data_chunks = [c[0] for c in lazy_data.chunks] - expected_chunks = (25, 24, 20) - self.assertArrayEqual(lazy_data_chunks, expected_chunks) - def test_cf_data_no_chunks(self): # No chunks means chunks are calculated from the array's shape by # `iris._lazy_data._optimum_chunksize()`. 
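Aside, not part of the patch: the deletions above (test__chunk_control.py and the related _get_cf_var_data test case) covered the CHUNK_CONTROL context manager for steering NetCDF lazy-loading chunk sizes, which this revert removes. A minimal sketch of the API the deleted tests exercised (runnable only against the un-reverted main branch; "tmp.nc" is a placeholder path):

    import iris
    from iris.fileformats.netcdf.loader import CHUNK_CONTROL

    # Pin the chunk size of the model_level_number dimension during loading.
    with CHUNK_CONTROL.set(model_level_number=2):
        cubes = iris.load("tmp.nc")

    # Alternatively, adopt the chunk sizes recorded in the file itself.
    with CHUNK_CONTROL.from_file():
        cubes = iris.load("tmp.nc")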
diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py index 01c6838241..841935cc81 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.netcdf._load_aux_factory` function.""" # Import iris.tests first so that some things can be initialised before @@ -15,7 +16,6 @@ from iris.coords import DimCoord from iris.cube import Cube -from iris.exceptions import IrisFactoryCoordNotFoundWarning from iris.fileformats.netcdf.loader import _load_aux_factory @@ -165,8 +165,7 @@ def test_formula_terms_ap_missing_coords(self): with mock.patch("warnings.warn") as warn: _load_aux_factory(self.engine, self.cube) warn.assert_called_once_with( - "Unable to find coordinate for variable " "'ap'", - category=IrisFactoryCoordNotFoundWarning, + "Unable to find coordinate for variable " "'ap'" ) self._check_no_delta() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py index 25beca8f59..b67c546aa0 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py index 2522392c21..77bb0d3950 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.netcdf._translate_constraints_to_var_callback`. diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 77c1da2d1c..1a2ef1d29d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.fileformats.netcdf.load_cubes` function. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py index 53d42128f6..a68d5fc5d0 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.netcdf.saver` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index e4d8488a76..12af318c01 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :class:`iris.fileformats.netcdf.Saver` class.""" # Import iris.tests first so that some things can be initialised before @@ -23,16 +24,13 @@ LambertAzimuthalEqualArea, LambertConformal, Mercator, - ObliqueMercator, RotatedGeogCS, - RotatedMercator, Stereographic, TransverseMercator, VerticalPerspective, ) from iris.coords import AuxCoord, DimCoord from iris.cube import Cube -from iris.exceptions import IrisMaskValueMatchWarning from iris.fileformats.netcdf import Saver, _thread_safe_nc import iris.tests.stock as stock @@ -557,7 +555,7 @@ def test_contains_fill_value_passed(self): cube = self._make_cube(">f4") fill_value = 1 with self.assertWarnsRegex( - IrisMaskValueMatchWarning, + UserWarning, "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube, fill_value=fill_value): @@ -569,7 +567,7 @@ def test_contains_fill_value_byte(self): cube = self._make_cube(">i1") fill_value = 1 with self.assertWarnsRegex( - IrisMaskValueMatchWarning, + UserWarning, "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube, fill_value=fill_value): @@ -581,7 +579,7 @@ def test_contains_default_fill_value(self): cube = self._make_cube(">f4") cube.data[0, 0] = _thread_safe_nc.default_fillvals["f4"] with self.assertWarnsRegex( - IrisMaskValueMatchWarning, + UserWarning, "contains unmasked data points equal to the fill-value", ): with self._netCDF_var(cube): @@ -1066,50 +1064,6 @@ def test_geo_cs(self): } self._test(coord_system, expected) - def test_oblique_cs(self): - # Some none-default settings to confirm all parameters are being - # handled. 
- - kwargs_rotated = dict( - latitude_of_projection_origin=90.0, - longitude_of_projection_origin=45.0, - false_easting=1000000.0, - false_northing=-2000000.0, - scale_factor_at_projection_origin=0.939692620786, - ellipsoid=GeogCS(1), - ) - - # Same as rotated, but with azimuth too. - oblique_azimuth = dict(azimuth_of_central_line=45.0) - kwargs_oblique = dict(kwargs_rotated, **oblique_azimuth) - - expected_rotated = dict( - # Automatically converted to oblique_mercator in line with CF 1.11 . - grid_mapping_name=b"oblique_mercator", - # Azimuth should be automatically populated. - azimuth_of_central_line=90.0, - **kwargs_rotated, - ) - # Convert the ellipsoid - expected_rotated.update( - dict( - earth_radius=expected_rotated.pop("ellipsoid").semi_major_axis, - longitude_of_prime_meridian=0.0, - ) - ) - - # Same as rotated, but different azimuth. - expected_oblique = dict(expected_rotated, **oblique_azimuth) - - oblique = ObliqueMercator(**kwargs_oblique) - rotated = RotatedMercator(**kwargs_rotated) - - for coord_system, expected in [ - (oblique, expected_oblique), - (rotated, expected_rotated), - ]: - self._test(coord_system, expected) - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py index 2e7091c43b..e1211dc276 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Mirror of :mod:`iris.tests.unit.fileformats.netcdf.test_Saver`, but with lazy arrays.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index 10c5dbecf4..9686c88abf 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :meth:`iris.fileformats.netcdf.saver.Saver._lazy_stream_data`. 
@@ -17,7 +18,6 @@ import numpy as np import pytest -from iris.exceptions import IrisMaskValueMatchWarning import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc from iris.fileformats.netcdf.saver import Saver, _FillvalueCheckInfo @@ -183,5 +183,5 @@ def test_warnings(self, compute, data_form): if n_expected_warnings > 0: warning = issued_warnings[0] msg = "contains unmasked data points equal to the fill-value, 2.0" - assert isinstance(warning, IrisMaskValueMatchWarning) + assert isinstance(warning, UserWarning) assert msg in warning.args[0] diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 8177e0c299..27d9709fe6 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :class:`iris.fileformats.netcdf.Saver` class. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py index 9068837b2c..95a518e4e5 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.netcdf.saver._data_fillvalue_check`. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py index 42119094a7..b2e4b63e3a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. """ @@ -10,9 +11,9 @@ import numpy as np import pytest -from iris.exceptions import IrisSaverFillValueWarning from iris.fileformats.netcdf._thread_safe_nc import default_fillvals from iris.fileformats.netcdf.saver import ( + SaverFillValueWarning, _fillvalue_report, _FillvalueCheckInfo, ) @@ -92,14 +93,12 @@ def test_warn(self, has_collision): expected_msg = "'' contains unmasked data points equal to the fill-value" # Enter a warnings context that checks for the error. 
warning_context = pytest.warns( - IrisSaverFillValueWarning, match=expected_msg + SaverFillValueWarning, match=expected_msg ) warning_context.__enter__() else: # Check that we get NO warning of the expected type. - warnings.filterwarnings( - "error", category=IrisSaverFillValueWarning - ) + warnings.filterwarnings("error", category=SaverFillValueWarning) # Do call: it should raise AND return a warning, ONLY IF there was a collision. result = _fillvalue_report( diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index 620bc64461..68049b57fc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :func:`iris.fileformats.netcdf.save` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py index 928c9d9361..429ee9ce1f 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.nimrod_load_rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py index c15a721ad3..a15337f849 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.fileformats.nimrod_load_rules.units` function. diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py index 4f1b948839..44dcf8ac48 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
""" Unit tests for the `iris.fileformats.nimrod_load_rules.vertical_coord` function. @@ -62,7 +63,7 @@ def test_unhandled(self): vertical_coord_val=1.0, vertical_coord_type=-1 ) warn.assert_called_once_with( - "Vertical coord -1 not yet handled", category=TranslationWarning + "Vertical coord -1 not yet handled", TranslationWarning ) def test_null(self): diff --git a/lib/iris/tests/unit/fileformats/pp/__init__.py b/lib/iris/tests/unit/fileformats/pp/__init__.py index 7eedc830d9..f309b6848a 100644 --- a/lib/iris/tests/unit/fileformats/pp/__init__.py +++ b/lib/iris/tests/unit/fileformats/pp/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.pp` module.""" diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py index 4d963e7f08..d70e573296 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.PPDataProxy` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index 1a49f57712..316894ded1 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.PPField` class.""" # Import iris.tests first so that some things can be initialised before @@ -12,7 +13,6 @@ import numpy as np -from iris.exceptions import IrisDefaultingWarning, IrisMaskValueMatchWarning import iris.fileformats.pp as pp from iris.fileformats.pp import PPField, SplittableInt @@ -91,7 +91,7 @@ def field_checksum(data): data_64 = np.linspace(0, 1, num=10, endpoint=False).reshape(2, 5) checksum_32 = field_checksum(data_64.astype(">f4")) msg = "Downcasting array precision from float64 to float32 for save." 
- with self.assertWarnsRegex(IrisDefaultingWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): checksum_64 = field_checksum(data_64.astype(">f8")) self.assertEqual(checksum_32, checksum_64) @@ -104,7 +104,7 @@ def test_masked_mdi_value_warning(self): [1.0, field.bmdi, 3.0], dtype=np.float32 ) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): with self.temp_filename(".pp") as temp_filename: with open(temp_filename, "wb") as pp_file: field.save(pp_file) @@ -116,7 +116,7 @@ def test_unmasked_mdi_value_warning(self): # Make float32 data, as float64 default produces an extra warning. field.data = np.array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): + with self.assertWarnsRegex(UserWarning, msg): with self.temp_filename(".pp") as temp_filename: with open(temp_filename, "wb") as pp_file: field.save(pp_file) diff --git a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py index cca9bb4641..514e326393 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py +++ b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.load` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py index ab80332186..16d2b500a5 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py +++ b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp._create_field_data` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index 45635af391..73913c6219 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function. 
diff --git a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py index 80b90fc8d2..31ac4f6b19 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py +++ b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp._field_gen` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py index aa03c068ce..0b83cade76 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py +++ b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp._interpret_field` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py index 213eb6c9c4..3ff228e106 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.as_fields` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_load.py b/lib/iris/tests/unit/fileformats/pp/test_load.py index e802b36c0e..77da1288c2 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_load.py +++ b/lib/iris/tests/unit/fileformats/pp/test_load.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.load` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index f49d389841..8200259cca 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.save` function.""" # Import iris.tests first so that some things can be initialised before @@ -43,18 +44,6 @@ def test_grid_and_pole__scalar_dim_longitude(unit, modulus): assert field.lbnpt == lon.points.size -def test_realization(): - cube = stock.lat_lon_cube() - real_coord = DimCoord(42, standard_name="realization", units=1) - cube.add_aux_coord(real_coord) - with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: - pp_field.lbrsvd = list(range(4)) - verify(cube, pp_field) - member_number = pp_field.lbrsvd[3] - - assert member_number == 42 - - def _pp_save_ppfield_values(cube): """ Emulate saving a cube as PP, and capture the resulting PP field values. diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py index 2eaebc0059..fdd470cb47 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.save_fields` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py index 5ab3f7c480..cdd3c9cd49 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.pp.save_pairs_from_cube` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py index c8361feae4..70d28f7c09 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :mod:`iris.fileformats.pp_load_rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py index 591bfda857..e194e240c6 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index c87e199956..c9c4821e0a 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._collapse_degenerate_points_and_bounds`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py index 803e47227f..d3046ee63e 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_pseudo_level_coords`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py index 6159a1dbd4..759a399dad 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_scalar_realization_coords`. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index 5afaeee45d..cf147e5928 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_time_coords`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py index a7ed6355f6..47552a646a 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._convert_vertical_coords`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py index 176d0a38a1..7769ca1de1 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :func:`iris.fileformats.pp_load_rules._dim_or_aux`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py index 2724d45871..2c5d672e14 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._epoch_date_hours`. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py index c99de5bc34..fa381b91c1 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py index a33128f39b..fc30f66f7f 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._reduce_points_and_bounds`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py index d12a718e98..4e6d50fea7 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for :func:`iris.fileformats.pp_load_rules._reshape_vector_args`. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py index 0b46d11f9d..569d676183 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for :func:`iris.fileformats.pp_load_rules.convert`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/rules/__init__.py b/lib/iris/tests/unit/fileformats/rules/__init__.py index 1b14a8b07b..55c9c7779e 100644 --- a/lib/iris/tests/unit/fileformats/rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/rules/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.fileformats.rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/rules/test_Loader.py b/lib/iris/tests/unit/fileformats/rules/test_Loader.py index b99d1e6f40..be96f526d2 100644 --- a/lib/iris/tests/unit/fileformats/rules/test_Loader.py +++ b/lib/iris/tests/unit/fileformats/rules/test_Loader.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris.fileformats.rules.Loader`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py index 91862658e5..b6c4528399 100644 --- a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py +++ b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :func:`iris.fileformats.rules._make_cube`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py b/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py index 8a0a9a38d7..c703284fc0 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :mod:`iris.fileformats._structured_array_identification` module. 
diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index 9f5466afaa..871aab4f1e 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :mod:`iris.fileformats._structured_array_identification.ArrayStructure` class. diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py index ec98664f51..a7818ad802 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :mod:`iris.fileformats._structured_array_identification.GroupStructure` class. diff --git a/lib/iris/tests/unit/fileformats/test_rules.py b/lib/iris/tests/unit/fileformats/test_rules.py index b7e17b205e..c243a374cb 100644 --- a/lib/iris/tests/unit/fileformats/test_rules.py +++ b/lib/iris/tests/unit/fileformats/test_rules.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test iris.fileformats.rules.py - metadata translation rules. diff --git a/lib/iris/tests/unit/fileformats/um/__init__.py b/lib/iris/tests/unit/fileformats/um/__init__.py index 6652c6d543..6b4abc61bb 100644 --- a/lib/iris/tests/unit/fileformats/um/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.fileformats.um` package.""" diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py index f2c18b5f8a..b5eb259e5b 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the module :mod:`iris.fileformats.um._fast_load`. diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py index 930050813f..0c15e5e839 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the class :class:`iris.fileformats.um._fast_load.FieldCollation`. diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py index f6e3fd5928..90c411b41d 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :func:`iris.fileformats.um._fast_load._convert_collation`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py index c26382aca9..f0932c3ac8 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the module :mod:`iris.fileformats.um._fast_load_structured_fields`. diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py index 19c64ec57a..57100c79af 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the class :class:`iris.fileformats.um._fast_load_structured_fields.BasicFieldCollation`. 
diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py index 75b54dfd4f..b7ef9a62a3 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:\ `iris.fileformats.um._fast_load_structured_fields.group_structured_fields`. diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py index 5a72973519..8070719de8 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the module :mod:`iris.fileformats.um._optimal_array_structuring`. diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index 92a8b19ec0..96566f3c80 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.fileformats.um._optimal_array_structuring.optimal_array_structure`. diff --git a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py index 40ac6826d3..ef6369f638 100644 --- a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py +++ b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the function :func:`iris.fileformats.um.um_to_pp`. 
diff --git a/lib/iris/tests/unit/io/__init__.py b/lib/iris/tests/unit/io/__init__.py index 1a11fe5d30..5e347c9ebc 100644 --- a/lib/iris/tests/unit/io/__init__.py +++ b/lib/iris/tests/unit/io/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.io` package.""" diff --git a/lib/iris/tests/unit/io/test__generate_cubes.py b/lib/iris/tests/unit/io/test__generate_cubes.py index 96d790db2d..3a896a111c 100755 --- a/lib/iris/tests/unit/io/test__generate_cubes.py +++ b/lib/iris/tests/unit/io/test__generate_cubes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.io._generate_cubes` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/io/test_expand_filespecs.py b/lib/iris/tests/unit/io/test_expand_filespecs.py index bd5e5933a3..8720478153 100644 --- a/lib/iris/tests/unit/io/test_expand_filespecs.py +++ b/lib/iris/tests/unit/io/test_expand_filespecs.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.io.expand_filespecs` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/io/test_run_callback.py b/lib/iris/tests/unit/io/test_run_callback.py index cd55743b29..94ae7ac09d 100644 --- a/lib/iris/tests/unit/io/test_run_callback.py +++ b/lib/iris/tests/unit/io/test_run_callback.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.io.run_callback` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/io/test_save.py b/lib/iris/tests/unit/io/test_save.py index cec125d0fe..623cf417f2 100755 --- a/lib/iris/tests/unit/io/test_save.py +++ b/lib/iris/tests/unit/io/test_save.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the `iris.io.save` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/__init__.py b/lib/iris/tests/unit/lazy_data/__init__.py index 55920077f3..b463897c50 100644 --- a/lib/iris/tests/unit/lazy_data/__init__.py +++ b/lib/iris/tests/unit/lazy_data/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris._lazy_data` module.""" diff --git a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py index 91b22a3c0e..1a98c81fac 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._lazy data.as_concrete_data`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py index 2222d185c3..5f9dece153 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Test the function :func:`iris._lazy data.as_lazy_data`.""" # Import iris.tests first so that some things can be initialised before @@ -41,25 +42,6 @@ def test_non_default_chunks(self): (result,) = np.unique(lazy_data.chunks) self.assertEqual(result, 24) - def test_dask_chunking(self): - data = np.arange(24) - chunks = (12,) - optimum = self.patch("iris._lazy_data._optimum_chunksize") - optimum.return_value = chunks - _ = as_lazy_data(data, chunks=None, dask_chunking=True) - self.assertFalse(optimum.called) - - def test_dask_chunking_error(self): - data = np.arange(24) - chunks = (12,) - optimum = self.patch("iris._lazy_data._optimum_chunksize") - optimum.return_value = chunks - with self.assertRaisesRegex( - ValueError, - r"Dask chunking chosen, but chunks already assigned value", - ): - as_lazy_data(data, chunks=chunks, dask_chunking=True) - def test_with_masked_constant(self): masked_data = ma.masked_array([8], mask=True) masked_constant = masked_data[0] @@ -170,10 +152,7 @@ def test_default_chunks_limiting(self): limitcall_patch.call_args_list, [ mock.call( - list(test_shape), - shape=test_shape, - dtype=np.dtype("f4"), - dims_fixed=None, + list(test_shape), shape=test_shape, dtype=np.dtype("f4") ) ], ) diff --git a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py index 3b265d615d..0c10d69c16 100644 --- a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py +++ b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._lazy data.co_realise_cubes`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py index a8018c67b1..45b3194f32 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._lazy data.is_lazy_data`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py index 6466ab0ea2..4d627a706b 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Test function :func:`iris._lazy data.is_lazy_masked_data`.""" import dask.array as da diff --git a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py index 651a774c4d..49fd6ad70b 100644 --- a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py +++ b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._lazy data.lazy_elementwise`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py index 1c694d292b..66c03d04c8 100644 --- a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py +++ b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._lazy data.map_complete_blocks`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py b/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py index 993cb01178..9fe79a0d4c 100644 --- a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py +++ b/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._lazy data.multidim_lazy_stack`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/lazy_data/test_non_lazy.py b/lib/iris/tests/unit/lazy_data/test_non_lazy.py index 3c6bb99e0a..cc4ed33ea3 100644 --- a/lib/iris/tests/unit/lazy_data/test_non_lazy.py +++ b/lib/iris/tests/unit/lazy_data/test_non_lazy.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris._lazy data.non_lazy`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/merge/__init__.py b/lib/iris/tests/unit/merge/__init__.py index 14ef96573f..c3ead61576 100644 --- a/lib/iris/tests/unit/merge/__init__.py +++ b/lib/iris/tests/unit/merge/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris._merge` module.""" diff --git a/lib/iris/tests/unit/merge/test_ProtoCube.py b/lib/iris/tests/unit/merge/test_ProtoCube.py index 80f135e108..0fca726b28 100644 --- a/lib/iris/tests/unit/merge/test_ProtoCube.py +++ b/lib/iris/tests/unit/merge/test_ProtoCube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris._merge.ProtoCube` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/pandas/__init__.py b/lib/iris/tests/unit/pandas/__init__.py index 2ee1fb1cfe..103a264839 100644 --- a/lib/iris/tests/unit/pandas/__init__.py +++ b/lib/iris/tests/unit/pandas/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.pandas` module.""" diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index 6f617439db..fd716bd7c9 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """All unit tests for the :mod:`iris.pandas` module.""" # import iris tests first so that some things can be initialised before @@ -1074,7 +1075,7 @@ def test_ancillary_variable(self): def test_3d_with_2d_coord(self): df = self._create_pandas(index_levels=3) coord_shape = df.index.levshape[:2] - coord_values = np.arange(np.prod(coord_shape)) + coord_values = np.arange(np.product(coord_shape)) coord_name = "foo" df[coord_name] = coord_values.repeat(df.index.levshape[-1]) result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name]) @@ -1088,7 +1089,7 @@ def test_3d_with_2d_coord(self): def test_coord_varies_all_indices(self): df = self._create_pandas(index_levels=3) coord_shape = df.index.levshape - coord_values = np.arange(np.prod(coord_shape)) + coord_values = np.arange(np.product(coord_shape)) coord_name = "foo" df[coord_name] = coord_values result = iris.pandas.as_cubes(df, aux_coord_cols=[coord_name]) @@ -1104,7 +1105,7 @@ def test_category_coord(self): # increment. df = self._create_pandas(index_levels=2) coord_shape = df.index.levshape - coord_values = np.arange(np.prod(coord_shape)) + coord_values = np.arange(np.product(coord_shape)) coord_name = "foo" # Create a repeating value along a dimension. 
diff --git a/lib/iris/tests/unit/plot/__init__.py b/lib/iris/tests/unit/plot/__init__.py index 7481cdafa3..f589a29e0d 100644 --- a/lib/iris/tests/unit/plot/__init__.py +++ b/lib/iris/tests/unit/plot/__init__.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.plot` module.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/_blockplot_common.py b/lib/iris/tests/unit/plot/_blockplot_common.py index e3e88304fa..455b416164 100644 --- a/lib/iris/tests/unit/plot/_blockplot_common.py +++ b/lib/iris/tests/unit/plot/_blockplot_common.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Common test code for `iris.plot.pcolor` and `iris.plot.pcolormesh`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py index 3e25f0aadb..4dfc6d7f68 100644 --- a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py +++ b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot._check_bounds_contiguity_and_mask` function.""" diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py index cfbb15cdef..633dea85c4 100644 --- a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py +++ b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot._check_geostationary_coords_and_convert function.""" @@ -36,7 +37,7 @@ def setUp(self): ) def _test(self, geostationary=True): - # Reusable test for when Geostationary is present OR absent. + # Re-usable test for when Geostationary is present OR absent. if geostationary: # A Geostationary projection WILL be processed. 
projection_spec = Geostationary diff --git a/lib/iris/tests/unit/plot/test__fixup_dates.py b/lib/iris/tests/unit/plot/test__fixup_dates.py index d155f30969..0abef01e41 100644 --- a/lib/iris/tests/unit/plot/test__fixup_dates.py +++ b/lib/iris/tests/unit/plot/test__fixup_dates.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot._fixup_dates` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn.py b/lib/iris/tests/unit/plot/test__get_plot_defn.py index 512dc7f0b2..c69173dc70 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot._get_plot_defn` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py index dcd8fac9e1..631f9bd24e 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot._get_plot_defn_custom_coords_picked` function.""" diff --git a/lib/iris/tests/unit/plot/test__get_plot_objects.py b/lib/iris/tests/unit/plot/test__get_plot_objects.py index fbccbe94fb..8586faa756 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_objects.py +++ b/lib/iris/tests/unit/plot/test__get_plot_objects.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot._get_plot_objects` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py index 8d4054b35a..8e2d4f226b 100644 --- a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py +++ b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.__replace_axes_with_cartopy_axes` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_contour.py b/lib/iris/tests/unit/plot/test_contour.py index 43c0564ff4..823b3270d0 100644 --- a/lib/iris/tests/unit/plot/test_contour.py +++ b/lib/iris/tests/unit/plot/test_contour.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.contour` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_contourf.py b/lib/iris/tests/unit/plot/test_contourf.py index 64ab87f879..de84e88a52 100644 --- a/lib/iris/tests/unit/plot/test_contourf.py +++ b/lib/iris/tests/unit/plot/test_contourf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.contourf` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_hist.py b/lib/iris/tests/unit/plot/test_hist.py index feef8f1062..8a74ff8701 100644 --- a/lib/iris/tests/unit/plot/test_hist.py +++ b/lib/iris/tests/unit/plot/test_hist.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.hist` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/plot/test_outline.py b/lib/iris/tests/unit/plot/test_outline.py index dc1b27487b..de59287362 100644 --- a/lib/iris/tests/unit/plot/test_outline.py +++ b/lib/iris/tests/unit/plot/test_outline.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.outline` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_pcolor.py b/lib/iris/tests/unit/plot/test_pcolor.py index 219df4d446..1cde9e8822 100644 --- a/lib/iris/tests/unit/plot/test_pcolor.py +++ b/lib/iris/tests/unit/plot/test_pcolor.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.pcolor` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_pcolormesh.py b/lib/iris/tests/unit/plot/test_pcolormesh.py index a5525770f2..f4e84e5765 100644 --- a/lib/iris/tests/unit/plot/test_pcolormesh.py +++ b/lib/iris/tests/unit/plot/test_pcolormesh.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.pcolormesh` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py index db33862a7e..1ed2da1b13 100644 --- a/lib/iris/tests/unit/plot/test_plot.py +++ b/lib/iris/tests/unit/plot/test_plot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.plot` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_points.py b/lib/iris/tests/unit/plot/test_points.py index 0d713e3d84..e1a23eff83 100644 --- a/lib/iris/tests/unit/plot/test_points.py +++ b/lib/iris/tests/unit/plot/test_points.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.points` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/plot/test_scatter.py b/lib/iris/tests/unit/plot/test_scatter.py index f3b2ec1592..c5cd9cb2f2 100644 --- a/lib/iris/tests/unit/plot/test_scatter.py +++ b/lib/iris/tests/unit/plot/test_scatter.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.plot.scatter` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/__init__.py b/lib/iris/tests/unit/quickplot/__init__.py index 1ce65d9647..471ef0f6a5 100644 --- a/lib/iris/tests/unit/quickplot/__init__.py +++ b/lib/iris/tests/unit/quickplot/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.quickplot` module.""" diff --git a/lib/iris/tests/unit/quickplot/test_contour.py b/lib/iris/tests/unit/quickplot/test_contour.py index 2f3bb1a45d..8e3db7c3e0 100644 --- a/lib/iris/tests/unit/quickplot/test_contour.py +++ b/lib/iris/tests/unit/quickplot/test_contour.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.quickplot.contour` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_contourf.py b/lib/iris/tests/unit/quickplot/test_contourf.py index 55c9940821..e510e661ae 100644 --- a/lib/iris/tests/unit/quickplot/test_contourf.py +++ b/lib/iris/tests/unit/quickplot/test_contourf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.quickplot.contourf` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_outline.py b/lib/iris/tests/unit/quickplot/test_outline.py index 4dd924b749..70d96372fa 100644 --- a/lib/iris/tests/unit/quickplot/test_outline.py +++ b/lib/iris/tests/unit/quickplot/test_outline.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.quickplot.outline` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_pcolor.py b/lib/iris/tests/unit/quickplot/test_pcolor.py index 79f6904e12..2e559d6308 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolor.py +++ b/lib/iris/tests/unit/quickplot/test_pcolor.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the `iris.quickplot.pcolor` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_pcolormesh.py b/lib/iris/tests/unit/quickplot/test_pcolormesh.py index 826f0e7121..32ae3ed716 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolormesh.py +++ b/lib/iris/tests/unit/quickplot/test_pcolormesh.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.quickplot.pcolormesh` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_plot.py b/lib/iris/tests/unit/quickplot/test_plot.py index 35e1eae470..0a36a3fa4e 100644 --- a/lib/iris/tests/unit/quickplot/test_plot.py +++ b/lib/iris/tests/unit/quickplot/test_plot.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.quickplot.plot` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_points.py b/lib/iris/tests/unit/quickplot/test_points.py index b28c37bf87..3810cdd343 100644 --- a/lib/iris/tests/unit/quickplot/test_points.py +++ b/lib/iris/tests/unit/quickplot/test_points.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.quickplot.points` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/quickplot/test_scatter.py b/lib/iris/tests/unit/quickplot/test_scatter.py index db3e9948a0..c1cf853970 100644 --- a/lib/iris/tests/unit/quickplot/test_scatter.py +++ b/lib/iris/tests/unit/quickplot/test_scatter.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.quickplot.scatter` function.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/representation/__init__.py b/lib/iris/tests/unit/representation/__init__.py index 19824735c1..e943ad149b 100644 --- a/lib/iris/tests/unit/representation/__init__.py +++ b/lib/iris/tests/unit/representation/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
+# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris._representation` module.""" diff --git a/lib/iris/tests/unit/representation/cube_printout/__init__.py b/lib/iris/tests/unit/representation/cube_printout/__init__.py index 15e84606db..50ab3f8e45 100644 --- a/lib/iris/tests/unit/representation/cube_printout/__init__.py +++ b/lib/iris/tests/unit/representation/cube_printout/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris._representation.cube_printout` module.""" diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py index 20d5c47e01..65fb115243 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris._representation.cube_printout.CubePrintout`.""" import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/representation/cube_printout/test_Table.py b/lib/iris/tests/unit/representation/cube_printout/test_Table.py index ff9b6cf51b..e5dba52c61 100644 --- a/lib/iris/tests/unit/representation/cube_printout/test_Table.py +++ b/lib/iris/tests/unit/representation/cube_printout/test_Table.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris._representation.cube_printout.Table`.""" from iris._representation.cube_printout import Table import iris.tests as tests diff --git a/lib/iris/tests/unit/representation/cube_summary/__init__.py b/lib/iris/tests/unit/representation/cube_summary/__init__.py index 684221e6d2..c20a621ba2 100644 --- a/lib/iris/tests/unit/representation/cube_summary/__init__.py +++ b/lib/iris/tests/unit/representation/cube_summary/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :mod:`iris._representation.cube_summary` module.""" diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index 1280c3b38f..d81f680df5 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :class:`iris._representation.cube_summary.CubeSummary`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/test_Future.py b/lib/iris/tests/unit/test_Future.py index 00f6b82519..f0c161b0c4 100644 --- a/lib/iris/tests/unit/test_Future.py +++ b/lib/iris/tests/unit/test_Future.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.Future` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/test_sample_data_path.py b/lib/iris/tests/unit/test_sample_data_path.py index aff2c1088f..ebf3b8108c 100644 --- a/lib/iris/tests/unit/test_sample_data_path.py +++ b/lib/iris/tests/unit/test_sample_data_path.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for :func:`iris.sample_data_path` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/tests/__init__.py b/lib/iris/tests/unit/tests/__init__.py index b0c801b816..b8d27d34d3 100644 --- a/lib/iris/tests/unit/tests/__init__.py +++ b/lib/iris/tests/unit/tests/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.tests` package.""" diff --git a/lib/iris/tests/unit/tests/stock/__init__.py b/lib/iris/tests/unit/tests/stock/__init__.py index ad31134ad4..f91390c2b3 100644 --- a/lib/iris/tests/unit/tests/stock/__init__.py +++ b/lib/iris/tests/unit/tests/stock/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :mod:`iris.tests.stock` module.""" diff --git a/lib/iris/tests/unit/tests/stock/test_netcdf.py b/lib/iris/tests/unit/tests/stock/test_netcdf.py index eb1c289c37..54d7b895cc 100644 --- a/lib/iris/tests/unit/tests/stock/test_netcdf.py +++ b/lib/iris/tests/unit/tests/stock/test_netcdf.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.tests.stock.netcdf` module.""" import shutil diff --git a/lib/iris/tests/unit/tests/test_IrisTest.py b/lib/iris/tests/unit/tests/test_IrisTest.py index ef895e45b6..10de2a7760 100644 --- a/lib/iris/tests/unit/tests/test_IrisTest.py +++ b/lib/iris/tests/unit/tests/test_IrisTest.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.tests.IrisTest` class.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/time/__init__.py b/lib/iris/tests/unit/time/__init__.py index fdbb082434..3483b92e62 100644 --- a/lib/iris/tests/unit/time/__init__.py +++ b/lib/iris/tests/unit/time/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the :mod:`iris.time` module.""" diff --git a/lib/iris/tests/unit/time/test_PartialDateTime.py b/lib/iris/tests/unit/time/test_PartialDateTime.py index 8223f4a518..cfffafea2c 100644 --- a/lib/iris/tests/unit/time/test_PartialDateTime.py +++ b/lib/iris/tests/unit/time/test_PartialDateTime.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Unit tests for the `iris.time.PartialDateTime` class.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/__init__.py b/lib/iris/tests/unit/util/__init__.py index ce94a18f4e..9aed566a19 100644 --- a/lib/iris/tests/unit/util/__init__.py +++ b/lib/iris/tests/unit/util/__init__.py @@ -1,5 +1,6 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Unit tests for the :mod:`iris.util` module.""" diff --git a/lib/iris/tests/unit/util/test__coord_regular.py b/lib/iris/tests/unit/util/test__coord_regular.py index bd9f8f3430..a5e9aca9ed 100644 --- a/lib/iris/tests/unit/util/test__coord_regular.py +++ b/lib/iris/tests/unit/util/test__coord_regular.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test elements of :mod:`iris.util` that deal with checking coord regularity. Specifically, this module tests the following functions: diff --git a/lib/iris/tests/unit/util/test__is_circular.py b/lib/iris/tests/unit/util/test__is_circular.py index 67099f49d6..e67eb38294 100644 --- a/lib/iris/tests/unit/util/test__is_circular.py +++ b/lib/iris/tests/unit/util/test__is_circular.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util._is_circular`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test__mask_array.py b/lib/iris/tests/unit/util/test__mask_array.py index 2245576de9..91a5aca1b4 100644 --- a/lib/iris/tests/unit/util/test__mask_array.py +++ b/lib/iris/tests/unit/util/test__mask_array.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util._mask_array""" import dask.array as da diff --git a/lib/iris/tests/unit/util/test__slice_data_with_keys.py b/lib/iris/tests/unit/util/test__slice_data_with_keys.py index 9c93041521..061a2f5b37 100644 --- a/lib/iris/tests/unit/util/test__slice_data_with_keys.py +++ b/lib/iris/tests/unit/util/test__slice_data_with_keys.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.util._slice_data_with_keys`. diff --git a/lib/iris/tests/unit/util/test_array_equal.py b/lib/iris/tests/unit/util/test_array_equal.py index 38b9652443..77631907a1 100644 --- a/lib/iris/tests/unit/util/test_array_equal.py +++ b/lib/iris/tests/unit/util/test_array_equal.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Test function :func:`iris.util.array_equal`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_broadcast_to_shape.py b/lib/iris/tests/unit/util/test_broadcast_to_shape.py index c060967edf..3df1634ba5 100644 --- a/lib/iris/tests/unit/util/test_broadcast_to_shape.py +++ b/lib/iris/tests/unit/util/test_broadcast_to_shape.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.broadcast_to_shape`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_column_slices_generator.py b/lib/iris/tests/unit/util/test_column_slices_generator.py index fbb5a8f588..899c6b98ba 100644 --- a/lib/iris/tests/unit/util/test_column_slices_generator.py +++ b/lib/iris/tests/unit/util/test_column_slices_generator.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.column_slices_generator`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py index 65e3dec93b..ec8f9904f1 100644 --- a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py +++ b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.demote_dim_coord_to_aux_coord`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_describe_diff.py b/lib/iris/tests/unit/util/test_describe_diff.py index 74bd71389e..0bb13cab94 100644 --- a/lib/iris/tests/unit/util/test_describe_diff.py +++ b/lib/iris/tests/unit/util/test_describe_diff.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Test function :func:`iris.util.describe_diff`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py index de5308a7fa..13aa1e2af4 100644 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ b/lib/iris/tests/unit/util/test_equalise_attributes.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Unit tests for the :func:`iris.util.equalise_attributes` function. @@ -13,13 +14,8 @@ import numpy as np -from iris.coords import AuxCoord -from iris.cube import Cube, CubeAttrsDict +from iris.cube import Cube import iris.tests.stock -from iris.tests.unit.common.metadata.test_CubeMetadata import ( - _TEST_ATTRNAME, - make_attrsdict, -) from iris.util import equalise_attributes @@ -157,111 +153,5 @@ def test_complex_somecommon(self): ) -class TestSplitattributes: - """ - Extra testing for cases where attributes differ specifically by type - - That is, where there is a new possibility of 'mismatch' due to the newer "typing" - of attributes as global or local. - - Specifically, it is now possible that although - "cube1.attributes.keys() == cube2.attributes.keys()", - AND "cube1.attributes[k] == cube2.attributes[k]" for all keys, - YET STILL (possibly) "cube1.attributes != cube2.attributes" - """ - - @staticmethod - def _sample_splitattrs_cube(attr_global_local): - attrs = CubeAttrsDict( - globals=make_attrsdict(attr_global_local[0]), - locals=make_attrsdict(attr_global_local[1]), - ) - return Cube([0], attributes=attrs) - - @staticmethod - def check_equalised_result(cube1, cube2): - equalise_attributes([cube1, cube2]) - # Note: "X" represents a missing attribute, as in test_CubeMetadata - return [ - ( - cube1.attributes.globals.get(_TEST_ATTRNAME, "X") - + cube1.attributes.locals.get(_TEST_ATTRNAME, "X") - ), - ( - cube2.attributes.globals.get(_TEST_ATTRNAME, "X") - + cube2.attributes.locals.get(_TEST_ATTRNAME, "X") - ), - ] - - def test__global_and_local__bothsame(self): - # A trivial case showing that the original globals+locals are both preserved. - cube1 = self._sample_splitattrs_cube("AB") - cube2 = self._sample_splitattrs_cube("AB") - result = self.check_equalised_result(cube1, cube2) - assert result == ["AB", "AB"] - - def test__globals_different(self): - cube1 = self._sample_splitattrs_cube("AX") - cube2 = self._sample_splitattrs_cube("BX") - result = self.check_equalised_result(cube1, cube2) - assert result == ["XX", "XX"] - - def test__locals_different(self): - cube1 = self._sample_splitattrs_cube("XA") - cube2 = self._sample_splitattrs_cube("XB") - result = self.check_equalised_result(cube1, cube2) - assert result == ["XX", "XX"] - - def test__oneglobal_onelocal__different(self): - cube1 = self._sample_splitattrs_cube("AX") - cube2 = self._sample_splitattrs_cube("XB") - result = self.check_equalised_result(cube1, cube2) - assert result == ["XX", "XX"] - - # This case fails without the split-attributes fix. 
- def test__oneglobal_onelocal__same(self): - cube1 = self._sample_splitattrs_cube("AX") - cube2 = self._sample_splitattrs_cube("XA") - result = self.check_equalised_result(cube1, cube2) - assert result == ["XX", "XX"] - - def test__sameglobals_onelocal__different(self): - cube1 = self._sample_splitattrs_cube("AB") - cube2 = self._sample_splitattrs_cube("AX") - result = self.check_equalised_result(cube1, cube2) - assert result == ["XX", "XX"] - - # This case fails without the split-attributes fix. - def test__sameglobals_onelocal__same(self): - cube1 = self._sample_splitattrs_cube("AA") - cube2 = self._sample_splitattrs_cube("AX") - result = self.check_equalised_result(cube1, cube2) - assert result == ["XX", "XX"] - - # This case fails without the split-attributes fix. - def test__differentglobals_samelocals(self): - cube1 = self._sample_splitattrs_cube("AC") - cube2 = self._sample_splitattrs_cube("BC") - result = self.check_equalised_result(cube1, cube2) - assert result == ["XX", "XX"] - - -class TestNonCube: - # Just to assert that we can do operations on non-cube components (like Coords), - # in fact effectively, anything with a ".attributes". - # Even though the docstring does not admit this, we test it because we put in - # special code to preserve it when adding the split-attribute handling. - def test(self): - attrs = [1, 1, 2] - coords = [ - AuxCoord([0], attributes={"a": attr, "b": "all_the_same"}) - for attr in attrs - ] - equalise_attributes(coords) - assert all( - coord.attributes == {"b": "all_the_same"} for coord in coords - ) - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index c27f4f1dcb..cff878a294 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Test function :func:`iris.util.test_file_is_newer`. diff --git a/lib/iris/tests/unit/util/test_find_discontiguities.py b/lib/iris/tests/unit/util/test_find_discontiguities.py index 6965541320..9e043c71bd 100644 --- a/lib/iris/tests/unit/util/test_find_discontiguities.py +++ b/lib/iris/tests/unit/util/test_find_discontiguities.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.find_discontiguities""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_guess_coord_axis.py b/lib/iris/tests/unit/util/test_guess_coord_axis.py deleted file mode 100644 index d946565196..0000000000 --- a/lib/iris/tests/unit/util/test_guess_coord_axis.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
-"""Test function :func:`iris.util.guess_coord_axis`.""" - -import pytest - -from iris.util import guess_coord_axis - - -class TestGuessCoord: - @pytest.mark.parametrize( - "coordinate, axis", - [ - ("longitude", "X"), - ("grid_longitude", "X"), - ("projection_x_coordinate", "X"), - ("latitude", "Y"), - ("grid_latitude", "Y"), - ("projection_y_coordinate", "Y"), - ], - ) - def test_coord(self, coordinate, axis, sample_coord): - sample_coord.standard_name = coordinate - assert guess_coord_axis(sample_coord) == axis - - @pytest.mark.parametrize( - "units, axis", - [ - ("hPa", "Z"), - ("days since 1970-01-01 00:00:00", "T"), - ], - ) - def test_units(self, units, axis, sample_coord): - sample_coord.units = units - assert guess_coord_axis(sample_coord) == axis - - @pytest.mark.parametrize( - "ignore_axis, result", - [ - (True, None), - (False, "X"), - ], - ) - def test_ignore_axis(self, ignore_axis, result, sample_coord): - sample_coord.standard_name = "longitude" - sample_coord.ignore_axis = ignore_axis - - assert guess_coord_axis(sample_coord) == result diff --git a/lib/iris/tests/unit/util/test_mask_cube.py b/lib/iris/tests/unit/util/test_mask_cube.py index 7237f0491c..0123d0cca5 100644 --- a/lib/iris/tests/unit/util/test_mask_cube.py +++ b/lib/iris/tests/unit/util/test_mask_cube.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.mask_cube""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index 197c06e449..a6374f97ad 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.new_axis`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py index 8ad9cbf4c2..0e1e56fee5 100644 --- a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py +++ b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
"""Test function :func:`iris.util.promote_aux_coord_to_dim_coord`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_reverse.py b/lib/iris/tests/unit/util/test_reverse.py index b6da468e7f..7d9a669a9d 100644 --- a/lib/iris/tests/unit/util/test_reverse.py +++ b/lib/iris/tests/unit/util/test_reverse.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.reverse`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_rolling_window.py b/lib/iris/tests/unit/util/test_rolling_window.py index 533e5d5633..3644da9c9c 100644 --- a/lib/iris/tests/unit/util/test_rolling_window.py +++ b/lib/iris/tests/unit/util/test_rolling_window.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.rolling_window`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_squeeze.py b/lib/iris/tests/unit/util/test_squeeze.py index cb4b55c1e6..b5f0a91b99 100644 --- a/lib/iris/tests/unit/util/test_squeeze.py +++ b/lib/iris/tests/unit/util/test_squeeze.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.squeeze`.""" # Import iris.tests first so that some things can be initialised before diff --git a/lib/iris/tests/unit/util/test_unify_time_units.py b/lib/iris/tests/unit/util/test_unify_time_units.py index 2d7a3b6d64..8bee046dad 100644 --- a/lib/iris/tests/unit/util/test_unify_time_units.py +++ b/lib/iris/tests/unit/util/test_unify_time_units.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """Test function :func:`iris.util.array_equal`.""" # import iris tests first so that some things can be initialised before diff --git a/lib/iris/time.py b/lib/iris/time.py index 6ba85a0051..51aac3d46d 100644 --- a/lib/iris/time.py +++ b/lib/iris/time.py @@ -1,17 +1,19 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Time handling. 
-"""Time handling.""" +""" import functools @functools.total_ordering class PartialDateTime: - """Allow partial comparisons against datetime-like objects. - + """ A :class:`PartialDateTime` object specifies values for some subset of the calendar/time fields (year, month, hour, etc.) for comparing with :class:`datetime.datetime`-like instances. @@ -43,7 +45,7 @@ class PartialDateTime: #: A dummy value provided as a workaround to allow comparisons with #: :class:`datetime.datetime`. #: See http://bugs.python.org/issue8005. - #: NB. It doesn't even matter what this value is. + # NB. It doesn't even matter what this value is. timetuple = None def __init__( @@ -56,28 +58,20 @@ def __init__( second=None, microsecond=None, ): - """Allow partial comparisons against datetime-like objects. - - Parameters - ---------- - year : int - The year number as an integer, or None. - month : int - The month number as an integer, or None. - day : int - The day number as an integer, or None. - hour : int - The hour number as an integer, or None. - minute : int - The minute number as an integer, or None. - second : int - The second number as an integer, or None. - microsecond : int - The microsecond number as an integer, or None. - - Examples - -------- - To select any days of the year after the 3rd of April: + """ + Allows partial comparisons against datetime-like objects. + + Args: + + * year (int): + * month (int): + * day (int): + * hour (int): + * minute (int): + * second (int): + * microsecond (int): + + For example, to select any days of the year after the 3rd of April: >>> from iris.time import PartialDateTime >>> import datetime @@ -92,12 +86,20 @@ def __init__( False """ + + #: The year number as an integer, or None. self.year = year + #: The month number as an integer, or None. self.month = month + #: The day number as an integer, or None. self.day = day + #: The hour number as an integer, or None. self.hour = hour + #: The minute number as an integer, or None. self.minute = minute + #: The second number as an integer, or None. self.second = second + #: The microsecond number as an integer, or None. self.microsecond = microsecond def __repr__(self): diff --git a/lib/iris/util.py b/lib/iris/util.py index 10a58fdef0..c040b72b54 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Miscellaneous utility functions. @@ -257,17 +258,10 @@ def guess_coord_axis(coord): This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. - The ``guess_coord_axis`` behaviour can be skipped by setting the coordinate property ``ignore_axis`` - to ``False``. - """ - axis = None - if hasattr(coord, "ignore_axis") and coord.ignore_axis is True: - return axis - - elif coord.standard_name in ( + if coord.standard_name in ( "longitude", "grid_longitude", "projection_x_coordinate", @@ -2071,50 +2065,24 @@ def equalise_attributes(cubes): See more at :doc:`/userguide/real_and_lazy_data`. 
""" - # deferred import to avoid circularity problem - from iris.common._split_attribute_dicts import ( - _convert_splitattrs_to_pairedkeys_dict, - ) - - cube_attrs = [cube.attributes for cube in cubes] - - # Convert all the input dictionaries to ones with 'paired' keys, so each key - # becomes a pair, ('local'/'global', attribute-name), making them specific to each - # "type", i.e. global or local. - # This is needed to ensure that afterwards all cubes will have identical - # attributes, E.G. it treats an attribute which is global on one cube and local - # on another as *not* the same. This is essential to its use in making merges work. - # - # This approach does also still function with "ordinary" dictionaries, or - # :class:`iris.common.mixin.LimitedAttributeDict`, though somewhat inefficiently, - # so the routine works on *other* objects bearing attributes, i.e. not just Cubes. - # That is also important since the original code allows that (though the docstring - # does not admit it). - cube_attrs = [ - _convert_splitattrs_to_pairedkeys_dict(dic) for dic in cube_attrs - ] - + removed = [] # Work out which attributes are identical across all the cubes. - common_keys = list(cube_attrs[0].keys()) + common_keys = list(cubes[0].attributes.keys()) keys_to_remove = set(common_keys) - for attrs in cube_attrs[1:]: - cube_keys = list(attrs.keys()) + for cube in cubes[1:]: + cube_keys = list(cube.attributes.keys()) keys_to_remove.update(cube_keys) common_keys = [ key for key in common_keys - if (key in cube_keys and np.all(attrs[key] == cube_attrs[0][key])) + if ( + key in cube_keys + and np.all(cube.attributes[key] == cubes[0].attributes[key]) + ) ] keys_to_remove.difference_update(common_keys) - # Convert back from the resulting 'paired' keys set, extracting just the - # attribute-name parts, as a set of names to be discarded. - # Note: we don't care any more what type (global/local) these were : we will - # simply remove *all* attributes with those names. - keys_to_remove = set(key_pair[1] for key_pair in keys_to_remove) - - # Remove all the non-matching attributes. - removed = [] + # Remove all the other attributes. 
for cube in cubes: deleted_attributes = { key: cube.attributes.pop(key) @@ -2122,7 +2090,6 @@ def equalise_attributes(cubes): if key in cube.attributes } removed.append(deleted_attributes) - return removed diff --git a/pyproject.toml b/pyproject.toml index 88b39f1601..4f9ade1351 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ authors = [ classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", - "License :: OSI Approved :: BSD License", + "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", "Operating System :: MacOS", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", @@ -48,7 +48,7 @@ keywords = [ "ugrid", "visualisation", ] -license = {text = "BSD-3-Clause"} +license = {text = "LGPL-3.0-or-later"} name = "scitools-iris" requires-python = ">=3.9" @@ -59,7 +59,7 @@ Documentation = "https://scitools-iris.readthedocs.io/en/stable/" Issues = "https://github.com/SciTools/iris/issues" [tool.setuptools] -license-files = ["LICENSE"] +license-files = ["COPYING", "COPYING.LESSER"] zip-safe = false [tool.setuptools.dynamic] diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 631227ca51..9c5ea32d8e 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,50 +1,61 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 261e2a16d1b94dedb72e8d7119ea263c3e0f5a5c4eb2730980eda055cd4683ec +# input_hash: 90bea26e2629b01270a880c650dfec7b34c38d9b6c6ddb4f8c9fee205d0e1ad6 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda#26322ec5d7712c3ded99dd656142b8ce +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 
-https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff +https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 
https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -53,177 +64,223 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.co 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 
-https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe -https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda#f3a8c32aa764c3e7188b4b810fc9d6ce -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b 
+https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e +https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 +https://conda.anaconda.org/conda-forge/linux-64/python-3.10.12-hd12c33a_0_cpython.conda#eb6f1df105f37daedd6dca78523baa75 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 -https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_1.conda#1f95722c94f00b69af69a066c7433714 
-https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py310hd8f1fbe_9.conda#e2047ad2af52c01845f58b580c6cbd5c +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py310hc6cd4ac_0.conda#7f987c519edb4df04d21a282678368cf +https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py310hc6cd4ac_0.conda#b903ef2ce154e97f621fe30d999227ad +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 
+https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 +https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda#b8d67603d43b23ce7e988a5d81a7ab79 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1.tar.bz2#ad5647e517ba68e2868ef2e6e6ff7723 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda#b74e07a054c479e45a83a83fc5be713c -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py310hd41b1e2_0.conda#dc5263dcaa1347e5a456ead3537be27d +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_0.conda#5597d9f9778af6883ae64f0e7d39416c +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 -https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d 
-https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h2372a71_1.conda#cb25177acf28cc35cfa6c1ac1c679e22 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py310ha4c1d20_0.conda#188e72aa313da668464e35309e9a32b0 +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h1fa729e_0.conda#b0f0a014fc04012c05f39df15fe270ce https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb +https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py310h2372a71_0.conda#b631b889b0b4bc2fca7b8b977ca484b2 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_1.conda#bb010e368de4940771368bc3dc4c63e7 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 
+https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda#b23e0147fa5f7a9380e06334c7266ad5 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py310h2372a71_0.conda#1c510e74c87dc9b8fe1f7f9e8dbcef96 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a +https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b +https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py310h2372a71_0.conda#33c03cd5711885c920ddff676fb84f98 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py310h2372a71_0.conda#c2dcff257e040bcda00e2a30a9d85333 
-https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py310hd41b1e2_0.conda#684399f9ddc0b9d6f3b6164f6107098e +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py310h2372a71_0.conda#13df1c4ea94f2e3326b15da1999e5999 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_0.conda#4efe3a76fe724778a7235a2046b53233 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py310h2372a71_0.conda#f939fe2998c888a77b310926a6c666f3 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py310h01dd4db_0.conda#95d87a906d88b5824d7d36eeef091dba -https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py310h582fbeb_0.conda#adcc7ea52e4d39d0a93f6a2ef36c7fd4 +https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 
+https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h056c13c_1.conda#32d925cfd330e0cbb72b7618558a44e8 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py310hc6cd4ac_0.conda#be1a7e420b7bac4ee02353d0e3161918 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h278f3c1_0.conda#f2d3f2542a2467f479e809ac7b901ac2 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py310hb13e2d6_0.conda#d3147cfbf72d6ae7bba10562208f6def -https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310h32c33b7_4.conda#124211262afed349430d9a3de6b51e8f +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py310hf38f957_0.conda#9b55c9041c5a7f80f184a2cb05ec9663 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py310h7cbd5c2_1.conda#11e0099d4571b4974c04386e4ce679ed +https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py310h24ef57a_1.conda#a689e86d7bbab67f889fc384aa72b088 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py310hc6cd4ac_4.conda#345beb10601d5360a15c033d68165a4f https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b 
-https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py310h1f7b6fc_0.conda#31beda75384647959d5792a1a7dc571a -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda#85d2aaa7af046528d339da1e813c3a9f -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py310h2372a71_1.conda#dfcf64f67961eb9686676f96fdb4b4d1 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py310hcc13569_0.conda#30a39c1064e5efc578d83c2a5f7cd749 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h1f7b6fc_1.conda#be6f0382440ccbf9fb01bb19ab1f1fc0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py310hb13e2d6_0.conda#f0063b2885bfae11324a00a693f88781 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310hc3e127f_1.conda#fdaca8d27b3af78d617521eb37b1d055 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310h7cbd5c2_0.conda#7bfbace0788f477da1c26e10a358692d +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb +https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_4.conda#0ca55ca20891d393846695354b32ebc5 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc 
+https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py310h62c0568_0.conda#3cbbc7d0b54df02c9a006d3de14911d9 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py310hba70d50_100.conda#e19392760c7e4da3b9cb0ee5bf61bc4b -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h1f7b6fc_1.conda#857b828a13cdddf568958f7575b25b22 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310hcc13569_1.conda#31ef447724fb19066a9d00a660dab1bd -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310h278f3c1_0.conda#65d42fe14f56d55df8e93d67fa14c92d +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py310ha4c1d20_0.conda#300d3b434872eb84965864f0fcc5b5da +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py310h04931ad_4.conda#db878a0696f9a7980171fd3cf29cca22 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py310hff52083_0.conda#7e454b4a61754714a4a4d183641374da +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py310h6f5dce6_101.conda#0d50bea104512f2728676a8bff8840d3 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 
-https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index d85b20ee07..42e5224fe1 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,50 +1,61 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 51321f928e4883d91354d6e049004532b17675ee9629854a199b34e0854e0bf9 +# input_hash: b73fe0fbcf5caf5854030c02a6233bae6e4061e9f4175a5d8810c6bb3d7701b2 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda#d786502c97404c94d7d58d258a445a65 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-3_cp311.conda#c2e2630ddb68cf52eec74dc7dfab20b5 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff 
+https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff +https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 
+https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -53,176 +64,222 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 
+https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe -https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa 
+https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de 
+https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e +https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.4-hab00c5b_0_cpython.conda#1c628861a2a126b9fc9363ca1b7d014e +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 -https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_1.conda#cce9e7c3f1c307f2a5fb08a2922d6164 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py311ha362b79_9.conda#ced5340f5dc6cff43a80deac8d0e398f +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca 
-https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py311hb755f60_0.conda#88cc84238dda72e11285d9cfcbe43e51 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py311hb755f60_0.conda#257dfede48699e2e6372528d08399e5a +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 +https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f 
-https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1.tar.bz2#46d451f575392c01dc193069bd89766d +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py311h9547e67_0.conda#3ac85c6c226e2a2e4b17864fc2ca88ff +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_0.conda#9904dc4adb5d547cb21e136f98cb24b0 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 -https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h459d7ec_1.conda#490d7fa8675afd1aa6f1b2332d156a45 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py311h64a7726_0.conda#71fd6f1734a0fa64d8f852ae7156ec45 +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e 
-https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb +https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py311h459d7ec_0.conda#60b5332b3989fda37884b92c7afd6a91 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_1.conda#52719a74ad130de8fb5d047dc91f247a -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py311h2582759_0.conda#dfcc3e6e30d6ec2b2bb416fcd8ff4dc1 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9 +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.conda#a700fcb5cedd3e72d0c75d095c7a6eda -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py311h459d7ec_0.conda#12b1c374ee90a1aa11ea921858394dc8 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a +https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b +https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py311h459d7ec_0.conda#5b24692ece82f89e5cb9a469d9619731 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py311h9547e67_0.conda#daf3f23397ab2265d0cdfa339f3627ba +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py311h459d7ec_0.conda#3c2c65575c28b23afc5e4ff721a2fc9f +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_0.conda#5c416db47b7816e437eaf0d46e5c3a3d +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py311h459d7ec_0.conda#8c1ac2c00995248898220c4c1a9d81ab +https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py311ha6c5da5_0.conda#83a988daf5c49e57f7d2086fb6781fe8 -https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py311h0b84326_0.conda#4b24acdc1fbbae9da03147e7d2cf8c8a +https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py311hb755f60_0.conda#17d25ab64a32872b349579fdb07bbdb2 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e 
+https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py311h64a7726_0.conda#fd2f142dcd680413b5ede5d0fb799205 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_4.conda#75d504c6787edc377ebdba087a26a61b +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py311h54ef318_0.conda#2631a9e423855fb586c05f8a5ee8b177 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py311h320fe9a_1.conda#5f92f46bd33917832a99d1660b4075ac +https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py311ha169711_1.conda#92633556d37e88ce45193374d408072c +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_4.conda#3cff4c98f775ff6439b95bb7917702e9 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py311h459d7ec_1.conda#45b8d355bbcdd27588c2d266bcfdff84 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py311h320fe9a_0.conda#3ea3486e16d559dfcb539070ed330a1e -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 
-https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py311h64a7726_0.conda#9ac5334f1b5ed072d3dbc342503d7868 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311h2032efe_1.conda#4ba860ff851768615b1a25b788022750 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_0.conda#1271b2375735e2aaa6d6770dbe2ad087 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb +https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_4.conda#1e105c1a8ea2163507726144b401eb1b -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py311h54ef318_0.conda#9f80753bc008bfc9b95f39d9ff9f1694 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py311he8ad708_100.conda#597b1ad6cb7011b7561c20ea30295cae -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_1.conda#cd36a89a048ad2bcc6d8b43f648fb1d0 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_1.conda#10d1806e20da040c58c36deddf51c70c -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 
-https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py311h64a7726_0.conda#356da36102fc1eeb8a81e6d79e53bc7e +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_4.conda#afe5363b88d2e97266063558a6599bd0 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py311h38be061_0.conda#c056ffab165096669389e5a4eea4dc4d +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311h9a7c333_101.conda#1dc70c7c3352c0ff1f861d866860db37 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index fa5fa80250..e598fba992 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,50 +1,61 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: f50dc073e5fb2216547509366957a7e99607a06a604840563bff4dd4b5daedcb +# input_hash: a96712105b515671c42bd403fde393d6f10f99a02267d05c771ab9ca88f64093 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-4_cp39.conda#bfe4b3259a8ac6cdf0037752904da6a7 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.22.1-hd590300_0.conda#8430bd266c7b2cfbda403f7585d5ee86 
+https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff +https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 
https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_0.conda#68223671a2b68cdf7241eb4679ab2dd4 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda#e5ac5227582d6c83ccf247288c0eb095 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -53,176 +64,223 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.co https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xf86vidmodeproto-2.3.1-h7f98852_1002.tar.bz2#3ceea9668625c18f19530de98b15d5b0 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 
+https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe -https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c 
-https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.6-h232c23b_0.conda#427a3e59d66cb5d145020bd9c6493334 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.33-hf1915f5_2.conda#a55ff0ed12efd86cf3a3dfb750adb950 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-h783c2da_1.conda#70052d6c1e84643e30ffefb21ab6950f -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-hfc55251_7.conda#32ae18eb2a687912fc9e92a501c0a11b +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 
+https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda#c6f951789c888f7bbd2dd6858eab69de +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-h5cf9203_2.conda#dbfb446bd165f61f9c82aed9188e297a +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_0.conda#8ad377fb60abab446a9f02c62b3c2190 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_2.conda#b2f09078f50b9e859aca3f0dc1cc8b7e +https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 +https://conda.anaconda.org/conda-forge/linux-64/python-3.9.17-h0755675_0_cpython.conda#384886ac3580bba3541ce65c992eb192 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-hd590300_1.conda#9bfac7ccd94d54fd21a0501296d60424 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h8ee46fc_1.conda#632413adcd8bc16b515cab87a2932913 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-hd590300_1.conda#e995b155d938b6779da6ace6c6b13816 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h8ee46fc_1.conda#90108a432fb5c6150ccfee3f03388656 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda#7590b76c3d11d21caa44f3fc38ac584a https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 -https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_1.conda#c48418c8b35f1d59ae9ae1174812b40a -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py39h5a03fae_9.conda#d1601752c6f47af7bedf838be3d8ca6b +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda#7f3dbc9179b4dde7da98dfb151d0ad22 
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda#313516e9a4b08b12dfb1e1cd390a96e3 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.6-unix_pyh707e725_0.conda#64dbb3b205546691a61204d1cfb208e3 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py39h3d6467e_0.conda#bfde3cf098e298b81d1c1cbc9c79ab59 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.0-py39h3d6467e_0.conda#3d700ccea39ca04cb8b6210ac653e0b1 +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.2-pyhd8ed1ab_0.conda#de4cb3384374e1411f0454edcf546cdb https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.2-pyhd8ed1ab_0.conda#53522ec72e6adae42bd373ef58357230 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.76.4-hfc55251_0.conda#76ac435b8668f636a39fcb155c3543fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 +https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 
https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_1.conda#c9f74d717e5a2847a9f8b779c54130f2 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1.tar.bz2#41679a052a8ce841c74df1ebc802e411 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-16.0.6-default_h4d60ac6_1.conda#d4f1b86334951062797b483a01b0c765 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py39h7633fee_0.conda#f668e146a2ed03a4e62ffbb98b3115fb +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_0.conda#9c858d105816f454c6b64f3e19184b60 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 -https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39hd1e30aa_1.conda#c2e412b0f11e5983bcfc35d9beb91ecb +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py39h6183b62_0.conda#f1c358d06344bd7f9a293f9af4b9b8fc +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.2.0-pyhd8ed1ab_0.conda#7263924c642d22e311d9e59b839f1b33 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 
+https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39h72bdee0_0.conda#1d54d3a75c3192ab7655d9c3d16809f1 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb +https://conda.anaconda.org/conda-forge/noarch/pygments-2.16.1-pyhd8ed1ab_0.conda#40e5cb18165466773619e5c963f00a7b +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py39hd1e30aa_0.conda#756cb152772a225587a05ca0ec68fc08 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_1.conda#37218233bcdc310e4fde6453bc1b40d8 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248 +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.0.0-pyhd8ed1ab_0.conda#5a7739d0f57ee64133c9d32e6507c46d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-2.0.0-pyhd8ed1ab_0.conda#f5580336fe091d46f9a2ea97da044550 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.8.0-pyha770c72_0.conda#5b1be40a26d10a06f6d4f1f9e19fa0c7 
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda#1da984bbb6e765743e13388ba7b7b2c8 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py39hd1e30aa_0.conda#da334eecb1ea2248e28294c49e6f6d89 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda#c39d6a09fe819de4951c2642629d9115 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.1-pyhd8ed1ab_0.conda#8f467ba2db2b5470d297953d9c1f9c7d +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h8ee46fc_1.conda#9d7bcddf49cbf727730af10e71022c73 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.39-hd590300_0.conda#d88c7fc8a11858fb14761832e4da1954 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a +https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda#2da0451b54c4563c32490cb1b7cf68a1 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b +https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.45.1-py39hd1e30aa_0.conda#616bc0b442acefebdbe97c7b885d771e -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda#2de6a9bc8083b49f09b2f6eb28d3ba3c +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.0-py39h7633fee_0.conda#54e6f32e448fdc273606011f0940d076 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_0.conda#434246edfc30e20c0847d4c2caff0a53 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.42.0-py39hd1e30aa_0.conda#03e44d84ea9dd2432a633407401e5688 
+https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.4-hfc55251_0.conda#dbcec5fd9c6c8be24b23575048755a59 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.1-nompi_h4f84152_100.conda#ff9ae10aa224826c07da7ef26cb0b717 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.0.1-pyhd8ed1ab_0.conda#d978c61aa5fc2c69380d53ad56b5ae86 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 +https://conda.anaconda.org/conda-forge/linux-64/libclang-16.0.6-default_h1cdf331_1.conda#af08bc8704b09630241c50bd9fc3de4a +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h5d7e998_3.conda#c91ea308d7bf70b62ddda568478aa03b +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py39had0adad_0.conda#eeaa413fddccecb2ab7f747bdb55b07f -https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py39haaeba84_0.conda#f97a95fab7c69678ebf6b57396b1323e +https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda#e2783aa3f9235225eec92f9081c5b801 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_4.conda#8f349ca16d30950aa00870484d9d30c4 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.0-pyhd8ed1ab_0.conda#3cfe9b9e958e7238a386933c75d190db https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 
-https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hf1c3bca_1.conda#ae6bfe65e81d9b59a71cc01a2858650f +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.11-py39h3d6467e_0.conda#4eaef850715aff114e2126a2f1a7b1f0 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda#f96688577f1faa58096d06a45136afa2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda#18badd8fa3648d1beb1fcc7f2e0f756e +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h0f8d45d_0.conda#180d4312005bc93f257e2997a8ee41cb +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.3-h977cf35_1.conda#410ed3b168e5a139d12ebaf4143072cd +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.0.1-pyhd8ed1ab_0.conda#54661981fd331e20847d8a49543dd9af https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda#a19fa6cacf80c8a366572853d5890eb4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py39h474f0d3_0.conda#459a58eda3e74dd5e3d596c618e7f20a -https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39hce394fd_4.conda#4b6e79000ec3a495f429b2c1092ed63b -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py39h40cae4c_1.conda#cfe677f02e507f76d6767379e4ff09a9 +https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda#0809187ef9b89a3d94a5c24d13936236 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.0-py39h5ed0f51_1.conda#9c455b3b3b55f13b2094932740cd3efb +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_4.conda#b83a218fa97e9963c858d0db651a7506 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0.conda#3b8ef3a2d80f3d89d0ae7e3c975e6c57 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.7-pyhd8ed1ab_0.conda#db990278c2c00b268eed778de44f6057 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py39h44dd56e_0.conda#baea2f5dfb3ab7b1c836385d2e1daca7 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.11.0-pyhd8ed1ab_0.conda#3bf8f5c3fbab9e0cfffdf5914f021854 
-https://conda.anaconda.org/conda-forge/noarch/identify-2.5.32-pyhd8ed1ab_0.conda#3ef8e9bab1bfaf900bb0a5db8c0c742c -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py39hd1e30aa_1.conda#ca63612907462c8e36edcc9bbacc253e -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py39hddac248_0.conda#961b398d8c421a3752e26f01f2dcbdac -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py39h474f0d3_0.conda#4b401c1516417b4b14aa1249d2f7929d -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h6404dd3_1.conda#05623249055d99c51cde021b525611db +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.8.0-pyhd8ed1ab_0.conda#160a92928fc4a0ca40a64b586a2cf671 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.3-h938bd60_1.conda#1f317eb7f00db75f4112a07476345376 +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.26-pyhd8ed1ab_0.conda#1ca86f154e13f4aa20b48e20d6bbf924 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.2-py39h0126182_0.conda#61cee808ff7830fcceeb4f336cc738b1 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb +https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_4.conda#81310d21bf9d91754c1220c585bb72d6 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.11.0-pyhd8ed1ab_0.conda#a1ee8e3043eee1649f98704ea3e6feae -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.24.2-pyhd8ed1ab_0.conda#a218f3be8ab6185a475c8168a86e18ae +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39h40cae4c_0.conda#24b4bf92e26a46217e37e5928927116b +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.8.0-pyhd8ed1ab_0.conda#974b4a00b0e100e341cd9f179b05f574 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py39he9076e7_0.conda#6085411aa2f0b2b801d3b46e1d3b83c5 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py39h4282601_100.conda#d2809fbf0d8ae7b8ca92c456cb44a7d4 
-https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.5.0-pyha770c72_0.conda#964e3d762e427661c59263435a14c492 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h44dd56e_1.conda#90c5165691fdcb5a9f43907e32ea48b4 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39hddac248_1.conda#8dd2eb1e7aa9a33a92a75bdcea3f0dd0 -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.3-pyha770c72_0.conda#dd64a0e440754ed97610b3e6b502b6b1 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py39h0f8d45d_0.conda#74b1d479057aa11a70779c83262df85e +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h01ceb2d_12.conda#60fd4bdf187f88bac57cdc1a052f2811 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.1-py39h6183b62_0.conda#81212684c03e970520656f1a62ab9d39 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h7e745eb_109.conda#9e208615247477427acbd0900ca7038f +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py39h52134e7_4.conda#e12391692d70732bf1df08b7ecf40095 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.2-py39hf3d152e_0.conda#6ce223b8b14df8bdfa72ac2a10c2fad3 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hec59055_101.conda#c84dbed01258db73689f72abc01c5e1a +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py39h4218a78_101.conda#8f5c25bb7accd1954d8b7fc689c5975c +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_ha7f9e30_1.conda#f3516df9a5e2b2ef3e3be2b350f9e93d +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.6-pyhd8ed1ab_0.conda#5bba7b5823474cb3fcd4e4cbf942da61 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.4-pyhd8ed1ab_0.conda#73dcd0eb2252cbd1530fd1e6e3cbbb03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.3-pyhd8ed1ab_0.conda#fb4d6329a57e20e03d7aecd18c7ca918 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.5-pyhd8ed1ab_0.conda#85466265b76473cc1d02420056cbc4e3 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.7-pyhd8ed1ab_0.conda#01e35beea8aff61cdb445b90a7adf7d4 diff --git a/requirements/py310.yml b/requirements/py310.yml index ced05dd987..2ba8abb7ae 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -16,9 +16,9 @@ dependencies: - cftime >=1.5 - dask-core >=2022.9.0 - libnetcdf !=4.9.1 - - matplotlib-base >=3.5 + - matplotlib >=3.5 - netcdf4 - - numpy >1.21, !=1.24.3 + - numpy >=1.21, !=1.24.3 - python-xxhash - pyproj - scipy @@ -35,7 +35,6 @@ dependencies: - python-stratify # Test dependencies. - - asv_runner - distributed - filelock - imagehash >=4.0 diff --git a/requirements/py311.yml b/requirements/py311.yml index 5f2b23850e..80e112d850 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -16,9 +16,9 @@ dependencies: - cftime >=1.5 - dask-core >=2022.9.0 - libnetcdf !=4.9.1 - - matplotlib-base >=3.5 + - matplotlib >=3.5 - netcdf4 - - numpy >1.21, !=1.24.3 + - numpy >=1.21, !=1.24.3 - python-xxhash - pyproj - scipy @@ -35,7 +35,6 @@ dependencies: - python-stratify # Test dependencies. - - asv_runner - distributed - filelock - imagehash >=4.0 diff --git a/requirements/py39.yml b/requirements/py39.yml index a5b32748e3..ed6a5eda54 100644 --- a/requirements/py39.yml +++ b/requirements/py39.yml @@ -16,9 +16,9 @@ dependencies: - cftime >=1.5 - dask-core >=2022.9.0 - libnetcdf !=4.9.1 - - matplotlib-base >=3.5 + - matplotlib >=3.5 - netcdf4 - - numpy >1.21, !=1.24.3 + - numpy >=1.21, !=1.24.3 - python-xxhash - pyproj - scipy @@ -35,7 +35,6 @@ dependencies: - python-stratify # Test dependencies. - - asv_runner - distributed - filelock - imagehash >=4.0 diff --git a/requirements/pypi-core.txt b/requirements/pypi-core.txt index e286bb97bc..7937f73b4f 100644 --- a/requirements/pypi-core.txt +++ b/requirements/pypi-core.txt @@ -5,8 +5,8 @@ dask[array]>=2022.9.0 # libnetcdf!=4.9.1 (not available on PyPI) matplotlib>=3.5 netcdf4 -numpy>1.21,!=1.24.3 +numpy>=1.21,!=1.24.3 pyproj scipy shapely!=1.8.3 -xxhash +xxhash \ No newline at end of file diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 8e3b24aac6..51a31ef971 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A script to convert the standard names information from the provided XML file into a Python dictionary format. 
@@ -26,8 +27,9 @@ STD_VALUES_FILE_TEMPLATE = ''' # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ This file contains a dictionary of standard value names that are mapped to another dictionary of other standard name attributes. Currently only diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py index 94f2d96829..5d7dd2abf2 100755 --- a/tools/release_do_nothing.py +++ b/tools/release_do_nothing.py @@ -1,8 +1,9 @@ #!/usr/bin/env python3 # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A do-nothing script to hand-hold through the Iris release process. @@ -278,11 +279,6 @@ def finalise_whats_new( whatsnew_title += " [release candidate]" # TODO: automate message = f"In {rsts.release.name}: set the page title to:\n{whatsnew_title}\n" - if not is_release_candidate: - message += ( - "\nBe sure to remove any existing mentions of release " - "candidate from the title.\n" - ) _wait_for_done(message) message = ( @@ -530,63 +526,7 @@ def update_conda_forge( _wait_for_done(message) if is_release_candidate: - message = ( - "Visit the conda-forge feedstock branches page:\n" - "https://github.com/conda-forge/iris-feedstock/branches" - ) - _wait_for_done(message) - - message = ( - "Find the release candidate branch - " - "`rc`/`release-candidate`/similar.\n" - ) - rc_branch = _get_input( - message, - "Input the name of the release candidate branch" - ) - - message = ( - f"Is the latest commit on {rc_branch} over 1 month ago?" - ) - archive_rc = None - while archive_rc is None: - age_check = _get_input(message, "y / n") - if age_check.casefold() == "y".casefold(): - archive_rc = True - elif age_check.casefold() == "n".casefold(): - archive_rc = False - else: - _report_problem("Invalid entry. Please try again ...") - - if archive_rc: - # We chose this odd handling of release candidate branches because - # a persistent branch will gradually diverge as `main` receives - # automatic and manual maintenance (where recreating these on - # another branch is often beyond Iris dev expertise). Advised - # practice from conda-forge is also liable to evolve over time. - # Since there is no benefit to a continuous Git history on the - # release candidate branch, the simplest way to keep it aligned - # with best practice is to regularly create a fresh branch from - # `main`. - - date_string = datetime.today().strftime("%Y%m%d") - message = ( - f"Archive the {rc_branch} branch by appending _{date_string} " - "to its name.\n" - f"e.g. rc_{date_string}\n\n" - f"({__file__} includes an explanation of this in the comments)." 
- ) - _wait_for_done(message) - - message = ( - "Follow the latest conda-forge guidance for creating a new " - "release candidate branch from the `main` branch:\n" - "https://conda-forge.org/docs/maintainer/knowledge_base.html#pre-release-builds\n\n" - "Config file(s) should point to the `rc_iris` label.\n" - ) - rc_branch = _get_input(message, "Input the name of your new branch") - - upstream_branch = rc_branch + upstream_branch = "release-candidate" else: upstream_branch = "main" diff --git a/tools/update_lockfiles.py b/tools/update_lockfiles.py index a81ab8cafc..073f86cda6 100755 --- a/tools/update_lockfiles.py +++ b/tools/update_lockfiles.py @@ -1,7 +1,8 @@ # Copyright Iris contributors # -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A command line utility for generating conda-lock files for the environments that nox uses for testing each different supported version of python. From 05ebf77c4e02c2c5f73f164a1031585eef0aee08 Mon Sep 17 00:00:00 2001 From: stephenworsley <49274989+stephenworsley@users.noreply.github.com> Date: Fri, 1 Mar 2024 17:59:04 +0000 Subject: [PATCH 2/4] Cherry-pick: Fix usage of map_blocks in AreaWeighted and elsewhere (#5767) * fix usage of map_blocks * fix map_blocks for non-lazy data * add benchmark * unskip benchmark * add benchmark * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * remove benchmarks * remove unnecessary import * What's New entry. * map_complete_blocks docstring. * map_complete_blocks returns. * Typo. * Typo. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo --- lib/iris/_lazy_data.py | 47 +++++++++++++++++++++-------- lib/iris/analysis/__init__.py | 10 +++--- lib/iris/analysis/_area_weighted.py | 9 +++--- lib/iris/analysis/_regrid.py | 10 +++--- 4 files changed, 46 insertions(+), 30 deletions(-) diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 4c294a7d2f..7d966ad772 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -389,10 +389,11 @@ def lazy_elementwise(lazy_array, elementwise_op): return da.map_blocks(elementwise_op, lazy_array, dtype=dtype) -def map_complete_blocks(src, func, dims, out_sizes): +def map_complete_blocks(src, func, dims, out_sizes, *args, **kwargs): """Apply a function to complete blocks. Complete means that the data is not chunked along the chosen dimensions. + Uses :func:`dask.array.map_blocks` to implement the mapping. Args: @@ -404,27 +405,47 @@ def map_complete_blocks(src, func, dims, out_sizes): Dimensions that cannot be chunked. * out_sizes (tuple of int): Output size of dimensions that cannot be chunked. + *args : tuple + Additional arguments to pass to `func`. + **kwargs : dict + Additional keyword arguments to pass to `func`. + + Returns + ------- + Array-like + + See Also + -------- + :func:`dask.array.map_blocks` : The function used for the mapping. """ + data = None + result = None + if is_lazy_data(src): data = src elif not hasattr(src, "has_lazy_data"): # Not a lazy array and not a cube. So treat as ordinary numpy array. 
- return func(src) + result = func(src, *args, **kwargs) elif not src.has_lazy_data(): - return func(src.data) + result = func(src.data, *args, **kwargs) else: data = src.lazy_data() - # Ensure dims are not chunked - in_chunks = list(data.chunks) - for dim in dims: - in_chunks[dim] = src.shape[dim] - data = data.rechunk(in_chunks) + if result is None and data is not None: + # Ensure dims are not chunked + in_chunks = list(data.chunks) + for dim in dims: + in_chunks[dim] = src.shape[dim] + data = data.rechunk(in_chunks) - # Determine output chunks - out_chunks = list(data.chunks) - for dim, size in zip(dims, out_sizes): - out_chunks[dim] = size + # Determine output chunks + out_chunks = list(data.chunks) + for dim, size in zip(dims, out_sizes): + out_chunks[dim] = size - return data.map_blocks(func, chunks=out_chunks, dtype=src.dtype) + result = data.map_blocks( + func, *args, chunks=out_chunks, dtype=src.dtype, **kwargs + ) + + return result diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index f00c3dd850..26064bc771 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1472,18 +1472,16 @@ def _percentile(data, percent, fast_percentile_method=False, **kwargs): percent = [percent] percent = np.array(percent) - # Perform the percentile calculation. - _partial_percentile = functools.partial( + result = iris._lazy_data.map_complete_blocks( + data, _calc_percentile, + (-1,), + percent.shape, percent=percent, fast_percentile_method=fast_percentile_method, **kwargs, ) - result = iris._lazy_data.map_complete_blocks( - data, _partial_percentile, (-1,), percent.shape - ) - # Check whether to reduce to a scalar result, as per the behaviour # of other aggregators. if result.shape == (1,): diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index edead3948a..956afc8ba0 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -1097,8 +1097,11 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( ) = regrid_info # Calculate new data array for regridded cube. - regrid = functools.partial( + new_data = map_complete_blocks( + src_cube, _regrid_area_weighted_array, + (src_y_dim, src_x_dim), + meshgrid_x.shape, x_dim=src_x_dim, y_dim=src_y_dim, weights_info=weights_info, @@ -1106,10 +1109,6 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( mdtol=mdtol, ) - new_data = map_complete_blocks( - src_cube, regrid, (src_y_dim, src_x_dim), meshgrid_x.shape - ) - # Wrap up the data as a Cube. _regrid_callback = functools.partial( diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 4592a0ede7..0039b337e8 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -956,9 +956,11 @@ def __call__(self, src): x_dim = src.coord_dims(src_x_coord)[0] y_dim = src.coord_dims(src_y_coord)[0] - # Define regrid function - regrid = functools.partial( + data = map_complete_blocks( + src, self._regrid, + (y_dim, x_dim), + sample_grid_x.shape, x_dim=x_dim, y_dim=y_dim, src_x_coord=src_x_coord, @@ -969,10 +971,6 @@ def __call__(self, src): extrapolation_mode=self._extrapolation_mode, ) - data = map_complete_blocks( - src, regrid, (y_dim, x_dim), sample_grid_x.shape - ) - # Wrap up the data as a Cube. _regrid_callback = functools.partial( self._regrid, From 789d75fd3524d6929da32c70a6a1180d05410048 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Mon, 4 Mar 2024 14:04:03 +0000 Subject: [PATCH 3/4] What's New patch. 
--- docs/src/whatsnew/3.7.rst | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/3.7.rst b/docs/src/whatsnew/3.7.rst index 71ce4da735..f5070b341d 100644 --- a/docs/src/whatsnew/3.7.rst +++ b/docs/src/whatsnew/3.7.rst @@ -36,6 +36,21 @@ This document explains the changes made to Iris for this release any issues or feature requests for improving Iris. Enjoy! +v3.7.1 (04 Mar 2024) +==================== + +.. dropdown:: v3.7.1 Patches + :color: primary + :icon: alert + :animate: fade-in + + The patches in this release of Iris include: + + #. `@stephenworsley`_ fixed a potential memory leak for Iris uses of + :func:`dask.array.map_blocks`; known specifically to be a problem in the + :class:`iris.analysis.AreaWeighted` regridder. (:pull:`5767`) + + 📢 Announcements ================ @@ -47,7 +62,7 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles lazy data. (:pull:`5307`) - + .. _concat_warnings: #. `@acchamber`_ added error and warning messages about coordinate overlaps to @@ -70,11 +85,11 @@ This document explains the changes made to Iris for this release ============= #. `@acchamber`_ fixed a bug with :func:`~iris.util.unify_time_units` so it does not block - concatenation through different data types in rare instances. (:pull:`5372`) + concatenation through different data types in rare instances. (:pull:`5372`) #. `@acchamber`_ removed some obsolete code that prevented extraction of time points from cubes with bounded times (:pull:`5175`) - + .. _cftime_warnings: #. `@rcomer`_ modified pp-loading to avoid a ``cftime`` warning for non-standard From 638699878091ae9092e2544e38695ab00e8892a8 Mon Sep 17 00:00:00 2001 From: Martin Yeo Date: Mon, 4 Mar 2024 14:20:17 +0000 Subject: [PATCH 4/4] Retrospective linkcheck fixes. --- docs/src/conf.py | 2 ++ docs/src/whatsnew/1.4.rst | 12 ++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/src/conf.py b/docs/src/conf.py index 7f7322c1f8..9d4d22e61f 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -387,6 +387,8 @@ def _dotv(version): "https://www.metoffice.gov.uk/", "https://biggus.readthedocs.io/", "https://stickler-ci.com/", + "https://twitter.com/scitools_iris", + "https://stackoverflow.com/questions/tagged/python-iris", ] # list of sources to exclude from the build. diff --git a/docs/src/whatsnew/1.4.rst b/docs/src/whatsnew/1.4.rst index 989198296c..d0c234adef 100644 --- a/docs/src/whatsnew/1.4.rst +++ b/docs/src/whatsnew/1.4.rst @@ -58,7 +58,7 @@ Features * Use the latest release of Cartopy, v0.8.0. -.. _OPeNDAP: http://www.opendap.org/about +.. _OPeNDAP: https://www.opendap.org/ .. _exp-regrid: Experimental Regridding Enhancements @@ -107,7 +107,7 @@ systems. This uses the ESMF library functions, via the ESMPy interface. For example:: - from iris.experimental.regrid_conservative import regrid_conservative_via_esmpy + from iris.experimental.regrid_conservative import regrid_conservative_via_esmpy regridded_cube = regrid_conservative_via_esmpy(source_cube, target_grid_cube) @@ -128,7 +128,7 @@ See :mod:`iris.pandas` for more details. Load Cubes From the Internet via OPeNDAP ---------------------------------------- -Cubes can now be loaded directly from the internet, via OPeNDAP_. +Cubes can now be loaded directly from the internet, via OPeNDAP_. For example:: @@ -190,13 +190,13 @@ The solution is still under discussion: See :issue:`519`. 
Simplified Resource Configuration --------------------------------- -A new configuration variable called :data:`iris.config.TEST_DATA_DIR` +A new configuration variable called :data:`iris.config.TEST_DATA_DIR` has been added, replacing the previous combination of :data:`iris.config.MASTER_DATA_REPOSITORY` and :data:`iris.config.DATA_REPOSITORY`. This constant should be the path to a directory containing the test data required by the unit tests. It can be set by adding a ``test_data_dir`` entry to the ``Resources`` section of -``site.cfg``. See :mod:`iris.config` for more details. +``site.cfg``. See :mod:`iris.config` for more details. .. _grib_params: @@ -208,7 +208,7 @@ Extended GRIB Parameter Translation - Now translates some codes on GRIB2 output. - Some GRIB2 params may load with a different standard_name. - + .. _one-d-linear:
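For context on the `map_complete_blocks` rework in PATCH 2/4 above: the extra arguments that used to be baked into a `functools.partial` closure are now forwarded straight through to `dask.array.map_blocks`. Below is a minimal, self-contained sketch of that calling pattern only — the `scale_and_offset` function, the toy array and the `factor`/`offset` keywords are illustrative stand-ins, not Iris code.

import dask.array as da

def scale_and_offset(block, factor=1.0, offset=0.0):
    # Toy per-block operation standing in for the regrid/percentile callables
    # that Iris hands to map_complete_blocks.
    return block * factor + offset

lazy = da.ones((4, 6), chunks=(2, 3))

# Closure style that PATCH 2/4 moves away from:
#   op = functools.partial(scale_and_offset, factor=2.0, offset=1.0)
#   result = lazy.map_blocks(op, dtype=lazy.dtype)

# Pass-through style matching the reworked signature
# map_complete_blocks(src, func, dims, out_sizes, *args, **kwargs):
# extra arguments go to dask.array.map_blocks, which forwards them to
# `func` block by block.
result = lazy.map_blocks(scale_and_offset, dtype=lazy.dtype, factor=2.0, offset=1.0)

print(result.compute())  # a 4x6 array filled with 3.0

Letting dask see the arguments directly, rather than hiding them inside a closure, is — as the patch and the v3.7.1 What's New entry describe it — the route taken to avoid the memory-leak behaviour seen with the :class:`iris.analysis.AreaWeighted` regridder.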
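On the PATCH 4/4 change to docs/src/conf.py: Sphinx treats each `linkcheck_ignore` entry as a regular expression matched against outgoing URLs, which is why plain URL strings can be listed as in the patch (an unescaped '.' just matches any character). A small sketch of the option, outside the real conf.py, using the two URLs added by the patch; the escaping here is an optional strictness choice, not part of the patch.

# Sphinx conf.py fragment (sketch): skip these URLs in the linkcheck builder.
linkcheck_ignore = [
    r"https://twitter\.com/scitools_iris",
    r"https://stackoverflow\.com/questions/tagged/python-iris",
]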