[Git][debian-gis-team/metpy][upstream] New upstream version 1.6.2+ds

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Tue Apr 16 09:07:29 BST 2024



Antonio Valentino pushed to branch upstream at Debian GIS Project / metpy


Commits:
30b0a372 by Antonio Valentino at 2024-04-16T06:47:11+00:00
New upstream version 1.6.2+ds
- - - - -


28 changed files:

- .codespellignore
- docs/conf.py
- docs/userguide/gempak.rst
- examples/calculations/Smoothing.py
- examples/gridding/Inverse_Distance_Verification.py
- examples/gridding/Natural_Neighbor_Verification.py
- pyproject.toml
- setup.cfg
- src/metpy/_version.py
- src/metpy/calc/basic.py
- src/metpy/calc/kinematics.py
- src/metpy/calc/thermo.py
- src/metpy/calc/tools.py
- src/metpy/io/gempak.py
- src/metpy/io/gini.py
- src/metpy/io/metar.py
- src/metpy/io/nexrad.py
- src/metpy/plots/declarative.py
- src/metpy/static-data-manifest.txt
- src/metpy/units.py
- src/metpy/xarray.py
- tests/calc/test_basic.py
- tests/calc/test_calc_tools.py
- tests/calc/test_thermo.py
- tests/io/test_gempak.py
- tests/io/test_nexrad.py
- tests/test_xarray.py
- tests/units/test_units.py


Changes:

=====================================
.codespellignore
=====================================
@@ -6,12 +6,11 @@
     thta = 2
                 lambda grid: grid if grid.PARM in parameter else False,
                             col_head.SELV,
-                            'SELV': col_head.SELV,
+                'SELV': col_head.SELV,
                                 col_head.SELV,
                                 row_head.SELV,
-                           'SELV': row_head.SELV,
-                       'SELV': col_head.SELV,
-                           'SELV': col_head.SELV,
+                'SELV': row_head.SELV,
+                'SELV': col_head.SELV,
 Klystron Warmup    Integer*2  N/A  0 to 1  1  0=Normal, 1=Preheat  146
 # GFS, NAM, RAP, or other gridded dataset (e.g., NARR).
 # This attribute can be set to False if the vector components are grid relative (e.g., for NAM


=====================================
docs/conf.py
=====================================
@@ -206,6 +206,10 @@ pygments_style = 'sphinx'
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 html_theme = 'pydata_sphinx_theme'
+
+# Use the version set in CI as necessary, which allows building "release" docs on a
+# maintenance branch--strip leading 'v' since our json file doesn't have the v on the 'version'
+doc_version = os.environ.get('DOC_VERSION', 'dev' if 'dev' in version else version).lstrip('v')
 html_theme_options = {
     'external_links': [
         {'name': 'Release Notes', 'url': 'https://github.com/Unidata/MetPy/releases'},
@@ -240,7 +244,7 @@ html_theme_options = {
     'navbar_end': ['navbar-icon-links', 'theme-switcher'],
     'switcher': {
         'json_url': 'https://unidata.github.io/MetPy/pst-versions.json',
-        'version_match': 'dev' if 'dev' in version else f'v{version}',
+        'version_match': doc_version
     },
     'navigation_with_keys': False
 }
@@ -441,7 +445,9 @@ linkcheck_ignore = [
     r'https://doi\.org/10\.1029/2010GL045777',
     r'https://doi\.org/10\.1098/rspa\.2004\.1430',
     # Currently giving certificate errors on GitHub
-    r'https://library.wmo.int/.*'
+    r'https://library.wmo.int/.*',
+    # For some reason GHA gets a 403 from Stack Overflow
+    r'https://stackoverflow.com/questions/tagged/metpy'
     ]
 
 # Dictionary of URL redirects allowed
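
A minimal sketch of the new docs version selection above, assuming a setuptools_scm-style version string; the real value comes from Sphinx's build environment:

import os

# 'version' would be e.g. '1.6.2' for a release or '1.7.0.dev123' otherwise.
version = '1.6.2'

# CI can force a specific switcher entry via DOC_VERSION (e.g. 'v1.6');
# otherwise fall back to 'dev' for development builds or the release version
# itself. The leading 'v' is stripped because the switcher JSON stores
# versions without it.
doc_version = os.environ.get('DOC_VERSION', 'dev' if 'dev' in version else version).lstrip('v')
print(doc_version)  # -> '1.6.2'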


=====================================
docs/userguide/gempak.rst
=====================================
@@ -18,6 +18,7 @@ blue is uncertain of parity, and white is unevaluated.
    <style type="text/css">
     .wy-table-responsive {border-style:solid; border-width:1px;}
     .wy-table-responsive td, th{border-style:solid;border-width:1px;}
+    .wy-table-responsive td:nth-child(3){word-break: break-word;}
     .wy-table-responsive .tg-implemented{background-color: #D9EAD3}
     .wy-table-responsive .tg-notimplemented{background-color: #F4CDCD}
     .wy-table-responsive .tg-yes{background-color: #93C47D}
@@ -329,7 +330,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented">Absolute vorticity</td>
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.absolute_vorticity.html">metpy.calc.absolute_vorticity</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -393,7 +394,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented">Total deformation</td>
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.total_deformation.html#metpy.calc.total_deformation">metpy.calc.total_deformation</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -417,7 +418,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented">Divergence</td>
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.h_divergence.html#metpy.calc.h_divergence">metpy.calc.h_divergence</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -465,7 +466,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented">Frontogenesis</td>
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.frontogenesis.html#metpy.calc.frontogenesis">metpy.calc.frontogenesis</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -511,9 +512,9 @@ blue is uncertain of parity, and white is unevaluated.
       <tr>
         <td class="tg-implemented">LAP(S)</td>
         <td class="tg-implemented">Laplacian operator</td>
-        <td class="tg-implemented"><a href="../api/generated/metpy.calc.laplacian.html#metpy.calc.laplacian">metpy.calc.laplacian</a></td>
+        <td class="tg-implemented"><a href="../api/generated/metpy.calc.geospatial_laplacian.html#metpy.calc.geospatial_laplacian">metpy.calc.geospatial_laplacian</a></td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-no">Yes - Different Answer</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -674,7 +675,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.potential_vorticity_baroclinic.html#metpy.calc.potential_vorticity_baroclinic">metpy.calc.potential_vorticity_baroclinic</a>
                                    <br><a href="../api/generated/metpy.calc.potential_vorticity_baroclinic.html#metpy.calc.potential_vorticity_barotropic">metpy.calc.potential_vorticity_barotropic</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -754,7 +755,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented">Shearing deformation</td>
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.shearing_deformation.html#metpy.calc.shearing_deformation">metpy.calc.shearing_deformation</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -786,7 +787,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented">Stretching deformation</td>
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.stretching_deformation.html#metpy.calc.stretching_deformation">metpy.calc.stretching_deformation</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -906,7 +907,7 @@ blue is uncertain of parity, and white is unevaluated.
         <td class="tg-implemented">Vorticity</td>
         <td class="tg-implemented"><a href="../api/generated/metpy.calc.v_vorticity.html#metpy.calc.v_vorticity">metpy.calc.v_vorticity</a></td>
         <td class="tg-yes">Yes</td>
-        <td class="tg-yes">Yes* Still needs spherical correction terms</td>
+        <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
       </tr>
       <tr>
@@ -1040,7 +1041,7 @@ blue is uncertain of parity, and white is unevaluated.
       <tr>
         <td class="tg-implemented">GRAD(S)</td>
         <td class="tg-implemented">Gradient of a scalar</td>
-        <td class="tg-implemented"><a href="../api/generated/metpy.calc.gradient.html#metpy.calc.gradient">metpy.calc.gradient</a></td>
+        <td class="tg-implemented"><a href="../api/generated/metpy.calc.geospatial_gradient.html#metpy.calc.geospatial_gradient">metpy.calc.geospatial_gradient</a></td>
         <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>
         <td class="tg-yes">Yes</td>


=====================================
examples/calculations/Smoothing.py
=====================================
@@ -22,11 +22,11 @@ import metpy.calc as mpcalc
 
 ###########################################
 # Start with a base pattern with random noise
-np.random.seed(61461542)
+rng = np.random.default_rng(61461542)
 size = 128
 x, y = np.mgrid[:size, :size]
 distance = np.sqrt((x - size / 2) ** 2 + (y - size / 2) ** 2)
-raw_data = np.random.random((size, size)) * 0.3 + distance / distance.max() * 0.7
+raw_data = rng.random((size, size)) * 0.3 + distance / distance.max() * 0.7
 
 fig, ax = plt.subplots(1, 1, figsize=(4, 4))
 ax.set_title('Raw Data')
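
The hunk above migrates the example from the legacy global-state RNG to NumPy's Generator API. A minimal sketch of the difference, assuming reproducibility (not identical values) is the goal, since the two APIs produce different streams for the same seed:

import numpy as np

# Legacy API (removed): seeds a hidden global RandomState shared by all
# np.random.* calls.
np.random.seed(61461542)
legacy = np.random.random((2, 2))

# Generator API (new): an explicit, self-contained rng object with no global
# state.
rng = np.random.default_rng(61461542)
modern = rng.random((2, 2))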


=====================================
examples/gridding/Inverse_Distance_Verification.py
=====================================
@@ -46,9 +46,9 @@ def draw_circle(ax, x, y, r, m, label):
 # Generate random x and y coordinates, and observation values proportional to x * y.
 #
 # Set up two test grid locations at (30, 30) and (60, 60).
-np.random.seed(100)
 
-pts = np.random.randint(0, 100, (10, 2))
+pts = np.array([[8, 24], [67, 87], [79, 48], [10, 94], [52, 98],
+                [53, 66], [98, 14], [34, 24], [15, 60], [58, 16]])
 xp = pts[:, 0]
 yp = pts[:, 1]
 zp = xp**2 / 1000


=====================================
examples/gridding/Natural_Neighbor_Verification.py
=====================================
@@ -67,9 +67,11 @@ from metpy.interpolate.points import natural_neighbor_point
 # estimate a value using natural neighbor interpolation.
 #
 # The locations of these observations are then used to generate a Delaunay triangulation.
-np.random.seed(100)
 
-pts = np.random.randint(0, 100, (10, 2))
+# Some randomly selected points
+pts = np.array([[8, 24], [67, 87], [79, 48], [10, 94], [52, 98],
+                [53, 66], [98, 14], [34, 24], [15, 60], [58, 16]])
+
 xp = pts[:, 0]
 yp = pts[:, 1]
 zp = (pts[:, 0] * pts[:, 0]) / 1000
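
Both gridding examples replace a seeded np.random.randint call with its hard-coded output, so they no longer depend on the legacy RNG at all. For reference, a sketch of the removed generation step; whether it reproduces exactly these points depends on NumPy's legacy RandomState stream:

import numpy as np

# The removed generation step (legacy RandomState), kept only to show where
# the ten hard-coded (x, y) points presumably came from.
np.random.seed(100)
pts = np.random.randint(0, 100, (10, 2))
xp, yp = pts[:, 0], pts[:, 1]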


=====================================
pyproject.toml
=====================================
@@ -1,5 +1,5 @@
 [build-system]
-requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4"]
+requires = ["setuptools>=61", "wheel", "setuptools_scm[toml]>=3.4"]
 build-backend = "setuptools.build_meta"
 
 [project]
@@ -113,13 +113,15 @@ filterwarnings = [
     "ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pint.facets.plain.quantity:575",
     # PyProj automatically dispatching for single point, will be waiting for NumPy 2.0 to address
     # See: https://github.com/pyproj4/pyproj/issues/1309
-    "ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pyproj.geod:404"
+    "ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pyproj.geod:404",
+    # Pandas >=2.2 warns about PyArrow being a future dependency
+    'ignore:\nPyarrow will become a required dependency of pandas:DeprecationWarning',
 ]
 
 [tool.ruff]
 line-length = 95
 exclude = ["docs", "build", "src/metpy/io/_metar_parser/metar_parser.py"]
-select = ["A", "B", "C", "D", "E", "E226", "F", "G", "I", "N", "Q", "R", "S", "T", "U", "W"]
+select = ["A", "B", "C", "CPY001", "D", "E", "E226", "F", "G", "I", "N", "NPY", "Q", "R", "S", "SIM", "T", "U", "W"]
 ignore = ["F405", "I001", "RET504", "RET505", "RET506", "RET507", "RUF100"]
 preview = true
 explicit-preview-rules = true
@@ -129,7 +131,7 @@ explicit-preview-rules = true
 "docs/doc-server.py" = ["T201"]
 "examples/*.py" = ["D", "T201", "B018"]
 "src/metpy/_vendor/xarray.py" = ["UP032"]
-"src/metpy/deprecation.py" = ["UP032"]
+"src/metpy/deprecation.py" = ["CPY001", "UP032"]
 "src/metpy/testing.py" = ["S101"]
 "src/metpy/io/nexrad.py" = ["S101"]
 "tests/*/*.py" = ["S101"]
@@ -139,11 +141,16 @@ explicit-preview-rules = true
 "tools/nexrad_msgs/parse_spec.py" = ["B028", "S101"]
 "tutorials/*.py" = ["D", "T201", "B018"]
 
-[tool.ruff.flake8-quotes]
+[tool.ruff.lint.flake8-copyright]
+# Needed to add a comma
+notice-rgx = "(?i)Copyright\\s+(\\(C\\)\\s+)?\\d{4}([-,]\\d{4})*"
+author = "MetPy Developers"
+
+[tool.ruff.lint.flake8-quotes]
 inline-quotes = "single"
 multiline-quotes = "double"
 
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
 known-first-party = ["metpy", "flake8_metpy"]
 force-single-line = false
 relative-imports-order = "closest-to-furthest"
@@ -151,10 +158,10 @@ force-sort-within-sections = true
 order-by-type = false
 combine-as-imports = true
 
-[tool.ruff.mccabe]
+[tool.ruff.lint.mccabe]
 max-complexity = 61
 
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
 convention = "numpy"
 
 [tool.setuptools_scm]
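
The pyproject.toml hunks migrate ruff's lint settings to the newer tool.ruff.lint.* tables and enable the flake8-copyright (CPY001) preview rule. A quick check of the updated notice-rgx in plain Python; ruff applies it through its own engine, so this is only illustrative:

import re

# The new pattern accepts comma-separated year lists ('2008,2019') as well as
# ranges ('2008-2019'), which is the comma the diff comment refers to.
notice = re.compile(r'(?i)Copyright\s+(\(C\)\s+)?\d{4}([-,]\d{4})*')

assert notice.match('Copyright (c) 2008,2019 MetPy Developers.')
assert notice.match('Copyright 2015-2022 MetPy Developers.')
assert not notice.match('(c) 2019 MetPy Developers.')  # missing the keyword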


=====================================
setup.cfg
=====================================
@@ -10,11 +10,10 @@ rst-roles = class, data, doc, func, meth, mod
 rst-directives = plot, versionchanged
 known-modules = matplotlib:[matplotlib,mpl_toolkits],netcdf4:[netCDF4]
 exclude = docs build src/metpy/io/_metar_parser/metar_parser.py
-select = C E301 E302 E303 E304 E305 E306 I R
+select = E301 E302 E303 E304 E305 E306 I R
 ignore = F405 W503 RST902 SIM106
 per-file-ignores = examples/*.py: D MPY001
                    tutorials/*.py: D MPY001
-                   src/metpy/deprecation.py: C801
                    src/metpy/calc/*.py: RST306
                    src/metpy/interpolate/*.py: RST306
                    src/metpy/io/*.py: RST306


=====================================
src/metpy/_version.py
=====================================
@@ -8,16 +8,29 @@ def get_version():
     """Get MetPy's version.
 
     Either get it from package metadata, or get it using version control information if
-    a development install.
+    an editable installation.
     """
+    from importlib.metadata import distribution, PackageNotFoundError
+
     try:
-        from setuptools_scm import get_version
-        return get_version(root='../..', relative_to=__file__,
-                           version_scheme='post-release')
-    except (ImportError, LookupError):
-        from importlib.metadata import PackageNotFoundError, version
-
-        try:
-            return version(__package__)
-        except PackageNotFoundError:
-            return 'Unknown'
+        dist = distribution(__package__)
+
+        # First see if we can find this file from pip to check for an editable install
+        if direct := dist.read_text('direct_url.json'):
+            import json
+
+            # Parse file and look for editable key
+            info = json.loads(direct)
+            if info.get('dir_info', {}).get('editable'):
+                import contextlib
+
+                # If editable try to get version using setuptools_scm
+                with contextlib.suppress(ImportError, LookupError):
+                    from setuptools_scm import get_version
+                    return get_version(root='../..', relative_to=__file__,
+                                       version_scheme='post-release')
+
+        # With any error or not an editable install, we use the version from the metadata
+        return dist.version
+    except PackageNotFoundError:
+        return 'Unknown'
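
The rewritten get_version consults pip's PEP 610 metadata (direct_url.json) to decide whether the install is editable before reaching for setuptools_scm. A self-contained sketch of just that detection step:

import json
from importlib.metadata import PackageNotFoundError, distribution

def is_editable_install(package):
    """Best-effort check for a pip editable install via PEP 610 metadata."""
    try:
        # pip writes direct_url.json into the dist-info of direct installs.
        direct = distribution(package).read_text('direct_url.json')
    except PackageNotFoundError:
        return False
    if not direct:
        return False
    return bool(json.loads(direct).get('dir_info', {}).get('editable'))

print(is_editable_install('metpy'))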


=====================================
src/metpy/calc/basic.py
=====================================
@@ -52,6 +52,13 @@ def wind_speed(u, v):
     --------
     wind_components
 
+    Examples
+    --------
+    >>> from metpy.calc import wind_speed
+    >>> from metpy.units import units
+    >>> wind_speed(10. * units('m/s'), 10. * units('m/s'))
+    <Quantity(14.1421356, 'meter / second')>
+
     """
     return np.hypot(u, v)
 
@@ -88,6 +95,13 @@ def wind_direction(u, v, convention='from'):
     In the case of calm winds (where `u` and `v` are zero), this function returns a direction
     of 0.
 
+    Examples
+    --------
+    >>> from metpy.calc import wind_direction
+    >>> from metpy.units import units
+    >>> wind_direction(10. * units('m/s'), 10. * units('m/s'))
+    <Quantity(225.0, 'degree')>
+
     """
     wdir = units.Quantity(90., 'deg') - np.arctan2(-v, -u)
     origshape = wdir.shape
@@ -141,7 +155,7 @@ def wind_components(speed, wind_direction):
     >>> from metpy.calc import wind_components
     >>> from metpy.units import units
     >>> wind_components(10. * units('m/s'), 225. * units.deg)
-     (<Quantity(7.07106781, 'meter / second')>, <Quantity(7.07106781, 'meter / second')>)
+    (<Quantity(7.07106781, 'meter / second')>, <Quantity(7.07106781, 'meter / second')>)
 
     .. versionchanged:: 1.0
        Renamed ``wdir`` parameter to ``wind_direction``
@@ -906,10 +920,7 @@ def smooth_window(scalar_grid, window, passes=1, normalize_weights=True):
         raise ValueError('The shape of the smoothing window must be odd in all dimensions.')
 
     # Optionally normalize the supplied weighting window
-    if normalize_weights:
-        weights = window / np.sum(window)
-    else:
-        weights = window
+    weights = window / np.sum(window) if normalize_weights else window
 
     # Set indexes
     # Inner index for the centered array elements that are affected by the smoothing
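
With the doctests added above, the three wind helpers round-trip cleanly. A small sketch, with values taken from the doctests in this commit:

from metpy.calc import wind_components, wind_direction, wind_speed
from metpy.units import units

u = v = 10. * units('m/s')
speed = wind_speed(u, v)          # ~14.14 m/s
direction = wind_direction(u, v)  # 225 deg; 'from' convention by default

# Round trip: recover the original components from speed and direction.
u2, v2 = wind_components(speed, direction)  # both ~10 m/s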


=====================================
src/metpy/calc/kinematics.py
=====================================
@@ -629,10 +629,7 @@ def geostrophic_wind(height, dx=None, dy=None, latitude=None, x_dim=-1, y_dim=-2
 
     """
     f = coriolis_parameter(latitude)
-    if height.dimensionality['[length]'] == 2.0:
-        norm_factor = 1. / f
-    else:
-        norm_factor = mpconsts.g / f
+    norm_factor = 1. / f if height.dimensionality['[length]'] == 2.0 else mpconsts.g / f
 
     dhdx, dhdy = geospatial_gradient(height, dx=dx, dy=dy, x_dim=x_dim, y_dim=y_dim,
                                      parallel_scale=parallel_scale,
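
The condensed conditional in geostrophic_wind relies on pint's dimensionality to distinguish geopotential (m2 s-2, length exponent 2) from geometric height (m, exponent 1). A minimal illustration:

from metpy.units import units

height = units.Quantity(5500., 'meter')
geopotential = units.Quantity(54000., 'meter ** 2 / second ** 2')

# dimensionality maps each base dimension to its exponent, which is exactly
# what the one-line conditional inspects.
print(height.dimensionality['[length]'])        # 1
print(geopotential.dimensionality['[length]'])  # 2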


=====================================
src/metpy/calc/thermo.py
=====================================
@@ -963,7 +963,7 @@ def parcel_profile(pressure, temperature, dewpoint):
     >>> Td = dewpoint_from_relative_humidity(T, rh)
     >>> # compute parcel temperature
     >>> parcel_profile(p, T[0], Td[0]).to('degC')
-    <Quantity([  29.3          28.61221952   25.22214738   23.46097535   21.5835928
+    <Quantity([  29.3          28.61221952   25.22214738   23.46097684   21.5835928
     19.57260398   17.40636185   15.05748615   12.49064866    9.6592539
         6.50023491    2.92560365   -1.19172846   -6.04257884  -11.92497517
     -19.3176536   -28.97672464  -41.94444385  -50.01173076  -59.30936248
@@ -2375,7 +2375,7 @@ def cape_cin(pressure, temperature, dewpoint, parcel_profile, which_lfc='bottom'
     >>> prof = parcel_profile(p, T[0], Td[0]).to('degC')
     >>> # calculate surface based CAPE/CIN
     >>> cape_cin(p, T, Td, prof)
-    (<Quantity(4703.77306, 'joule / kilogram')>, <Quantity(0, 'joule / kilogram')>)
+    (<Quantity(4703.77308, 'joule / kilogram')>, <Quantity(0, 'joule / kilogram')>)
 
     See Also
     --------
@@ -2441,10 +2441,7 @@ def cape_cin(pressure, temperature, dewpoint, parcel_profile, which_lfc='bottom'
                         parcel_temperature_profile=parcel_profile, which=which_el)
 
     # No EL and we use the top reading of the sounding.
-    if np.isnan(el_pressure):
-        el_pressure = pressure[-1].magnitude
-    else:
-        el_pressure = el_pressure.magnitude
+    el_pressure = pressure[-1].magnitude if np.isnan(el_pressure) else el_pressure.magnitude
 
     # Difference between the parcel path and measured temperature profiles
     y = (parcel_profile - temperature).to(units.degK)
@@ -3016,7 +3013,7 @@ def most_unstable_cape_cin(pressure, temperature, dewpoint, **kwargs):
     >>> Td = dewpoint_from_relative_humidity(T, rh)
     >>> # calculate most unstable CAPE/CIN
     >>> most_unstable_cape_cin(p, T, Td)
-    (<Quantity(4703.77306, 'joule / kilogram')>, <Quantity(0, 'joule / kilogram')>)
+    (<Quantity(4703.77308, 'joule / kilogram')>, <Quantity(0, 'joule / kilogram')>)
 
     See Also
     --------
@@ -3175,9 +3172,9 @@ def downdraft_cape(pressure, temperature, dewpoint):
     >>> # calculate dewpoint
     >>> Td = dewpoint_from_relative_humidity(T, rh)
     >>> downdraft_cape(p, T, Td)
-    (<Quantity(1222.67968, 'joule / kilogram')>, <Quantity([1008. 1000.  950.
+    (<Quantity(1222.67967, 'joule / kilogram')>, <Quantity([1008. 1000.  950.
     900.  850.  800.  750.  700.  650.  600.], 'hectopascal')>, <Quantity([17.50959548
-    17.20643425 15.237249 13.12607097 10.85045704 8.38243809 5.68671014 2.71808363
+    17.20643425 15.237249 13.12607097 10.85045704 8.38243809 5.68671014 2.71808368
     -0.58203825 -4.29053485], 'degree_Celsius')>)
 
     See Also


=====================================
src/metpy/calc/tools.py
=====================================
@@ -298,12 +298,7 @@ def reduce_point_density(points, radius, priority=None):
 
     # Need to use sorted indices rather than sorting the position
     # so that the keep mask matches *original* order.
-    if priority is not None:
-        # Need to sort the locations in decreasing priority.
-        sorted_indices = np.argsort(priority)[::-1]
-    else:
-        # Take advantage of iterator nature of range here to avoid making big lists
-        sorted_indices = range(len(points))
+    sorted_indices = range(len(points)) if priority is None else np.argsort(priority)[::-1]
 
     # Keep all good points initially
     keep = np.logical_and.reduce(good_vals, axis=-1)
@@ -1830,6 +1825,13 @@ def angle_to_direction(input_angle, full=False, level=3):
     direction
         The directional text
 
+    Examples
+    --------
+    >>> from metpy.calc import angle_to_direction
+    >>> from metpy.units import units
+    >>> angle_to_direction(225. * units.deg)
+    'SW'
+
     """
     try:  # strip units temporarily
         origin_units = input_angle.units
@@ -1843,8 +1845,11 @@ def angle_to_direction(input_angle, full=False, level=3):
     else:
         scalar = False
 
+    np_input_angle = np.array(input_angle).astype(float)
+    origshape = np_input_angle.shape
+    ndarray = len(origshape) > 1
     # clean any numeric strings, negatives, and None does not handle strings with alphabet
-    input_angle = units.Quantity(np.array(input_angle).astype(float), origin_units)
+    input_angle = units.Quantity(np_input_angle, origin_units)
     input_angle[input_angle < 0] = np.nan
 
     # Normalize between 0 - 360
@@ -1860,8 +1865,10 @@ def angle_to_direction(input_angle, full=False, level=3):
         err_msg = 'Level of complexity cannot be less than 1 or greater than 3!'
         raise ValueError(err_msg)
 
-    angle_dict = {i * BASE_DEGREE_MULTIPLIER.m * nskip: dir_str
-                  for i, dir_str in enumerate(DIR_STRS[::nskip])}
+    angle_dict = {
+        i * BASE_DEGREE_MULTIPLIER.m * nskip: dir_str
+        for i, dir_str in enumerate(DIR_STRS[::nskip])
+    }
     angle_dict[MAX_DEGREE_ANGLE.m] = 'N'  # handle edge case of 360.
     angle_dict[UND_ANGLE] = UND
 
@@ -1877,18 +1884,25 @@ def angle_to_direction(input_angle, full=False, level=3):
     # ['N', 'N', 'NE', 'NE', 'E', 'E', 'SE', 'SE',
     #  'S', 'S', 'SW', 'SW', 'W', 'W', 'NW', 'NW']
 
-    multiplier = np.round(
-        (norm_angles / BASE_DEGREE_MULTIPLIER / nskip) - 0.001).m
-    round_angles = (multiplier * BASE_DEGREE_MULTIPLIER.m * nskip)
+    multiplier = np.round((norm_angles / BASE_DEGREE_MULTIPLIER / nskip) - 0.001).m
+    round_angles = multiplier * BASE_DEGREE_MULTIPLIER.m * nskip
     round_angles[np.where(np.isnan(round_angles))] = UND_ANGLE
-
-    dir_str_arr = itemgetter(*round_angles)(angle_dict)  # for array
-    if not full:
-        return dir_str_arr
-
-    dir_str_arr = ','.join(dir_str_arr)
-    dir_str_arr = _unabbreviate_direction(dir_str_arr)
-    return dir_str_arr.replace(',', ' ') if scalar else dir_str_arr.split(',')
+    if ndarray:
+        round_angles = round_angles.flatten()
+    dir_str_arr = itemgetter(*round_angles)(angle_dict)  # returns str or tuple
+    if full:
+        dir_str_arr = ','.join(dir_str_arr)
+        dir_str_arr = _unabbreviate_direction(dir_str_arr)
+        dir_str_arr = dir_str_arr.split(',')
+        if scalar:
+            return dir_str_arr[0]
+        else:
+            return np.array(dir_str_arr).reshape(origshape)
+    else:
+        if scalar:
+            return dir_str_arr
+        else:
+            return np.array(dir_str_arr).reshape(origshape)
 
 
 def _unabbreviate_direction(abb_dir_str):
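
The angle_to_direction rework flattens a multi-dimensional input, maps it through the lookup table, then reshapes the result, so array inputs now keep their shape. A usage sketch of the expected post-change behavior:

import numpy as np
from metpy.calc import angle_to_direction
from metpy.units import units

angles = np.array([[0., 90.], [180., 270.]]) * units.deg

# A 2-D input should now come back as a 2-D array of abbreviations instead
# of a flat tuple.
print(angle_to_direction(angles))
# [['N' 'E']
#  ['S' 'W']]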


=====================================
src/metpy/io/gempak.py
=====================================
@@ -489,43 +489,25 @@ def _wx_to_wnum(wx1, wx2, wx3, missing=-9999):
     Notes
     -----
     See GEMPAK function PT_WNMT.
-    """
-    metar_codes = [
-        'BR', 'DS', 'DU', 'DZ', 'FC', 'FG', 'FU', 'GR', 'GS',
-        'HZ', 'IC', 'PL', 'PO', 'RA', 'SA', 'SG', 'SN', 'SQ',
-        'SS', 'TS', 'UP', 'VA', '+DS', '-DZ', '+DZ', '+FC',
-        '-GS', '+GS', '-PL', '+PL', '-RA', '+RA', '-SG',
-        '+SG', '-SN', '+SN', '+SS', 'BCFG', 'BLDU', 'BLPY',
-        'BLSA', 'BLSN', 'DRDU', 'DRSA', 'DRSN', 'FZDZ', 'FZFG',
-        'FZRA', 'MIFG', 'PRFG', 'SHGR', 'SHGS', 'SHPL', 'SHRA',
-        'SHSN', 'TSRA', '+BLDU', '+BLSA', '+BLSN', '-FZDZ',
-        '+FZDZ', '+FZFG', '-FZRA', '+FZRA', '-SHGS', '+SHGS',
-        '-SHPL', '+SHPL', '-SHRA', '+SHRA', '-SHSN', '+SHSN',
-        '-TSRA', '+TSRA'
-    ]
-
-    gempak_wnum = [
-        9, 33, 8, 2, -2, 9, 7, 4, 25, 6, 36, 23, 40, 1, 35, 24, 3, 10,
-        35, 5, 41, 11, 68, 17, 18, -1, 61, 62, 57, 58, 13, 14, 59, 60, 20,
-        21, 69, 9, 33, 34, 35, 32, 33, 35, 32, 19, 30, 15, 31, 9, 27, 67,
-        63, 16, 22, 66, 68, 69, 70, 53, 54, 30, 49, 50, 67, 67, 75, 76, 51,
-        52, 55, 56, 77, 78
-    ]
-
-    if wx1 in metar_codes:
-        wn1 = gempak_wnum[metar_codes.index(wx1)]
-    else:
-        wn1 = 0
-
-    if wx2 in metar_codes:
-        wn2 = gempak_wnum[metar_codes.index(wx2)]
-    else:
-        wn2 = 0
 
-    if wx3 in metar_codes:
-        wn3 = gempak_wnum[metar_codes.index(wx3)]
-    else:
-        wn3 = 0
+    """
+    metar_to_gempak_wnum = {'BR': 9, 'DS': 33, 'DU': 8, 'DZ': 2, 'FC': -2, 'FG': 9, 'FU': 7,
+                            'GR': 4, 'GS': 25, 'HZ': 6, 'IC': 36, 'PL': 23, 'PO': 40, 'RA': 1,
+                            'SA': 35, 'SG': 24, 'SN': 3, 'SQ': 10, 'SS': 35, 'TS': 5, 'UP': 41,
+                            'VA': 11, '+DS': 68, '-DZ': 17, '+DZ': 18, '+FC': -1, '-GS': 61,
+                            '+GS': 62, '-PL': 57, '+PL': 58, '-RA': 13, '+RA': 14, '-SG': 59,
+                            '+SG': 60, '-SN': 20, '+SN': 21, '+SS': 69, 'BCFG': 9, 'BLDU': 33,
+                            'BLPY': 34, 'BLSA': 35, 'BLSN': 32, 'DRDU': 33, 'DRSA': 35,
+                            'DRSN': 32, 'FZDZ': 19, 'FZFG': 30, 'FZRA': 15, 'MIFG': 31,
+                            'PRFG': 9, 'SHGR': 27, 'SHGS': 67, 'SHPL': 63, 'SHRA': 16,
+                            'SHSN': 22, 'TSRA': 66, '+BLDU': 68, '+BLSA': 69, '+BLSN': 70,
+                            '-FZDZ': 53, '+FZDZ': 54, '+FZFG': 30, '-FZRA': 49, '+FZRA': 50,
+                            '-SHGS': 67, '+SHGS': 67, '-SHPL': 75, '+SHPL': 76, '-SHRA': 51,
+                            '+SHRA': 52, '-SHSN': 55, '+SHSN': 56, '-TSRA': 77, '+TSRA': 78}
+
+    wn1 = metar_to_gempak_wnum.get(wx1, 0)
+    wn2 = metar_to_gempak_wnum.get(wx2, 0)
+    wn3 = metar_to_gempak_wnum.get(wx3, 0)
 
     if all(w >= 0 for w in [wn1, wn2, wn3]):
         wnum = wn3 * 80 * 80 + wn2 * 80 + wn1
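
The dict rewrite above also makes the base-80 packing easier to follow. A trimmed-down sketch using a handful of codes from the mapping (the fallback branch for negative codes is outside this hunk and omitted here):

# Up to three METAR codes share one GEMPAK weather number in base 80.
metar_to_gempak_wnum = {'BR': 9, 'RA': 1, 'SN': 3, 'TS': 5}

def wx_to_wnum(wx1, wx2=None, wx3=None):
    wn1 = metar_to_gempak_wnum.get(wx1, 0)
    wn2 = metar_to_gempak_wnum.get(wx2, 0)
    wn3 = metar_to_gempak_wnum.get(wx3, 0)
    return wn3 * 80 * 80 + wn2 * 80 + wn1  # valid for non-negative codes

print(wx_to_wnum('TS', 'RA'))  # 1 * 80 + 5 == 85
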
@@ -663,7 +645,7 @@ class GempakFile:
                                     'NavigationBlock')
 
             if navb_size != nav_stuct.size // BYTES_PER_WORD:
-                raise ValueError('Navigation block size does not match GEMPAK specification')
+                raise ValueError('Navigation block size does not match GEMPAK specification.')
             else:
                 self.navigation_block = (
                     self._buffer.read_struct(nav_stuct)
@@ -683,7 +665,7 @@ class GempakFile:
 
             if anlb_size not in [anlb1_struct.size // BYTES_PER_WORD,
                                  anlb2_struct.size // BYTES_PER_WORD]:
-                raise ValueError('Analysis block size does not match GEMPAK specification')
+                raise ValueError('Analysis block size does not match GEMPAK specification.')
             else:
                 anlb_type = self._buffer.read_struct(struct.Struct(self.prefmt + 'f'))[0]
                 self._buffer.jump_to(anlb_start)
@@ -759,9 +741,9 @@ class GempakFile:
 
     def _swap_bytes(self, binary):
         """Swap between little and big endian."""
-        self.swaped_bytes = (struct.pack('@i', 1) != binary)
+        self.swapped_bytes = (struct.pack('@i', 1) != binary)
 
-        if self.swaped_bytes:
+        if self.swapped_bytes:
             if sys.byteorder == 'little':
                 self.prefmt = '>'
                 self.endian = 'big'
@@ -950,9 +932,9 @@ class GempakGrid(GempakFile):
             if self._buffer.read_int(4, self.endian, False) == USED_FLAG:
                 self.column_headers.append(self._buffer.read_struct(column_headers_fmt))
 
-        self._gdinfo = []
+        self._gdinfo = set()
         for n, head in enumerate(self.column_headers):
-            self._gdinfo.append(
+            self._gdinfo.add(
                 Grid(
                     n,
                     head.GTM1[0],
@@ -972,7 +954,7 @@ class GempakGrid(GempakFile):
 
     def gdinfo(self):
         """Return grid information."""
-        return self._gdinfo
+        return sorted(self._gdinfo)
 
     def _get_crs(self):
         """Create CRS from GEMPAK navigation block."""
@@ -1262,7 +1244,7 @@ class GempakGrid(GempakFile):
             level2 = [level2]
 
         # Figure out which columns to extract from the file
-        matched = self._gdinfo.copy()
+        matched = sorted(self._gdinfo)
 
         if parameter is not None:
             matched = filter(
@@ -1309,9 +1291,8 @@ class GempakGrid(GempakFile):
 
         grids = []
         irow = 0  # Only one row for grids
-        for icol, col_head in enumerate(self.column_headers):
-            if icol not in gridno:
-                continue
+        for icol in gridno:
+            col_head = self.column_headers[icol]
             for iprt, part in enumerate(self.parts):
                 pointer = (self.prod_desc.data_block_ptr
                            + (irow * self.prod_desc.columns * self.prod_desc.parts)
@@ -1409,7 +1390,7 @@ class GempakSounding(GempakFile):
 
         self.merged = 'SNDT' in (part.name for part in self.parts)
 
-        self._sninfo = []
+        self._sninfo = set()
         for irow, row_head in enumerate(self.row_headers):
             for icol, col_head in enumerate(self.column_headers):
                 pointer = (self.prod_desc.data_block_ptr
@@ -1420,7 +1401,7 @@ class GempakSounding(GempakFile):
                 data_ptr = self._buffer.read_int(4, self.endian, False)
 
                 if data_ptr:
-                    self._sninfo.append(
+                    self._sninfo.add(
                         Sounding(
                             irow,
                             icol,
@@ -1437,144 +1418,140 @@ class GempakSounding(GempakFile):
 
     def sninfo(self):
         """Return sounding information."""
-        return self._sninfo
+        return sorted(self._sninfo)
 
     def _unpack_merged(self, sndno):
         """Unpack merged sounding data."""
         soundings = []
-        for irow, row_head in enumerate(self.row_headers):
-            for icol, col_head in enumerate(self.column_headers):
-                if (irow, icol) not in sndno:
+        for irow, icol in sndno:
+            row_head = self.row_headers[irow]
+            col_head = self.column_headers[icol]
+            sounding = {
+                'STID': col_head.STID,
+                'STNM': col_head.STNM,
+                'SLAT': col_head.SLAT,
+                'SLON': col_head.SLON,
+                'SELV': col_head.SELV,
+                'STAT': col_head.STAT,
+                'COUN': col_head.COUN,
+                'DATE': row_head.DATE,
+                'TIME': row_head.TIME,
+            }
+            for iprt, part in enumerate(self.parts):
+                pointer = (self.prod_desc.data_block_ptr
+                           + (irow * self.prod_desc.columns * self.prod_desc.parts)
+                           + (icol * self.prod_desc.parts + iprt))
+                self._buffer.jump_to(self._start, _word_to_position(pointer))
+                self.data_ptr = self._buffer.read_int(4, self.endian, False)
+                if not self.data_ptr:
                     continue
-                sounding = {'STID': col_head.STID,
-                            'STNM': col_head.STNM,
-                            'SLAT': col_head.SLAT,
-                            'SLON': col_head.SLON,
-                            'SELV': col_head.SELV,
-                            'STAT': col_head.STAT,
-                            'COUN': col_head.COUN,
-                            'DATE': row_head.DATE,
-                            'TIME': row_head.TIME,
-                            }
-                for iprt, part in enumerate(self.parts):
-                    pointer = (self.prod_desc.data_block_ptr
-                               + (irow * self.prod_desc.columns * self.prod_desc.parts)
-                               + (icol * self.prod_desc.parts + iprt))
-                    self._buffer.jump_to(self._start, _word_to_position(pointer))
-                    self.data_ptr = self._buffer.read_int(4, self.endian, False)
-                    if not self.data_ptr:
-                        continue
-                    self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
-                    self.data_header_length = self._buffer.read_int(4, self.endian, False)
-                    data_header = self._buffer.set_mark()
-                    self._buffer.jump_to(data_header,
-                                         _word_to_position(part.header_length + 1))
-                    lendat = self.data_header_length - part.header_length
-
-                    fmt_code = {
-                        DataTypes.real: 'f',
-                        DataTypes.realpack: 'i',
-                        DataTypes.character: 's',
-                    }.get(part.data_type)
-
-                    if fmt_code is None:
-                        raise NotImplementedError(f'No methods for data type {part.data_type}')
-
-                    if fmt_code == 's':
-                        lendat *= BYTES_PER_WORD
-
-                    packed_buffer = (
-                        self._buffer.read_struct(
-                            struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
-                        )
+                self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
+                self.data_header_length = self._buffer.read_int(4, self.endian, False)
+                data_header = self._buffer.set_mark()
+                self._buffer.jump_to(data_header,
+                                     _word_to_position(part.header_length + 1))
+                lendat = self.data_header_length - part.header_length
+
+                fmt_code = {
+                    DataTypes.real: 'f',
+                    DataTypes.realpack: 'i',
+                }.get(part.data_type)
+
+                if fmt_code is None:
+                    raise NotImplementedError(f'No methods for data type {part.data_type}')
+
+                packed_buffer = (
+                    self._buffer.read_struct(
+                        struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
                     )
+                )
 
-                    parameters = self.parameters[iprt]
-                    nparms = len(parameters['name'])
+                parameters = self.parameters[iprt]
+                nparms = len(parameters['name'])
 
-                    if part.data_type == DataTypes.realpack:
-                        unpacked = self._unpack_real(packed_buffer, parameters, lendat)
-                        for iprm, param in enumerate(parameters['name']):
-                            sounding[param] = unpacked[iprm::nparms]
-                    else:
-                        for iprm, param in enumerate(parameters['name']):
-                            sounding[param] = np.array(
-                                packed_buffer[iprm::nparms], dtype=np.float32
-                            )
+                if part.data_type == DataTypes.realpack:
+                    unpacked = self._unpack_real(packed_buffer, parameters, lendat)
+                    for iprm, param in enumerate(parameters['name']):
+                        sounding[param] = unpacked[iprm::nparms]
+                else:
+                    for iprm, param in enumerate(parameters['name']):
+                        sounding[param] = np.array(
+                            packed_buffer[iprm::nparms], dtype=np.float32
+                        )
 
-                soundings.append(sounding)
+            soundings.append(sounding)
         return soundings
 
     def _unpack_unmerged(self, sndno):
         """Unpack unmerged sounding data."""
         soundings = []
-        for irow, row_head in enumerate(self.row_headers):
-            for icol, col_head in enumerate(self.column_headers):
-                if (irow, icol) not in sndno:
+        for irow, icol in sndno:
+            row_head = self.row_headers[irow]
+            col_head = self.column_headers[icol]
+            sounding = {
+                'STID': col_head.STID,
+                'STNM': col_head.STNM,
+                'SLAT': col_head.SLAT,
+                'SLON': col_head.SLON,
+                'SELV': col_head.SELV,
+                'STAT': col_head.STAT,
+                'COUN': col_head.COUN,
+                'DATE': row_head.DATE,
+                'TIME': row_head.TIME,
+            }
+            for iprt, part in enumerate(self.parts):
+                pointer = (self.prod_desc.data_block_ptr
+                           + (irow * self.prod_desc.columns * self.prod_desc.parts)
+                           + (icol * self.prod_desc.parts + iprt))
+                self._buffer.jump_to(self._start, _word_to_position(pointer))
+                self.data_ptr = self._buffer.read_int(4, self.endian, False)
+                if not self.data_ptr:
                     continue
-                sounding = {'STID': col_head.STID,
-                            'STNM': col_head.STNM,
-                            'SLAT': col_head.SLAT,
-                            'SLON': col_head.SLON,
-                            'SELV': col_head.SELV,
-                            'STAT': col_head.STAT,
-                            'COUN': col_head.COUN,
-                            'DATE': row_head.DATE,
-                            'TIME': row_head.TIME,
-                            }
-                for iprt, part in enumerate(self.parts):
-                    pointer = (self.prod_desc.data_block_ptr
-                               + (irow * self.prod_desc.columns * self.prod_desc.parts)
-                               + (icol * self.prod_desc.parts + iprt))
-                    self._buffer.jump_to(self._start, _word_to_position(pointer))
-                    self.data_ptr = self._buffer.read_int(4, self.endian, False)
-                    if not self.data_ptr:
-                        continue
-                    self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
-                    self.data_header_length = self._buffer.read_int(4, self.endian, False)
-                    data_header = self._buffer.set_mark()
-                    self._buffer.jump_to(data_header,
-                                         _word_to_position(part.header_length + 1))
-                    lendat = self.data_header_length - part.header_length
-
-                    fmt_code = {
-                        DataTypes.real: 'f',
-                        DataTypes.realpack: 'i',
-                        DataTypes.character: 's',
-                    }.get(part.data_type)
-
-                    if fmt_code is None:
-                        raise NotImplementedError(f'No methods for data type {part.data_type}')
-
-                    if fmt_code == 's':
-                        lendat *= BYTES_PER_WORD
-
-                    packed_buffer = (
-                        self._buffer.read_struct(
-                            struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
-                        )
+                self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
+                self.data_header_length = self._buffer.read_int(4, self.endian, False)
+                data_header = self._buffer.set_mark()
+                self._buffer.jump_to(data_header,
+                                     _word_to_position(part.header_length + 1))
+                lendat = self.data_header_length - part.header_length
+
+                fmt_code = {
+                    DataTypes.real: 'f',
+                    DataTypes.realpack: 'i',
+                    DataTypes.character: 's',
+                }.get(part.data_type)
+
+                if fmt_code is None:
+                    raise NotImplementedError(f'No methods for data type {part.data_type}')
+
+                if fmt_code == 's':
+                    lendat *= BYTES_PER_WORD
+
+                packed_buffer = (
+                    self._buffer.read_struct(
+                        struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
                     )
+                )
 
-                    parameters = self.parameters[iprt]
-                    nparms = len(parameters['name'])
-                    sounding[part.name] = {}
-
-                    if part.data_type == DataTypes.realpack:
-                        unpacked = self._unpack_real(packed_buffer, parameters, lendat)
-                        for iprm, param in enumerate(parameters['name']):
-                            sounding[part.name][param] = unpacked[iprm::nparms]
-                    elif part.data_type == DataTypes.character:
-                        for iprm, param in enumerate(parameters['name']):
-                            sounding[part.name][param] = (
-                                self._decode_strip(packed_buffer[iprm])
-                            )
-                    else:
-                        for iprm, param in enumerate(parameters['name']):
-                            sounding[part.name][param] = (
-                                np.array(packed_buffer[iprm::nparms], dtype=np.float32)
-                            )
+                parameters = self.parameters[iprt]
+                nparms = len(parameters['name'])
+                sounding[part.name] = {}
+
+                if part.data_type == DataTypes.realpack:
+                    unpacked = self._unpack_real(packed_buffer, parameters, lendat)
+                    for iprm, param in enumerate(parameters['name']):
+                        sounding[part.name][param] = unpacked[iprm::nparms]
+                elif part.data_type == DataTypes.character:
+                    for iprm, param in enumerate(parameters['name']):
+                        sounding[part.name][param] = (
+                            self._decode_strip(packed_buffer[iprm])
+                        )
+                else:
+                    for iprm, param in enumerate(parameters['name']):
+                        sounding[part.name][param] = (
+                            np.array(packed_buffer[iprm::nparms], dtype=np.float32)
+                        )
 
-                soundings.append(self._merge_sounding(sounding))
+            soundings.append(self._merge_sounding(sounding))
         return soundings
 
     def _merge_significant_temps(self, merged, parts, section, pbot):
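
The recurring pattern in these _unpack_* rewrites is a loop inversion: iterate the matched (row, column) pairs directly and index into the headers, instead of scanning every header cell and filtering with a membership test. A schematic sketch with hypothetical stand-in data:

# Loop inversion used throughout the _unpack_* methods, with hypothetical
# stand-in headers and matches.
row_headers = ['r0', 'r1', 'r2']
column_headers = ['c0', 'c1']
sndno = [(0, 1), (2, 0)]  # matched (row, column) pairs

# Before: scan the full rows x columns grid, skipping unmatched cells.
old = [(rh, ch)
       for irow, rh in enumerate(row_headers)
       for icol, ch in enumerate(column_headers)
       if (irow, icol) in sndno]

# After: index straight into the headers for each matched pair.
new = [(row_headers[irow], column_headers[icol]) for irow, icol in sndno]
assert old == new  # identical because sndno is kept in sorted (row-major) order
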
@@ -2177,7 +2154,7 @@ class GempakSounding(GempakFile):
             country = [c.upper() for c in country]
 
         # Figure out which columns to extract from the file
-        matched = self._sninfo.copy()
+        matched = sorted(self._sninfo)
 
         if station_id is not None:
             matched = filter(
@@ -2216,10 +2193,7 @@ class GempakSounding(GempakFile):
 
         sndno = [(s.DTNO, s.SNDNO) for s in matched]
 
-        if self.merged:
-            data = self._unpack_merged(sndno)
-        else:
-            data = self._unpack_unmerged(sndno)
+        data = self._unpack_merged(sndno) if self.merged else self._unpack_unmerged(sndno)
 
         soundings = []
         for snd in data:
@@ -2299,7 +2273,7 @@ class GempakSurface(GempakFile):
 
         self._get_surface_type()
 
-        self._sfinfo = []
+        self._sfinfo = set()
         if self.surface_type == 'standard':
             for irow, row_head in enumerate(self.row_headers):
                 for icol, col_head in enumerate(self.column_headers):
@@ -2312,7 +2286,7 @@ class GempakSurface(GempakFile):
                         data_ptr = self._buffer.read_int(4, self.endian, False)
 
                         if data_ptr:
-                            self._sfinfo.append(
+                            self._sfinfo.add(
                                 Surface(
                                     irow,
                                     icol,
@@ -2338,7 +2312,7 @@ class GempakSurface(GempakFile):
                     data_ptr = self._buffer.read_int(4, self.endian, False)
 
                     if data_ptr:
-                        self._sfinfo.append(
+                        self._sfinfo.add(
                             Surface(
                                 irow,
                                 icol,
@@ -2364,7 +2338,7 @@ class GempakSurface(GempakFile):
                         data_ptr = self._buffer.read_int(4, self.endian, False)
 
                         if data_ptr:
-                            self._sfinfo.append(
+                            self._sfinfo.add(
                                 Surface(
                                     irow,
                                     icol,
@@ -2383,15 +2357,22 @@ class GempakSurface(GempakFile):
 
     def sfinfo(self):
         """Return station information."""
-        return self._sfinfo
+        return sorted(self._sfinfo)
 
     def _get_surface_type(self):
-        """Determine type of surface file."""
-        if len(self.row_headers) == 1:
+        """Determine type of surface file.
+
+        Notes
+        -----
+        See GEMPAK SFLIB documentation for type definitions.
+        """
+        if (len(self.row_headers) == 1
+           and 'DATE' in self.column_keys
+           and 'STID' in self.column_keys):
             self.surface_type = 'ship'
-        elif 'DATE' in self.row_keys:
+        elif 'DATE' in self.row_keys and 'STID' in self.column_keys:
             self.surface_type = 'standard'
-        elif 'DATE' in self.column_keys:
+        elif 'DATE' in self.column_keys and 'STID' in self.row_keys:
             self.surface_type = 'climate'
         else:
             raise TypeError('Unknown surface data type')
@@ -2414,92 +2395,91 @@ class GempakSurface(GempakFile):
     def _unpack_climate(self, sfcno):
         """Unpack a climate surface data file."""
         stations = []
-        for icol, col_head in enumerate(self.column_headers):
-            for irow, row_head in enumerate(self.row_headers):
-                if (irow, icol) not in sfcno:
+        for irow, icol in sfcno:
+            col_head = self.column_headers[icol]
+            row_head = self.row_headers[irow]
+            station = {
+                'STID': row_head.STID,
+                'STNM': row_head.STNM,
+                'SLAT': row_head.SLAT,
+                'SLON': row_head.SLON,
+                'SELV': row_head.SELV,
+                'STAT': row_head.STAT,
+                'COUN': row_head.COUN,
+                'STD2': row_head.STD2,
+                'SPRI': row_head.SPRI,
+                'DATE': col_head.DATE,
+                'TIME': col_head.TIME,
+            }
+            for iprt, part in enumerate(self.parts):
+                pointer = (self.prod_desc.data_block_ptr
+                           + (irow * self.prod_desc.columns * self.prod_desc.parts)
+                           + (icol * self.prod_desc.parts + iprt))
+                self._buffer.jump_to(self._start, _word_to_position(pointer))
+                self.data_ptr = self._buffer.read_int(4, self.endian, False)
+                if not self.data_ptr:
                     continue
-                station = {'STID': row_head.STID,
-                           'STNM': row_head.STNM,
-                           'SLAT': row_head.SLAT,
-                           'SLON': row_head.SLON,
-                           'SELV': row_head.SELV,
-                           'STAT': row_head.STAT,
-                           'COUN': row_head.COUN,
-                           'STD2': row_head.STD2,
-                           'SPRI': row_head.SPRI,
-                           'DATE': col_head.DATE,
-                           'TIME': col_head.TIME,
-                           }
-                for iprt, part in enumerate(self.parts):
-                    pointer = (self.prod_desc.data_block_ptr
-                               + (irow * self.prod_desc.columns * self.prod_desc.parts)
-                               + (icol * self.prod_desc.parts + iprt))
-                    self._buffer.jump_to(self._start, _word_to_position(pointer))
-                    self.data_ptr = self._buffer.read_int(4, self.endian, False)
-                    if not self.data_ptr:
-                        continue
-                    self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
-                    self.data_header_length = self._buffer.read_int(4, self.endian, False)
-                    data_header = self._buffer.set_mark()
-                    self._buffer.jump_to(data_header,
-                                         _word_to_position(part.header_length + 1))
-                    lendat = self.data_header_length - part.header_length
-
-                    fmt_code = {
-                        DataTypes.real: 'f',
-                        DataTypes.realpack: 'i',
-                        DataTypes.character: 's',
-                    }.get(part.data_type)
-
-                    if fmt_code is None:
-                        raise NotImplementedError(f'No methods for data type {part.data_type}')
-
-                    if fmt_code == 's':
-                        lendat *= BYTES_PER_WORD
-
-                    packed_buffer = (
-                        self._buffer.read_struct(
-                            struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
-                        )
+                self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
+                self.data_header_length = self._buffer.read_int(4, self.endian, False)
+                data_header = self._buffer.set_mark()
+                self._buffer.jump_to(data_header,
+                                     _word_to_position(part.header_length + 1))
+                lendat = self.data_header_length - part.header_length
+
+                fmt_code = {
+                    DataTypes.real: 'f',
+                    DataTypes.realpack: 'i',
+                    DataTypes.character: 's',
+                }.get(part.data_type)
+
+                if fmt_code is None:
+                    raise NotImplementedError(f'No methods for data type {part.data_type}')
+
+                if fmt_code == 's':
+                    lendat *= BYTES_PER_WORD
+
+                packed_buffer = (
+                    self._buffer.read_struct(
+                        struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
                     )
+                )
 
-                    parameters = self.parameters[iprt]
+                parameters = self.parameters[iprt]
 
-                    if part.data_type == DataTypes.realpack:
-                        unpacked = self._unpack_real(packed_buffer, parameters, lendat)
-                        for iprm, param in enumerate(parameters['name']):
-                            station[param] = unpacked[iprm]
-                    elif part.data_type == DataTypes.character:
-                        for iprm, param in enumerate(parameters['name']):
-                            station[param] = self._decode_strip(packed_buffer[iprm])
-                    else:
-                        for iprm, param in enumerate(parameters['name']):
-                            station[param] = np.array(
-                                packed_buffer[iprm], dtype=np.float32
-                            )
+                if part.data_type == DataTypes.realpack:
+                    unpacked = self._unpack_real(packed_buffer, parameters, lendat)
+                    for iprm, param in enumerate(parameters['name']):
+                        station[param] = unpacked[iprm]
+                elif part.data_type == DataTypes.character:
+                    for iprm, param in enumerate(parameters['name']):
+                        station[param] = self._decode_strip(packed_buffer[iprm])
+                else:
+                    for iprm, param in enumerate(parameters['name']):
+                        station[param] = np.array(
+                            packed_buffer[iprm], dtype=np.float32
+                        )
 
-                stations.append(station)
+            stations.append(station)
         return stations
 
     def _unpack_ship(self, sfcno):
         """Unpack ship (moving observation) surface data file."""
         stations = []
-        irow = 0
-        for icol, col_head in enumerate(self.column_headers):
-            if (irow, icol) not in sfcno:
-                continue
-            station = {'STID': col_head.STID,
-                       'STNM': col_head.STNM,
-                       'SLAT': col_head.SLAT,
-                       'SLON': col_head.SLON,
-                       'SELV': col_head.SELV,
-                       'STAT': col_head.STAT,
-                       'COUN': col_head.COUN,
-                       'STD2': col_head.STD2,
-                       'SPRI': col_head.SPRI,
-                       'DATE': col_head.DATE,
-                       'TIME': col_head.TIME,
-                       }
+        for irow, icol in sfcno:  # irow should always be zero
+            col_head = self.column_headers[icol]
+            station = {
+                'STID': col_head.STID,
+                'STNM': col_head.STNM,
+                'SLAT': col_head.SLAT,
+                'SLON': col_head.SLON,
+                'SELV': col_head.SELV,
+                'STAT': col_head.STAT,
+                'COUN': col_head.COUN,
+                'STD2': col_head.STD2,
+                'SPRI': col_head.SPRI,
+                'DATE': col_head.DATE,
+                'TIME': col_head.TIME,
+            }
             for iprt, part in enumerate(self.parts):
                 pointer = (self.prod_desc.data_block_ptr
                            + (irow * self.prod_desc.columns * self.prod_desc.parts)
@@ -2554,69 +2534,69 @@ class GempakSurface(GempakFile):
     def _unpack_standard(self, sfcno):
         """Unpack a standard surface data file."""
         stations = []
-        for irow, row_head in enumerate(self.row_headers):
-            for icol, col_head in enumerate(self.column_headers):
-                if (irow, icol) not in sfcno:
+        for irow, icol in sfcno:
+            row_head = self.row_headers[irow]
+            col_head = self.column_headers[icol]
+            station = {
+                'STID': col_head.STID,
+                'STNM': col_head.STNM,
+                'SLAT': col_head.SLAT,
+                'SLON': col_head.SLON,
+                'SELV': col_head.SELV,
+                'STAT': col_head.STAT,
+                'COUN': col_head.COUN,
+                'STD2': col_head.STD2,
+                'SPRI': col_head.SPRI,
+                'DATE': row_head.DATE,
+                'TIME': row_head.TIME,
+            }
+            for iprt, part in enumerate(self.parts):
+                pointer = (self.prod_desc.data_block_ptr
+                           + (irow * self.prod_desc.columns * self.prod_desc.parts)
+                           + (icol * self.prod_desc.parts + iprt))
+                self._buffer.jump_to(self._start, _word_to_position(pointer))
+                self.data_ptr = self._buffer.read_int(4, self.endian, False)
+                if not self.data_ptr:
                     continue
-                station = {'STID': col_head.STID,
-                           'STNM': col_head.STNM,
-                           'SLAT': col_head.SLAT,
-                           'SLON': col_head.SLON,
-                           'SELV': col_head.SELV,
-                           'STAT': col_head.STAT,
-                           'COUN': col_head.COUN,
-                           'STD2': col_head.STD2,
-                           'SPRI': col_head.SPRI,
-                           'DATE': row_head.DATE,
-                           'TIME': row_head.TIME,
-                           }
-                for iprt, part in enumerate(self.parts):
-                    pointer = (self.prod_desc.data_block_ptr
-                               + (irow * self.prod_desc.columns * self.prod_desc.parts)
-                               + (icol * self.prod_desc.parts + iprt))
-                    self._buffer.jump_to(self._start, _word_to_position(pointer))
-                    self.data_ptr = self._buffer.read_int(4, self.endian, False)
-                    if not self.data_ptr:
-                        continue
-                    self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
-                    self.data_header_length = self._buffer.read_int(4, self.endian, False)
-                    data_header = self._buffer.set_mark()
-                    self._buffer.jump_to(data_header,
-                                         _word_to_position(part.header_length + 1))
-                    lendat = self.data_header_length - part.header_length
-
-                    fmt_code = {
-                        DataTypes.real: 'f',
-                        DataTypes.realpack: 'i',
-                        DataTypes.character: 's',
-                    }.get(part.data_type)
-
-                    if fmt_code is None:
-                        raise NotImplementedError(f'No methods for data type {part.data_type}')
-
-                    if fmt_code == 's':
-                        lendat *= BYTES_PER_WORD
-
-                    packed_buffer = (
-                        self._buffer.read_struct(
-                            struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
-                        )
+                self._buffer.jump_to(self._start, _word_to_position(self.data_ptr))
+                self.data_header_length = self._buffer.read_int(4, self.endian, False)
+                data_header = self._buffer.set_mark()
+                self._buffer.jump_to(data_header,
+                                     _word_to_position(part.header_length + 1))
+                lendat = self.data_header_length - part.header_length
+
+                fmt_code = {
+                    DataTypes.real: 'f',
+                    DataTypes.realpack: 'i',
+                    DataTypes.character: 's',
+                }.get(part.data_type)
+
+                if fmt_code is None:
+                    raise NotImplementedError(f'No methods for data type {part.data_type}')
+
+                if fmt_code == 's':
+                    lendat *= BYTES_PER_WORD
+
+                packed_buffer = (
+                    self._buffer.read_struct(
+                        struct.Struct(f'{self.prefmt}{lendat}{fmt_code}')
                     )
+                )
 
-                    parameters = self.parameters[iprt]
+                parameters = self.parameters[iprt]
 
-                    if part.data_type == DataTypes.realpack:
-                        unpacked = self._unpack_real(packed_buffer, parameters, lendat)
-                        for iprm, param in enumerate(parameters['name']):
-                            station[param] = unpacked[iprm]
-                    elif part.data_type == DataTypes.character:
-                        for iprm, param in enumerate(parameters['name']):
-                            station[param] = self._decode_strip(packed_buffer[iprm])
-                    else:
-                        for iprm, param in enumerate(parameters['name']):
-                            station[param] = packed_buffer[iprm]
+                if part.data_type == DataTypes.realpack:
+                    unpacked = self._unpack_real(packed_buffer, parameters, lendat)
+                    for iprm, param in enumerate(parameters['name']):
+                        station[param] = unpacked[iprm]
+                elif part.data_type == DataTypes.character:
+                    for iprm, param in enumerate(parameters['name']):
+                        station[param] = self._decode_strip(packed_buffer[iprm])
+                else:
+                    for iprm, param in enumerate(parameters['name']):
+                        station[param] = packed_buffer[iprm]
 
-                stations.append(station)
+            stations.append(station)
         return stations
 
     @staticmethod
@@ -2793,7 +2773,7 @@ class GempakSurface(GempakFile):
             country = [c.upper() for c in country]
 
         # Figure out which columns to extract from the file
-        matched = self._sfinfo.copy()
+        matched = sorted(self._sfinfo)
 
         if station_id is not None:
             matched = filter(
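
The refactor above swaps nested enumerate() scans for direct iteration over the
matched (irow, icol) pairs, with sorted() preserving the row-major order the old
loops produced. A minimal sketch of the pattern, using illustrative data rather
than MetPy's real header objects:

    row_headers = ['202403040000', '202403050000']
    column_headers = ['KMSN', 'KORD', 'KOUN']
    matched = sorted({(1, 0), (0, 2)})  # (irow, icol) pairs kept by the filters

    # Old shape: visit every cell and test membership in the middle.
    old = []
    for irow, rhead in enumerate(row_headers):
        for icol, chead in enumerate(column_headers):
            if (irow, icol) in matched:
                old.append((rhead, chead))

    # New shape: walk only the matched pairs and index the headers directly.
    new = [(row_headers[irow], column_headers[icol]) for irow, icol in matched]

    assert old == new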


=====================================
src/metpy/io/gini.py
=====================================
@@ -18,7 +18,6 @@ from xarray.backends import BackendEntrypoint
 from xarray.backends.common import AbstractDataStore
 from xarray.coding.times import CFDatetimeCoder
 from xarray.coding.variables import CFMaskCoder
-from xarray.core.utils import FrozenDict
 
 from ._tools import Bits, IOBuffer, NamedStruct, open_as_needed, zlib_decompress_all_frames
 from ..package_tools import Exporter
@@ -368,7 +367,7 @@ class GiniFile(AbstractDataStore):
         variables.extend(self._make_coord_vars())
         variables.extend(self._make_data_vars())
 
-        return FrozenDict(variables)
+        return dict(variables)
 
     def get_attrs(self):
         """Get the global attributes.
@@ -376,8 +375,8 @@ class GiniFile(AbstractDataStore):
         This is used by `xarray.open_dataset`.
 
         """
-        return FrozenDict(satellite=self.prod_desc.creating_entity,
-                          sector=self.prod_desc.sector_id)
+        return {'satellite': self.prod_desc.creating_entity,
+                'sector': self.prod_desc.sector_id}
 
 
 class GiniXarrayBackend(BackendEntrypoint):


=====================================
src/metpy/io/metar.py
=====================================
@@ -348,7 +348,8 @@ def parse_metar_file(filename, *, year=None, month=None):
     ----------
     filename : str or file-like object
         If str, the name of the file to be opened. If `filename` is a file-like object,
-        this will be read from directly.
+        this will be read from directly and needs to be opened in text mode (i.e. ``read()``
+        needs to return a string, not bytes).
     year : int, optional
         Year in which observation was taken, defaults to current year. Keyword-only argument.
     month : int, optional
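
As a hedged usage sketch of the clarified requirement (the file name here is
hypothetical), the input must be opened so that read() yields str:

    from metpy.io import parse_metar_file

    # Text mode is required; opening with 'rb' would hand the parser bytes.
    with open('metars_202404.txt') as f:
        df = parse_metar_file(f, year=2024, month=4)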


=====================================
src/metpy/io/nexrad.py
=====================================
@@ -545,12 +545,17 @@ class Level2File:
                 attr = f'VCPAT{num}'
                 dat = self.rda[attr]
                 vcp_hdr = self.vcp_fmt.unpack_from(dat, 0)
-                off = self.vcp_fmt.size
-                els = []
-                for _ in range(vcp_hdr.num_el_cuts):
-                    els.append(self.vcp_el_fmt.unpack_from(dat, off))
-                    off += self.vcp_el_fmt.size
-                self.rda[attr] = vcp_hdr._replace(els=els)
+                # At some point these got changed to spares, so only try to parse the rest if
+                # it looks like the right data.
+                if vcp_hdr.num == num and 0 < 2 * vcp_hdr.size_hw <= len(dat):
+                    off = self.vcp_fmt.size
+                    els = []
+                    for _ in range(vcp_hdr.num_el_cuts):
+                        els.append(self.vcp_el_fmt.unpack_from(dat, off))
+                        off += self.vcp_el_fmt.size
+                    self.rda[attr] = vcp_hdr._replace(els=els)
+                else:  # Otherwise this is just spare and we should dump
+                    self.rda.pop(attr)
 
     msg31_data_hdr_fmt = NamedStruct([('stid', '4s'), ('time_ms', 'L'),
                                       ('date', 'H'), ('az_num', 'H'),
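
The guard added above checks that the header actually describes the buffer
before the variable-length payload is unpacked. A standalone sketch of that
defensive pattern, with a made-up record layout rather than the real VCP format:

    import struct

    hdr_fmt = struct.Struct('<HH')  # (pattern_number, size_in_halfwords)
    rec_fmt = struct.Struct('<f')

    def parse_pattern(num, dat):
        pat_num, size_hw = hdr_fmt.unpack_from(dat, 0)
        # Only trust the payload if the header matches expectations and its
        # advertised size (in 2-byte halfwords) fits inside the buffer.
        if pat_num != num or not 0 < 2 * size_hw <= len(dat):
            return None  # spare/garbage data: drop it
        n_recs = (2 * size_hw - hdr_fmt.size) // rec_fmt.size
        return [rec_fmt.unpack_from(dat, hdr_fmt.size + i * rec_fmt.size)[0]
                for i in range(n_recs)]

    good = hdr_fmt.pack(11, 4) + rec_fmt.pack(0.5)
    assert parse_pattern(11, good) == [0.5]
    assert parse_pattern(11, b'\x00' * 8) is None  # spare record is rejected
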
@@ -717,15 +722,8 @@ def float16(val):
     exp = (val >> 10) & 0x1F
     sign = val >> 15
 
-    if exp:
-        value = 2 ** (exp - 16) * (1 + float(frac) / 2**10)
-    else:
-        value = float(frac) / 2**9
-
-    if sign:
-        value *= -1
-
-    return value
+    value = 2 ** (exp - 16) * (1 + float(frac) / 2**10) if exp else float(frac) / 2**9
+    return -value if sign else value
 
 
 def float32(short1, short2):
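
To sanity-check the condensed decoder, here is a self-contained sketch; the
frac extraction is assumed from context, since the hunk only shows the exp and
sign lines:

    def float16(val):
        frac = val & 0x03FF
        exp = (val >> 10) & 0x1F
        sign = val >> 15
        value = 2 ** (exp - 16) * (1 + float(frac) / 2**10) if exp else float(frac) / 2**9
        return -value if sign else value

    assert float16(0x4000) == 1.0   # exp equals the bias (16), zero fraction
    assert float16(0xC000) == -1.0  # same magnitude with the sign bit set
    assert float16(0x0100) == 0.5   # denormal path: 256 / 2**9
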
@@ -1850,10 +1848,10 @@ class Level3File:
         log.debug('Symbology block info: %s', blk)
 
         self.sym_block = []
-        assert blk.divider == -1, ('Bad divider for symbology block: {:d} should be -1'
-                                   .format(blk.divider))
-        assert blk.block_id == 1, ('Bad block ID for symbology block: {:d} should be 1'
-                                   .format(blk.block_id))
+        assert blk.divider == -1, (f'Bad divider for symbology block: {blk.divider} should '
+                                   'be -1')
+        assert blk.block_id == 1, (f'Bad block ID for symbology block: {blk.block_id} should '
+                                   'be 1')
         for _ in range(blk.nlayer):
             layer_hdr = self._buffer.read_struct(self.sym_layer_fmt)
             assert layer_hdr.divider == -1
@@ -1874,10 +1872,10 @@ class Level3File:
     def _unpack_graphblock(self, start, offset):
         self._buffer.jump_to(start, offset)
         hdr = self._buffer.read_struct(self.graph_block_fmt)
-        assert hdr.divider == -1, ('Bad divider for graphical block: {:d} should be -1'
-                                   .format(hdr.divider))
-        assert hdr.block_id == 2, ('Bad block ID for graphical block: {:d} should be 1'
-                                   .format(hdr.block_id))
+        assert hdr.divider == -1, (f'Bad divider for graphical block: {hdr.divider} should '
+                                   'be -1')
+        assert hdr.block_id == 2, (f'Bad block ID for graphical block: {hdr.block_id} should '
+                                   'be 2')
         self.graph_pages = []
         for page in range(hdr.num_pages):
             page_num = self._buffer.read_int(2, 'big', signed=False)


=====================================
src/metpy/plots/declarative.py
=====================================
@@ -1823,13 +1823,8 @@ class PlotGeometry(MetPyHasTraits):
         """
         color = proposal['value']
 
-        if isinstance(color, str):
-            color = [color]
         # `color` must be a collection if it is not a string
-        else:
-            color = list(color)
-
-        return color
+        return [color] if isinstance(color, str) else list(color)
 
     @staticmethod
     @validate('labels')
@@ -1877,10 +1872,7 @@ class PlotGeometry(MetPyHasTraits):
             geo_obj = geo_obj.geoms[label_hash % len(geo_obj.geoms)]
 
         # Get the list of coordinates of the polygon/line/point
-        if isinstance(geo_obj, Polygon):
-            coords = geo_obj.exterior.coords
-        else:
-            coords = geo_obj.coords
+        coords = geo_obj.exterior.coords if isinstance(geo_obj, Polygon) else geo_obj.coords
 
         return coords[label_hash % len(coords)]
 
@@ -1990,10 +1982,7 @@ class PlotGeometry(MetPyHasTraits):
 
                 # If polygon, put label directly on edge of polygon. If line or point, put
                 # label slightly below line/point.
-                if isinstance(geo_obj, (MultiPolygon, Polygon)):
-                    offset = (0, 0)
-                else:
-                    offset = (0, -12)
+                offset = (0, 0) if isinstance(geo_obj, (MultiPolygon, Polygon)) else (0, -12)
 
                 # Finally, draw the label
                 self._draw_label(label, lon, lat, fontcolor, fontoutline, offset)


=====================================
src/metpy/static-data-manifest.txt
=====================================
@@ -6,6 +6,7 @@ GFS_global.nc feae73f72340ee9e8b04fca92f182638a99316ad7262152809b7ccb9b6691e10
 GFS_test.nc b69ae13179428667f6bc14dada1d5f9af4d3737b2b76a79a6001664e1525df3c
 HI-REGIONAL_4km_3.9_20160616_1715.gini 30896dda51c9f933027d8086f0a86543efce12ea90a52981eccbce2e0ce1529e
 KICX_20170712_1458 94bd4f795832f056f7489a5562acf76de9a9cab1694549562cc3154abb22527c
+KJKL_20240227_102059 9344358cb53f2f9449b5a984d40a7dee8048fe6ecbb7fe219ab2420f8ee18776
 KLTX20050329_100015.gz cad6ad8df707ad63c9ddb7306de869186241dd821517daf21ae0a80f4ce0a58d
 KTLX19990503_235621.gz 7a097251bb7a15dbcdec75812812e41a86c5eb9850f55c3d91d120c2c61e046e
 KTLX20130520_201643_V06.gz 772e01b154a5c966982a6d0aa2fc78bc64f08a9b77165b74dc02d7aa5aa69275
@@ -34,13 +35,20 @@ cubic_test.npz 8b9d01c2177a057b3352bb6c5b775dae6b796d37ba35b4775fcb65300dc06ccf
 dec9_sounding.txt 4f60955bee4a59e2da0c225d778b9a04a149e9a17b4dce6bfefc111240b3b165
 gem_azimuthal.grd 2b1cb2a358135035dbf6926f0e8ba8122f8b3a0477da5183943a1299ee602093
 gem_azimuthal.npz 57d9f69cfa08893ae2d388bda7fb15a1b306ba205e3f15b9e40e963e7db79b0f
+gem_big_endian.grd cf2828181311677081e86253e2b7eb5778450ee92cf6b8161dc0635687b1fd9d
+gem_climate.csv 24c562fac9d5b81c08092ce5cf5522b57c74a63a184864e211016110fbc983c6
+gem_climate.sfc 92f7fc278e16a9f2fbdb054b6c2a0f9f2aea420658841687e34ad2ad9a6552a7
 gem_conical.grd 0184d05a596d623171135fc3243060aef69da8601c5ef8448bb6f39914474c3d
 gem_conical.npz fd77f4e4fb2884087bc022c957277b4ec11bb21a35c9d3e261b64df425b1da28
 gem_cylindrical.grd 1b2ee56e4ee50d8d3012aa319f24a7c09e092db8e8a5a8f58e2d857a8fd1520f
 gem_cylindrical.npz 68ea33a58b24b651254dd4d2c935c33fe4f414ffd404296c9098b6404a37e408
+gem_little_endian.grd 12c5c094c6505697c89426f362aa9bd6ba7de12f25c690ea73fb6d0159925536
+gem_merged_nopack.csv 57ac3d9dc6b2461f0d0b8739452dceaa88f11fc8d55f75d8f05ed0981af0bf97
+gem_merged_nopack.snd 141a1122e9638b0dde5d2ec424c03748d93577b84f10bbe7c29d9d3228200a88
 gem_model_mrg.csv 304afb3bb4b662f7adfc8803e8661fc217a691d0b1790d5e0e6d085f198c4257
 gem_model_mrg.snd 6d229a7af8d3e86d8da02403023cdaf0f86b6c982eb8b6f7f84c3836811df014
 gem_multi_time.grd a11746f162f2ea3944573246a477db94026f76e72ce7ecb98bd08c8475e2ca3f
+gem_multilevel_multidate.grd a5642ad733e88e9512d979b7c95049e9572591622d9ac94645622c1723742909
 gem_packing_dec.grd 547cda1ffb5e143623a33afce76a6cfa8c3c543eed04cf388f3005ccf1ba677d
 gem_packing_dec.npz b4b3b838b245c0e0d0d803c049a9169c34fe1c09f267d98df5a0d0586297d18d
 gem_packing_diff.grd 35de37fb378fe1c8cb1be56962ab3496ee079a3c6df4ef838cf1e2e8fd86da3f
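
Each manifest entry pairs a test-data file with its SHA-256 digest, so a
fetched file can be verified along these lines (a sketch; the local path is
assumed):

    import hashlib

    def sha256sum(path):
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(1 << 20), b''):
                h.update(chunk)
        return h.hexdigest()

    expected = '9344358cb53f2f9449b5a984d40a7dee8048fe6ecbb7fe219ab2420f8ee18776'
    assert sha256sum('KJKL_20240227_102059') == expected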


=====================================
src/metpy/units.py
=====================================
@@ -82,6 +82,7 @@ def setup_registry(reg):
                '= degreeN')
     reg.define('degrees_east = degree = degrees_E = degreesE = degree_east = degree_E '
                '= degreeE')
+    reg.define('dBz = 1e-18 m^3; logbase: 10; logfactor: 10 = dBZ')
 
     # Alias geopotential meters (gpm) to just meters
     reg.define('@alias meter = gpm')
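
The new definition makes dBZ a logarithmic unit referenced to Z = 1 mm^6/m^3,
so 0 dBZ is exactly that value and every 10 dBZ multiplies the linear
reflectivity factor by 10. A quick hedged example:

    from metpy.units import units

    z = (40 * units.dBz).to('mm^6/m^3')
    print(z)  # 10 ** (40 / 10): expect roughly 10000 mm^6/m^3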


=====================================
src/metpy/xarray.py
=====================================
@@ -115,10 +115,10 @@ class MetPyDataArrayAccessor:
         >>> temperature = xr.DataArray([[0, 1], [2, 3]] * units.degC, dims=('lat', 'lon'),
         ...                            coords={'lat': [40, 41], 'lon': [-105, -104]})
         >>> temperature.metpy.x
-        <xarray.DataArray 'lon' (lon: 2)>
+        <xarray.DataArray 'lon' (lon: 2)> Size: 16B
         array([-105, -104])
         Coordinates:
-          * lon      (lon) int64 -105 -104
+          * lon      (lon) int64 16B -105 -104
         Attributes:
             _metpy_axis:  x,longitude
 
@@ -338,15 +338,16 @@ class MetPyDataArrayAccessor:
     def _generate_coordinate_map(self):
         """Generate a coordinate map via CF conventions and other methods."""
         coords = self._data_array.coords.values()
-        # Parse all the coordinates, attempting to identify x, longitude, y, latitude,
-        # vertical, time
-        coord_lists = {'time': [], 'vertical': [], 'y': [], 'latitude': [], 'x': [],
-                       'longitude': []}
+        # Parse all the coordinates, attempting to identify longitude, latitude, x, y,
+        # time, vertical, in that order.
+        coord_lists = {'longitude': [], 'latitude': [], 'x': [], 'y': [], 'time': [],
+                       'vertical': []}
         for coord_var in coords:
             # Identify the coordinate type using check_axis helper
             for axis in coord_lists:
                 if check_axis(coord_var, axis):
                     coord_lists[axis].append(coord_var)
+                    break  # Ensure a coordinate variable only goes to one axis
 
         # Fill in x/y with longitude/latitude if x/y not otherwise present
         for geometric, graticule in (('y', 'latitude'), ('x', 'longitude')):
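
A toy illustration of why the reordering plus break matters; the predicates
below are stand-ins for check_axis, not MetPy internals. A longitude variable
that would also pass the looser x test now lands on a single axis:

    def classify(names, tests):
        coord_lists = {axis: [] for axis in tests}  # insertion order = priority
        for name in names:
            for axis, test in tests.items():
                if test(name):
                    coord_lists[axis].append(name)
                    break  # first (most specific) match wins
        return coord_lists

    tests = {'longitude': lambda n: n == 'lon',
             'x': lambda n: n in ('x', 'lon')}  # 'lon' could double as x
    print(classify(['lon', 'x'], tests))
    # {'longitude': ['lon'], 'x': ['x']} -- no duplicate x candidates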


=====================================
tests/calc/test_basic.py
=====================================
@@ -104,7 +104,7 @@ def test_direction_with_north_and_calm(array_type):
 def test_direction_dimensions():
     """Verify wind_direction returns degrees."""
     d = wind_direction(3. * units('m/s'), 4. * units('m/s'))
-    assert str(d.units) == 'degree'
+    assert d.units == units('degree')
 
 
 def test_oceanographic_direction(array_type):


=====================================
tests/calc/test_calc_tools.py
=====================================
@@ -933,6 +933,14 @@ def test_angle_to_direction_level_1():
     assert_array_equal(output_dirs, expected_dirs)
 
 
+def test_angle_to_direction_ndarray():
+    """Test array of angles in degree with a 2d numpy array."""
+    expected_dirs = np.array([['E', 'W'], ['E', 'W']])
+    input_angle = np.array([[90, 270], [90, 270]])
+    output_dirs = angle_to_direction(input_angle, level=1)
+    assert_array_equal(output_dirs, expected_dirs)
+
+
 def test_azimuth_range_to_lat_lon():
     """Test conversion of azimuth and range to lat/lon grid."""
     az = [332.2403, 334.6765, 337.2528, 339.73846, 342.26257]


=====================================
tests/calc/test_thermo.py
=====================================
@@ -1747,14 +1747,14 @@ def test_mixing_ratio_dimensions():
     """Verify mixing ratio returns a dimensionless number."""
     p = 998. * units.mbar
     e = 73.75 * units.hPa
-    assert str(mixing_ratio(e, p).units) == 'dimensionless'
+    assert mixing_ratio(e, p).units == units('dimensionless')
 
 
 def test_saturation_mixing_ratio_dimensions():
     """Verify saturation mixing ratio returns a dimensionless number."""
     p = 998. * units.mbar
     temp = 20 * units.celsius
-    assert str(saturation_mixing_ratio(p, temp).units) == 'dimensionless'
+    assert saturation_mixing_ratio(p, temp).units == units('dimensionless')
 
 
 def test_mixing_ratio_from_rh_dimensions():
@@ -1762,8 +1762,8 @@ def test_mixing_ratio_from_rh_dimensions():
     p = 1000. * units.mbar
     temperature = 0. * units.degC
     rh = 100. * units.percent
-    assert (str(mixing_ratio_from_relative_humidity(p, temperature, rh).units)
-            == 'dimensionless')
+    assert (mixing_ratio_from_relative_humidity(p, temperature, rh).units
+            == units('dimensionless'))
 
 
 @pytest.fixture
@@ -1936,9 +1936,9 @@ def test_dewpoint_specific_humidity_old_signature():
     p = 1013.25 * units.mbar
     temperature = 20. * units.degC
     q = 0.012 * units.dimensionless
-    with pytest.deprecated_call(match='Temperature argument'):
-        with pytest.raises(ValueError, match='changed in version'):
-            dewpoint_from_specific_humidity(q, temperature, p)
+    with (pytest.deprecated_call(match='Temperature argument'),
+          pytest.raises(ValueError, match='changed in version')):
+        dewpoint_from_specific_humidity(q, temperature, p)
 
 
 def test_dewpoint_specific_humidity_kwargs():


=====================================
tests/io/test_gempak.py
=====================================
@@ -7,7 +7,7 @@ from datetime import datetime
 import logging
 
 import numpy as np
-from numpy.testing import assert_allclose, assert_almost_equal
+from numpy.testing import assert_allclose, assert_almost_equal, assert_equal
 import pandas as pd
 import pytest
 
@@ -17,6 +17,18 @@ from metpy.io.gempak import GempakGrid, GempakSounding, GempakSurface
 logging.getLogger('metpy.io.gempak').setLevel(logging.ERROR)
 
 
+@pytest.mark.parametrize('order', ['little', 'big'])
+def test_byte_swap(order):
+    """"Test byte swapping."""
+    g = get_test_data(f'gem_{order}_endian.grd')
+
+    grid = GempakGrid(g).gdxarray()[0].squeeze()
+
+    reference = np.ones((113, 151), dtype='int32')
+
+    assert_equal(grid, reference)
+
+
 @pytest.mark.parametrize('grid_name', ['none', 'diff', 'dec', 'grib'])
 def test_grid_loading(grid_name):
     """Test reading grids with different packing."""
@@ -94,6 +106,35 @@ def test_merged_sounding():
     np.testing.assert_allclose(gdtar, ddtar, rtol=1e-10, atol=1e-2)
 
 
+def test_merged_sounding_no_packing():
+    """Test loading a merged sounding without data packing."""
+    gso = GempakSounding(get_test_data('gem_merged_nopack.snd')).snxarray(
+        station_id='OUN')
+
+    gpres = gso[0].pressure.values
+    gtemp = gso[0].temp.values.squeeze()
+    gdwpt = gso[0].dwpt.values.squeeze()
+    gdrct = gso[0].drct.values.squeeze()
+    gsped = gso[0].sped.values.squeeze()
+    ghght = gso[0].hght.values.squeeze()
+
+    gempak = pd.read_csv(get_test_data('gem_merged_nopack.csv', as_file_obj=False),
+                         na_values=-9999)
+    dpres = gempak.PRES.values
+    dtemp = gempak.TEMP.values
+    ddwpt = gempak.DWPT.values
+    ddrct = gempak.DRCT.values
+    dsped = gempak.SPED.values
+    dhght = gempak.HGHT.values
+
+    assert_allclose(gpres, dpres, rtol=1e-10, atol=1e-2)
+    assert_allclose(gtemp, dtemp, rtol=1e-10, atol=1e-2)
+    assert_allclose(gdwpt, ddwpt, rtol=1e-10, atol=1e-2)
+    assert_allclose(gdrct, ddrct, rtol=1e-10, atol=1e-2)
+    assert_allclose(gsped, dsped, rtol=1e-10, atol=1e-2)
+    assert_allclose(ghght, dhght, rtol=1e-10, atol=1e-1)
+
+
 @pytest.mark.parametrize('gem,gio,station', [
     ('gem_sigw_hght_unmrg.csv', 'gem_sigw_hght_unmrg.snd', 'TOP'),
     ('gem_sigw_pres_unmrg.csv', 'gem_sigw_pres_unmrg.snd', 'WAML')
@@ -160,6 +201,24 @@ def test_unmerged_sigw_pressure_sounding():
     assert_allclose(ghght, dhght, rtol=1e-10, atol=1e-1)
 
 
+def test_climate_surface():
+    """Test to read a cliamte surface file."""
+    gsf = GempakSurface(get_test_data('gem_climate.sfc'))
+    gstns = gsf.sfjson()
+
+    gempak = pd.read_csv(get_test_data('gem_climate.csv', as_file_obj=False))
+    gempak['YYMMDD/HHMM'] = pd.to_datetime(gempak['YYMMDD/HHMM'], format='%y%m%d/%H%M')
+    gempak = gempak.set_index(['STN', 'YYMMDD/HHMM'])
+
+    for stn in gstns:
+        idx_key = (stn['properties']['station_id'],
+                   stn['properties']['date_time'])
+        gemsfc = gempak.loc[idx_key, :]
+
+        for param, val in stn['values'].items():
+            assert val == pytest.approx(gemsfc[param.upper()])
+
+
 def test_standard_surface():
     """Test to read a standard surface file."""
     skip = ['text', 'spcl']
@@ -261,6 +320,20 @@ def test_date_parsing():
     assert dat == datetime(2000, 1, 2)
 
 
+@pytest.mark.parametrize('access_type', ['STID', 'STNM'])
+def test_surface_access(access_type):
+    """Test for proper surface retrieval with multi-parameter filter."""
+    g = get_test_data('gem_surface_with_text.sfc')
+    gsf = GempakSurface(g)
+
+    if access_type == 'STID':
+        gsf.sfjson(station_id='MSN', country='US', state='WI',
+                   date_time='202109070000')
+    elif access_type == 'STNM':
+        gsf.sfjson(station_number=726410, country='US', state='WI',
+                   date_time='202109070000')
+
+
 @pytest.mark.parametrize('text_type,date_time', [
     ('text', '202109070000'), ('spcl', '202109071600')
 ])
@@ -276,6 +349,20 @@ def test_surface_text(text_type, date_time):
     assert text == gem_text
 
 
+@pytest.mark.parametrize('access_type', ['STID', 'STNM'])
+def test_sounding_access(access_type):
+    """Test for proper sounding retrieval with multi-parameter filter."""
+    g = get_test_data('gem_merged_nopack.snd')
+    gso = GempakSounding(g)
+
+    if access_type == 'STID':
+        gso.snxarray(station_id='OUN', country='US', state='OK',
+                     date_time='202101200000')
+    elif access_type == 'STNM':
+        gso.snxarray(station_number=72357, country='US', state='OK',
+                     date_time='202101200000')
+
+
 @pytest.mark.parametrize('text_type', ['txta', 'txtb', 'txtc', 'txpb'])
 def test_sounding_text(text_type):
     """Test for proper decoding of coded message text."""
@@ -313,6 +400,22 @@ def test_special_surface_observation():
     assert stn['vsby'] == 2
 
 
+def test_multi_level_multi_time_access():
+    """Test accessing data with multiple levels and times."""
+    g = get_test_data('gem_multilevel_multidate.grd')
+
+    grid = GempakGrid(g)
+
+    grid.gdxarray(
+        parameter='STPC',
+        date_time='202403040000',
+        coordinate='HGHT',
+        level=0,
+        date_time2='202403050000',
+        level2=1
+    )
+
+
 def test_multi_time_grid():
     """Test files with multiple times on a single grid."""
     g = get_test_data('gem_multi_time.grd')


=====================================
tests/io/test_nexrad.py
=====================================
@@ -109,6 +109,12 @@ def test_msg15():
     assert f.clutter_filter_map['datetime'] == datetime(2013, 5, 19, 5, 15, 0, 0)
 
 
+def test_msg18_novcps():
+    """Check handling of message type 18 with VCP info now spares does not crash."""
+    f = Level2File(get_test_data('KJKL_20240227_102059', as_file_obj=False))
+    assert 'VCPAT11' not in f.rda
+
+
 def test_single_chunk(caplog):
     """Check that Level2File copes with reading a file containing a single chunk."""
     # Need to override the test level set above


=====================================
tests/test_xarray.py
=====================================
@@ -273,6 +273,14 @@ def test_missing_grid_mapping_invalid(test_var_multidim_no_xy):
     assert 'metpy_crs' not in data_var.coords
 
 
+def test_xy_not_vertical(test_ds):
+    """Test not detecting x/y as a vertical coordinate based on metadata."""
+    test_ds.x.attrs['positive'] = 'up'
+    test_ds.y.attrs['positive'] = 'up'
+    data_var = test_ds.metpy.parse_cf('Temperature')
+    assert data_var.metpy.vertical.identical(data_var.coords['isobaric'])
+
+
 def test_missing_grid_mapping_var(caplog):
     """Test behavior when we can't find the variable pointed to by grid_mapping."""
     x = xr.DataArray(np.arange(3),
@@ -407,10 +415,10 @@ def test_resolve_axis_conflict_double_lonlat(test_ds_generic):
     test_ds_generic['d'].attrs['_CoordinateAxisType'] = 'Lat'
     test_ds_generic['e'].attrs['_CoordinateAxisType'] = 'Lon'
 
-    with pytest.warns(UserWarning, match='More than one x coordinate'),\
+    with pytest.warns(UserWarning, match=r'More than one \w+ coordinate'),\
             pytest.raises(AttributeError):
         test_ds_generic['test'].metpy.x
-    with pytest.warns(UserWarning, match='More than one y coordinate'),\
+    with pytest.warns(UserWarning, match=r'More than one \w+ coordinate'),\
             pytest.raises(AttributeError):
         test_ds_generic['test'].metpy.y
 
@@ -422,10 +430,10 @@ def test_resolve_axis_conflict_double_xy(test_ds_generic):
     test_ds_generic['d'].attrs['standard_name'] = 'projection_x_coordinate'
     test_ds_generic['e'].attrs['standard_name'] = 'projection_y_coordinate'
 
-    with pytest.warns(UserWarning, match='More than one x coordinate'),\
+    with pytest.warns(UserWarning, match=r'More than one \w+ coordinate'),\
             pytest.raises(AttributeError):
         test_ds_generic['test'].metpy.x
-    with pytest.warns(UserWarning, match='More than one y coordinate'),\
+    with pytest.warns(UserWarning, match=r'More than one \w+ coordinate'),\
             pytest.raises(AttributeError):
         test_ds_generic['test'].metpy.y
 


=====================================
tests/units/test_units.py
=====================================
@@ -8,7 +8,8 @@ import numpy as np
 import pandas as pd
 import pytest
 
-from metpy.testing import assert_array_almost_equal, assert_array_equal, assert_nan
+from metpy.testing import (assert_almost_equal, assert_array_almost_equal, assert_array_equal,
+                           assert_nan)
 from metpy.units import (check_units, concatenate, is_quantity,
                          pandas_dataframe_to_unit_arrays, units)
 
@@ -176,6 +177,7 @@ def test_added_degrees_units():
     assert units('degrees_north').to_base_units().units == units.radian
     assert units('degrees_east') == units('degrees')
     assert units('degrees_east').to_base_units().units == units.radian
+    assert_almost_equal(0 * units.dBz, 1 * units('mm^6/m^3'))
 
 
 def test_is_quantity():
@@ -193,7 +195,7 @@ def test_is_quantity_multiple():
 def test_gpm_unit():
     """Test that the gpm unit does alias to meters."""
     x = 1 * units('gpm')
-    assert str(x.units) == 'meter'
+    assert x.units == units('meter')
 
 
 def test_assert_nan():
@@ -210,7 +212,7 @@ def test_assert_nan_checks_units():
 
 def test_percent_units():
     """Test that percent sign units are properly parsed and interpreted."""
-    assert str(units('%').units) == 'percent'
+    assert units('%').units == units('percent')
 
 
 @pytest.mark.parametrize(



View it on GitLab: https://salsa.debian.org/debian-gis-team/metpy/-/commit/30b0a37203785156f59e90aea82c385c54709b4e
