[Git][debian-gis-team/owslib][master] 6 commits: New upstream version 0.32.1
Bas Couwenberg (@sebastic)
gitlab at salsa.debian.org
Thu Jan 23 18:42:46 GMT 2025
Bas Couwenberg pushed to branch master at Debian GIS Project / owslib
Commits:
e1b31660 by Bas Couwenberg at 2025-01-23T19:18:12+01:00
New upstream version 0.32.1
- - - - -
733cc25b by Bas Couwenberg at 2025-01-23T19:18:16+01:00
Update upstream source from tag 'upstream/0.32.1'
Update to upstream version '0.32.1'
with Debian dir f236fe4f3770d0c0a85780498c215bf46c4b6780
- - - - -
f530d634 by Bas Couwenberg at 2025-01-23T19:18:35+01:00
New upstream release.
- - - - -
e99d016b by Bas Couwenberg at 2025-01-23T19:38:31+01:00
Skip online tests and doctests.
- - - - -
b54e533e by Bas Couwenberg at 2025-01-23T19:38:34+01:00
Use autopkgtest-pkg-pybuild testsuite.
- - - - -
f588d367 by Bas Couwenberg at 2025-01-23T19:39:08+01:00
Set distribution to unstable.
- - - - -
26 changed files:
- .github/workflows/main.yml
- .readthedocs.yaml
- debian/changelog
- debian/control
- debian/rules
- owslib/__init__.py
- owslib/coverage/wcs110.py
- owslib/feature/wfs100.py
- owslib/feature/wfs110.py
- owslib/feature/wfs200.py
- owslib/iso.py
- owslib/map/wms111.py
- owslib/map/wms130.py
- owslib/ogcapi/__init__.py
- owslib/ogcapi/coverages.py
- owslib/util.py
- requirements-dev.txt
- tests/test_csw_geonetwork.py
- tests/test_iso_parsing.py
- tests/test_ogcapi_connectedsystems_osh.py
- + tests/test_ogcapi_coverages.py
- tests/test_ogcapi_records_pycsw.py
- tests/test_wfs_generic.py
- tests/test_wms_getmap.py
- tests/test_wmts.py
- tox.ini
Changes:
=====================================
.github/workflows/main.yml
=====================================
@@ -8,9 +8,6 @@ jobs:
strategy:
matrix:
python-version: ["3.10", "3.11"]
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- COVERALLS_SERVICE_NAME: github
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v5
@@ -27,8 +24,24 @@ jobs:
pip3 install -r docs/requirements.txt
- name: run tests ⚙️
run: python3 -m pytest
+ - name: run tests in offline mode
+ if: matrix.python-version == '3.10'
+ run: |
+ python3 -m pytest \
+ -m "not online" \
+ --disable-socket \
+ --deselect="tests/doctests/wcs_thredds.txt::wcs_thredds.txt" \
+ --deselect="tests/doctests/wfs_MapServerWFSCapabilities.txt::wfs_MapServerWFSCapabilities.txt" \
+ --deselect="tests/doctests/wms_geoserver_mass_gis.txt::wms_geoserver_mass_gis.txt"
- name: run coveralls ⚙️
run: coveralls
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ COVERALLS_PARALLEL: true
+ COVERALLS_FLAG_NAME: "${{ matrix.python-version }}"
+ COVERALLS_SERVICE_NAME: github
+ COVERALLS_SERVICE_JOB_ID: "${{ github.run_id }}"
+ COVERALLS_SERVICE_NUMBER: "${{ github.workflow }}-${{ github.run_number }}"
- name: build docs 🏗️
run: cd docs && make html
- name: run flake8 ⚙️
=====================================
.readthedocs.yaml
=====================================
@@ -20,3 +20,6 @@ sphinx:
python:
install:
- requirements: docs/requirements.txt
+
+formats:
+ - pdf
=====================================
debian/changelog
=====================================
@@ -1,3 +1,12 @@
+owslib (0.32.1-1) unstable; urgency=medium
+
+ * Team upload.
+ * New upstream release.
+ * Skip online tests and doctests.
+ * Use autopkgtest-pkg-pybuild testsuite.
+
 -- Bas Couwenberg <sebastic@debian.org>  Thu, 23 Jan 2025 19:38:58 +0100
+
owslib (0.32.0-1) unstable; urgency=medium
* Team upload.
=====================================
debian/control
=====================================
@@ -11,6 +11,8 @@ Build-Depends: debhelper-compat (= 13),
python3-dateutil,
python3-lxml,
python3-pytest,
+ python3-pytest-cov,
+ python3-pytest-socket,
python3-requests,
python3-setuptools,
python3-yaml
@@ -19,6 +21,7 @@ Vcs-Browser: https://salsa.debian.org/debian-gis-team/owslib
Vcs-Git: https://salsa.debian.org/debian-gis-team/owslib.git
Homepage: https://github.com/geopython/OWSLib
Rules-Requires-Root: no
+Testsuite: autopkgtest-pkg-pybuild
Package: python3-owslib
Architecture: all
=====================================
debian/rules
=====================================
@@ -2,9 +2,7 @@
# -*- makefile -*-
export PYBUILD_NAME=owslib
+export PYBUILD_TEST_ARGS=-vv -m "not online" -k "not doctests"
%:
dh $@ --buildsystem pybuild
-
-# Skip tests, require internet access
-override_dh_auto_test:
=====================================
owslib/__init__.py
=====================================
@@ -1 +1 @@
-__version__ = '0.32.0'
+__version__ = '0.32.1'
=====================================
owslib/coverage/wcs110.py
=====================================
@@ -176,7 +176,7 @@ class WebCoverageService_1_1_0(WCSBase):
request['identifier'] = identifier
# request['identifier'] = ','.join(identifier)
if bbox:
- request['boundingbox'] = ','.join([repr(x) for x in bbox])
+ request['boundingbox'] = ','.join([str(x) for x in bbox])
if time:
request['timesequence'] = ','.join(time)
request['format'] = format
=====================================
owslib/feature/wfs100.py
=====================================
@@ -73,48 +73,6 @@ class WebFeatureService_1_0_0(object):
Implements IWebFeatureService.
"""
-
- def __new__(
- self,
- url,
- version,
- xml,
- parse_remote_metadata=False,
- timeout=30,
- headers=None,
- username=None,
- password=None,
- auth=None,
- ):
- """ overridden __new__ method
-
- @type url: string
- @param url: url of WFS capabilities document
- @type xml: string
- @param xml: elementtree object
- @type parse_remote_metadata: boolean
- @param parse_remote_metadata: whether to fully process MetadataURL elements
- @param headers: HTTP headers to send with requests
- @param timeout: time (in seconds) after which requests should timeout
- @param username: service authentication username
- @param password: service authentication password
- @param auth: instance of owslib.util.Authentication
- @return: initialized WebFeatureService_1_0_0 object
- """
- obj = object.__new__(self)
- obj.__init__(
- url,
- version,
- xml,
- parse_remote_metadata,
- timeout,
- headers=headers,
- username=username,
- password=password,
- auth=auth,
- )
- return obj
-
def __getitem__(self, name):
""" check contents dictionary to allow dict like access to service layers"""
if name in list(self.__getattribute__("contents").keys()):
@@ -268,7 +226,7 @@ class WebFeatureService_1_0_0(object):
if featureid:
request["featureid"] = ",".join(featureid)
elif bbox and typename:
- request["bbox"] = ",".join([repr(x) for x in bbox])
+ request["bbox"] = ",".join([str(x) for x in bbox])
elif filter and typename:
request["filter"] = str(filter)
=====================================
owslib/feature/wfs110.py
=====================================
@@ -56,48 +56,6 @@ class WebFeatureService_1_1_0(WebFeatureService_):
Implements IWebFeatureService.
"""
-
- def __new__(
- self,
- url,
- version,
- xml,
- parse_remote_metadata=False,
- timeout=30,
- headers=None,
- username=None,
- password=None,
- auth=None,
- ):
- """ overridden __new__ method
-
- @type url: string
- @param url: url of WFS capabilities document
- @type xml: string
- @param xml: elementtree object
- @type parse_remote_metadata: boolean
- @param parse_remote_metadata: whether to fully process MetadataURL elements
- @param headers: HTTP headers to send with requests
- @param timeout: time (in seconds) after which requests should timeout
- @param username: service authentication username
- @param password: service authentication password
- @param auth: instance of owslib.util.Authentication
- @return: initialized WebFeatureService_1_1_0 object
- """
- obj = object.__new__(self)
- obj.__init__(
- url,
- version,
- xml,
- parse_remote_metadata,
- timeout,
- headers=headers,
- username=username,
- password=password,
- auth=auth,
- )
- return obj
-
def __getitem__(self, name):
""" check contents dictionary to allow dict like access to service layers"""
if name in list(self.__getattribute__("contents").keys()):
=====================================
owslib/feature/wfs200.py
=====================================
@@ -45,48 +45,6 @@ class WebFeatureService_2_0_0(WebFeatureService_):
Implements IWebFeatureService.
"""
-
- def __new__(
- self,
- url,
- version,
- xml,
- parse_remote_metadata=False,
- timeout=30,
- headers=None,
- username=None,
- password=None,
- auth=None,
- ):
- """ overridden __new__ method
-
- @type url: string
- @param url: url of WFS capabilities document
- @type xml: string
- @param xml: elementtree object
- @type parse_remote_metadata: boolean
- @param parse_remote_metadata: whether to fully process MetadataURL elements
- @param headers: HTTP headers to send with requests
- @param timeout: time (in seconds) after which requests should timeout
- @param username: service authentication username
- @param password: service authentication password
- @param auth: instance of owslib.util.Authentication
- @return: initialized WebFeatureService_2_0_0 object
- """
- obj = object.__new__(self)
- obj.__init__(
- url,
- version,
- xml,
- parse_remote_metadata,
- timeout,
- headers=headers,
- username=username,
- password=password,
- auth=auth,
- )
- return obj
-
def __getitem__(self, name):
""" check contents dictionary to allow dict like access to service layers"""
if name in list(self.__getattribute__("contents").keys()):
=====================================
owslib/iso.py
=====================================
@@ -27,6 +27,41 @@ def get_namespaces():
namespaces = get_namespaces()
+def testFirstCharOrAnchor(md,xpath):
+ """ function which checks if the first matching element is either charstring or anchor, if anchor, returns {name, url}, else {name} """
+ r = {'name': '', 'url': ''}
+ val = md.find(util.nspath_eval(xpath+'/gco:CharacterString', namespaces))
+ r['name'] = util.testXMLValue(val)
+ if r['name'] in [None,'']:
+ val = md.find(util.nspath_eval(xpath+'/gmx:Anchor', namespaces))
+ if val is not None:
+ r['name'] = util.testXMLValue(val)
+ r['url'] = val.attrib.get(util.nspath_eval('xlink:href', namespaces))
+ return r
+
+def testAllCharOrAnchor(md,xpath,aslist=True):
+ """
+ function which checks for each matching element if it is charstring or anchor, returns {name, uri}
+ the aslist parameter indicates to return 2 lists or single list of objects
+ """
+ ro = []
+ for i in md.findall(util.nspath_eval(xpath+'/gco:CharacterString', namespaces)):
+ r = {'name': '', 'url': ''}
+ r['name'] = util.testXMLValue(i)
+ ro.append(r)
+ for i in md.findall(util.nspath_eval(xpath+'/gmx:Anchor', namespaces)):
+ r = {'name': '', 'url': ''}
+ if i is not None:
+ r['name'] = util.testXMLValue(i)
+ r['url'] = i.attrib.get(util.nspath_eval('xlink:href', namespaces))
+ ro.append(r)
+ if aslist:
+ return ro
+ else:
+ return {'name': [i.get('name', '') for i in ro],
+ 'url': [i.get('url', '') for i in ro]}
+
+
class MD_Metadata(object):
""" Process gmd:MD_Metadata """
@@ -223,7 +258,9 @@ class CI_ResponsibleParty(object):
if md is None:
self.name = None
+ self.name_url = None
self.organization = None
+ self.organization_url = None
self.position = None
self.phone = None
self.fax = None
@@ -236,11 +273,14 @@ class CI_ResponsibleParty(object):
self.onlineresource = None
self.role = None
else:
- val = md.find(util.nspath_eval('gmd:individualName/gco:CharacterString', namespaces))
- self.name = util.testXMLValue(val)
- val = md.find(util.nspath_eval('gmd:organisationName/gco:CharacterString', namespaces))
- self.organization = util.testXMLValue(val)
+ frm = testFirstCharOrAnchor(md,'gmd:individualName')
+ self.name = frm['name']
+ self.name_url = frm['url']
+
+ frm = testFirstCharOrAnchor(md,'gmd:organisationName')
+ self.organization = frm['name']
+ self.organization_url = frm['url']
val = md.find(util.nspath_eval('gmd:positionName/gco:CharacterString', namespaces))
self.position = util.testXMLValue(val)
@@ -495,22 +535,9 @@ class MD_DataIdentification(object):
if val is not None:
self.classification.append(val)
- self.otherconstraints = []
- for i in md.findall(util.nspath_eval(
- 'gmd:resourceConstraints/gmd:MD_LegalConstraints/gmd:otherConstraints/gco:CharacterString',
- namespaces)):
- val = util.testXMLValue(i)
- if val is not None:
- self.otherconstraints.append(val)
- for i in md.findall(util.nspath_eval(
- 'gmd:resourceConstraints/gmd:MD_LegalConstraints/gmd:otherConstraints/gmx:Anchor',
- namespaces)):
- val = util.testXMLAttribute(i, util.nspath('href', namespaces["xlink"]))
- if val is None:
- val = util.testXMLValue(i)
- if val is not None:
- self.otherconstraints.append(val)
-
+ ocs = testAllCharOrAnchor(md,'gmd:resourceConstraints/gmd:MD_LegalConstraints/gmd:otherConstraints', False)
+ self.otherconstraints = ocs['name']
+ self.otherconstraints_url = ocs['url']
self.securityconstraints = []
for i in md.findall(util.nspath_eval(
@@ -688,25 +715,32 @@ class MD_Distribution(object):
def __init__(self, md=None):
if md is None:
self.format = None
+ self.format_url = None
self.version = None
+ self.version_url = None
+ self.specification = None
+ self.specification_url = None
self.distributor = []
self.online = []
pass
else:
- val = md.find(util.nspath_eval(
- 'gmd:distributionFormat/gmd:MD_Format/gmd:name/gco:CharacterString', namespaces))
- self.format = util.testXMLValue(val)
+ frm = testFirstCharOrAnchor(md,'gmd:distributionFormat/gmd:MD_Format/gmd:name')
+ self.format = frm['name']
+ self.format_url = frm['url']
+
+ vrs = testFirstCharOrAnchor(md,'gmd:distributionFormat/gmd:MD_Format/gmd:version')
+ self.version = vrs['name']
+ self.version_url = vrs['url']
- val = md.find(util.nspath_eval(
- 'gmd:distributionFormat/gmd:MD_Format/gmd:version/gco:CharacterString', namespaces))
- self.version = util.testXMLValue(val)
+ spc = testFirstCharOrAnchor(md,'gmd:distributionFormat/gmd:MD_Format/gmd:specification')
+ self.specification = spc['name']
+ self.specification_url = spc['url']
self.distributor = []
for dist in md.findall(util.nspath_eval('gmd:distributor', namespaces)):
self.distributor.append(MD_Distributor(dist))
self.online = []
-
for ol in md.findall(util.nspath_eval(
'gmd:transferOptions/gmd:MD_DigitalTransferOptions/gmd:onLine/gmd:CI_OnlineResource',
namespaces)):
@@ -718,6 +752,7 @@ class DQ_DataQuality(object):
def __init__(self, md=None):
if md is None:
self.conformancetitle = []
+ self.conformancetitle_url = []
self.conformancedate = []
self.conformancedatetype = []
self.conformancedegree = []
@@ -726,13 +761,9 @@ class DQ_DataQuality(object):
self.specificationtitle = None
self.specificationdate = []
else:
- self.conformancetitle = []
- for i in md.findall(util.nspath_eval(
- 'gmd:report/gmd:DQ_DomainConsistency/gmd:result/gmd:DQ_ConformanceResult/gmd:specification/gmd:CI_Citation/gmd:title/gco:CharacterString',
- namespaces)):
- val = util.testXMLValue(i)
- if val is not None:
- self.conformancetitle.append(val)
+ cts = testAllCharOrAnchor(md,'gmd:report/gmd:DQ_DomainConsistency/gmd:result/gmd:DQ_ConformanceResult/gmd:specification/gmd:CI_Citation/gmd:title', False)
+ self.conformancetitle = cts['name']
+ self.conformancetitle_url = cts['url']
self.conformancedate = []
for i in md.findall(util.nspath_eval(
@@ -758,14 +789,9 @@ class DQ_DataQuality(object):
if val is not None:
self.conformancedegree.append(val)
- val = md.find(util.nspath_eval(
- 'gmd:lineage/gmd:LI_Lineage/gmd:statement/gco:CharacterString', namespaces))
- self.lineage = util.testXMLValue(val)
-
- val = md.find(util.nspath_eval('gmd:lineage/gmd:LI_Lineage/gmd:statement/gmx:Anchor', namespaces))
- if val is not None:
- self.lineage = util.testXMLValue(val)
- self.lineage_url = val.attrib.get(util.nspath_eval('xlink:href', namespaces))
+ lng = testFirstCharOrAnchor(md, 'gmd:lineage/gmd:LI_Lineage/gmd:statement')
+ self.lineage = lng['name']
+ self.lineage_url = lng['url']
val = md.find(util.nspath_eval(
'gmd:report/gmd:DQ_DomainConsistency/gmd:result/gmd:DQ_ConformanceResult/gmd:specification/gmd:CI_Citation/gmd:title/gco:CharacterString',
@@ -843,18 +869,33 @@ class CI_OnlineResource(object):
if md is None:
self.url = None
self.protocol = None
+ self.protocol_url = None
self.name = None
+ self.name_url = None
self.description = None
+ self.description_url = None
self.function = None
+ self.applicationprofile = None
+ self.applicationprofile_url = None
else:
val = md.find(util.nspath_eval('gmd:linkage/gmd:URL', namespaces))
self.url = util.testXMLValue(val)
- val = md.find(util.nspath_eval('gmd:protocol/gco:CharacterString', namespaces))
- self.protocol = util.testXMLValue(val)
+ val = testFirstCharOrAnchor(md,'gmd:protocol')
+ self.protocol = val['name']
+ self.protocol_url = val['url']
- val = md.find(util.nspath_eval('gmd:name/gco:CharacterString', namespaces))
- self.name = util.testXMLValue(val)
+ val = testFirstCharOrAnchor(md,'gmd:name')
+ self.name = val['name']
+ self.name_url = val['url']
+
+ val = testFirstCharOrAnchor(md,'gmd:description')
+ self.description = val['name']
+ self.description_url = val['url']
+
+ val = testFirstCharOrAnchor(md,'gmd:applicationProfile')
+ self.applicationprofile = val['name']
+ self.applicationprofile_url = val['url']
val = md.find(util.nspath_eval('gmd:description/gco:CharacterString', namespaces))
self.description = util.testXMLValue(val)
@@ -954,33 +995,23 @@ class MD_ReferenceSystem(object):
def __init__(self, md=None):
if md is None:
self.code = None
+ self.code_url = None
self.codeSpace = None
+ self.codeSpace_url = None
self.version = None
+ self.version_url = None
else:
- val = md.find(util.nspath_eval(
- 'gmd:referenceSystemIdentifier/gmd:RS_Identifier/gmd:code/gco:CharacterString', namespaces))
- if val is None:
- val = md.find(util.nspath_eval(
- 'gmd:referenceSystemIdentifier/gmd:RS_Identifier/gmd:code/gmx:Anchor', namespaces))
- if val is not None:
- self.code = util.testXMLValue(val)
- else:
- self.code = None
+ val = testFirstCharOrAnchor(md,'gmd:referenceSystemIdentifier/gmd:RS_Identifier/gmd:code')
+ self.code_url = val['url']
+ self.code = val['name']
- val = md.find(util.nspath_eval(
- 'gmd:referenceSystemIdentifier/gmd:RS_Identifier/gmd:codeSpace/gco:CharacterString', namespaces))
- if val is not None:
- self.codeSpace = util.testXMLValue(val)
- else:
- self.codeSpace = None
-
- val = md.find(util.nspath_eval(
- 'gmd:referenceSystemIdentifier/gmd:RS_Identifier/gmd:version/gco:CharacterString', namespaces))
- if val is not None:
- self.version = util.testXMLValue(val)
- else:
- self.version = None
+ val = testFirstCharOrAnchor(md,'gmd:referenceSystemIdentifier/gmd:RS_Identifier/gmd:codeSpace')
+ self.codeSpace_url = val['url']
+ self.codeSpace = val['name']
+ val = testFirstCharOrAnchor(md,'gmd:referenceSystemIdentifier/gmd:RS_Identifier/gmd:version')
+ self.version_url = val['url']
+ self.version = val['name']
def _testCodeListValue(elpath):
""" get gco:CodeListValue_Type attribute, else get text content """
=====================================
owslib/map/wms111.py
=====================================
@@ -172,7 +172,7 @@ class WebMapService_1_1_1(object):
request['height'] = str(size[1])
request['srs'] = str(srs)
- request['bbox'] = ','.join([repr(x) for x in bbox])
+ request['bbox'] = ','.join([str(x) for x in bbox])
request['format'] = str(format)
request['transparent'] = str(transparent).upper()
request['exceptions'] = str(exceptions)
=====================================
owslib/map/wms130.py
=====================================
@@ -182,7 +182,7 @@ class WebMapService_1_3_0(object):
# remapping the srs to crs for the request
request['crs'] = str(srs)
- request['bbox'] = ','.join([repr(x) for x in bbox])
+ request['bbox'] = ','.join([str(x) for x in bbox])
request['format'] = str(format)
request['transparent'] = str(transparent).upper()
request['exceptions'] = str(exceptions)
=====================================
owslib/ogcapi/__init__.py
=====================================
@@ -129,13 +129,19 @@ class API:
@returns: fully constructed URL path
"""
+ def urljoin_(url2, path2):
+ if '//' not in path2:
+ return urljoin(url2, path2)
+ else:
+ return '/'.join([url2.rstrip('/'), path2])
+
url = self.url
if self.url_query_string is not None:
LOGGER.debug('base URL has a query string')
- url = urljoin(url, path)
+ url = urljoin_(url, path)
url = '?'.join([url, self.url_query_string])
else:
- url = urljoin(url, path)
+ url = urljoin_(url, path)
if params:
url = '?'.join([url, urlencode(params)])
=====================================
owslib/ogcapi/coverages.py
=====================================
@@ -48,42 +48,84 @@ class Coverages(Collections):
@type collection_id: string
@param collection_id: id of collection
- @type properties: list
+ @type properties: tuple | list
@param properties: range subset
- @type subset: list of tuples
- @param subset: [(name, lower bound, upper bound)]
- @type scale_size: list of tuples
- @param scale_size: [(axis name, number)]
+ @type subset: list or dict of tuples/lists
+ @param subset:
+ [(name, lower bound, upper bound)]
+ [[name, lower bound, upper bound]]
+ {name: (lower bound, upper bound)}
+ {name: [lower bound, upper bound]}
+ @type scale_size: list of tuples or dict
+ @param scale_size: [(axis name, number)] | {axis name: number}
@type scale_factor: int
@param scale_factor: factor by which to scale the resulting coverage
- @type scale_axes: list of tuples
- @param scale_axes: [(axis name, number)]
+ @type scale_axes: list of tuples or dict
+ @param scale_axes: [(axis name, number)] | {axis name: number}
+ @type datetime: tuple | list | str
+ @param datetime:
+ tuple or list of start/end datetimes, or as 'start/end' string
+ start and end datetimes can be ".." for unbounded value
@returns: coverage data
"""
kwargs_ = {}
- if 'properties' in kwargs:
- kwargs_['properties'] = ','.join(
- [str(x) for x in kwargs['properties']])
+ if isinstance(kwargs.get('properties'), (tuple, list)):
+ kwargs_['properties'] = ','.join([
+ str(x) for x in kwargs['properties']
+ ])
for p in ['scale_axes', 'scale_size']:
if p in kwargs:
p2 = p.replace('_', '-')
+ if isinstance(kwargs[p], (tuple, list)):
+ items = kwargs[p]
+ elif isinstance(kwargs[p], dict):
+ items = [
+ (name, value)
+ for name, value
+ in kwargs[p].items()
+ ]
+ else:
+ continue
kwargs_[p2] = []
- for s in kwargs[p2]:
- val = f'{s[0]}({s[1]},{s[2]})'
- kwargs_[p2].append(val)
-
- if 'subset' in kwargs:
- subsets_list = []
- for s in kwargs['subset']:
- subsets_list.append(f'{s[0]}({s[1]}:{s[2]})')
- kwargs['subset'] = ','.join(subsets_list)
+ kwargs_[p2] = ",".join(
+ f'{s[0]}({s[1]})'
+ for s in items
+ )
if 'scale_factor' in kwargs:
- kwargs_['scale-factor'] = int(kwargs['scale_factor'])
+ scale_f = float(kwargs['scale_factor'])
+ scale_i = int(scale_f)
+ if scale_i == scale_f:
+ kwargs_['scale-factor'] = scale_i
+ else:
+ kwargs_['scale-factor'] = scale_f
+
+ if 'subset' in kwargs:
+ subset_items = []
+ subset_values = kwargs['subset']
+ if isinstance(subset_values, (tuple, list)):
+ subset_items = subset_values
+ elif isinstance(subset_values, dict):
+ subset_items = [
+ (name, *values)
+ for name, values
+ in subset_values.items()
+ ]
+ if subset_items:
+ kwargs_['subset'] = ','.join([
+ f'{s[0]}({s[1]}:{s[2]})'
+ for s in subset_items
+ ])
+
+ if 'datetime' in kwargs:
+ if isinstance(kwargs['datetime'], (tuple, list)):
+ kwargs_['datetime'] = '/'.join(kwargs['datetime'][:2])
+ else:
+ kwargs_['datetime'] = str(kwargs['datetime'])
path = f'collections/{collection_id}/coverage'
=====================================
owslib/util.py
=====================================
@@ -206,7 +206,7 @@ def openURL(url_base, data=None, method='Get', cookies=None, username=None, pass
req = requests.request(method.upper(), url_base, headers=headers, **rkwargs)
- if req.status_code in [400, 401]:
+ if req.status_code in [400, 401, 403]:
raise ServiceException(req.text)
if req.status_code in [404, 500, 502, 503, 504]: # add more if needed
=====================================
requirements-dev.txt
=====================================
@@ -2,6 +2,7 @@
flake8
pytest
pytest-cov
+pytest-socket
Pillow
tox
twine
=====================================
tests/test_csw_geonetwork.py
=====================================
@@ -19,6 +19,7 @@ def test_csw_geonetwork():
SERVICE_URL3 = 'https://metawal.wallonie.be/geonetwork/srv/eng/csw'
+@pytest.mark.online
@pytest.mark.skipif(not service_ok(SERVICE_URL3),
reason='service is unreachable')
@pytest.mark.parametrize("esn_in", ['full', 'summary'])
=====================================
tests/test_iso_parsing.py
=====================================
@@ -138,7 +138,7 @@ def test_md_parsing_dov():
assert_list(iden.classification, 0)
assert_list(iden.otherconstraints, 2)
- assert iden.otherconstraints[
+ assert iden.otherconstraints_url[
1] == "https://inspire.ec.europa.eu/metadata-codelist/ConditionsApplyingToAccessAndUse/noConditionsApply"
assert iden.otherconstraints[
0] == "Data beschikbaar voor hergebruik volgens de " \
@@ -611,7 +611,37 @@ def test_md_indentifier_anchor():
md = MD_Metadata(md_resource)
assert type(md) is MD_Metadata
assert md.referencesystem.code == 'ETRS89-GRS80'
+ assert md.referencesystem.code_url == 'http://www.opengis.net/def/crs/EPSG/0/4937'
+
iden = md.identification[0]
assert_list(iden.uricode, 1)
assert iden.uricode[0] == 'https://www.nationaalgeoregister.nl/geonetwork/srv/metadata/f44dac86-2228-412f-8355-e56446ca9933'
-
\ No newline at end of file
+ assert iden.contact[0].organization_url == 'http://standaarden.overheid.nl/owms/terms/Ministerie_van_Defensie'
+ assert iden.keywords[0].keywords[0].url == 'http://www.eionet.europa.eu/gemet/nl/inspire-theme/am'
+ assert_list(iden.otherconstraints, 3)
+ assert_list(iden.otherconstraints_url, 3)
+ assert iden.otherconstraints[0] == 'Geen beperkingen'
+ assert iden.otherconstraints_url[0] == 'http://creativecommons.org/publicdomain/mark/1.0/deed.nl'
+ assert iden.otherconstraints[2] == 'Geen beperkingen voor publieke toegang'
+ assert iden.otherconstraints_url[2] == 'http://inspire.ec.europa.eu/metadata-codelist/LimitationsOnPublicAccess/noLimitations'
+
+ dist = md.distribution
+ assert dist.format_url == 'http://www.iana.org/assignments/media-types/application/gml+xml'
+ assert dist.format == 'gml+xml'
+ assert dist.version == 'GML, version 3.2.1'
+ assert dist.specification_url == 'http://inspire.ec.europa.eu/id/document/tg/hy'
+ assert dist.specification == 'Data specificatie hydrografie'
+
+ assert dist.online[0].protocol == 'OGC:WMS'
+ assert dist.online[0].protocol_url == 'http://www.opengis.net/def/serviceType/ogc/wms'
+ assert dist.online[0].applicationprofile == 'view'
+ assert dist.online[0].applicationprofile_url == 'http://inspire.ec.europa.eu/metadata-codelist/SpatialDataServiceType/view'
+
+ assert dist.online[2].protocol == 'INSPIRE Atom'
+ assert dist.online[2].protocol_url == 'https://tools.ietf.org/html/rfc4287'
+ assert dist.online[2].applicationprofile == 'download'
+ assert dist.online[2].applicationprofile_url == 'http://inspire.ec.europa.eu/metadata-codelist/SpatialDataServiceType/download'
+
+ assert md.dataquality.lineage == 'Ministerie van Defensie, Koninklijke Marine, Dienst der Hydrografie'
+ assert md.dataquality.conformancetitle[0] == 'VERORDENING (EU) Nr. 1089/2010 VAN DE COMMISSIE van 23 november 2010 ter uitvoering van Richtlijn 2007/2/EG van het Europees Parlement en de Raad betreffende de interoperabiliteit van verzamelingen ruimtelijke gegevens en van diensten met betrekking tot ruimtelijke gegevens'
+ assert md.dataquality.conformancedegree[0] == 'true'
=====================================
tests/test_ogcapi_connectedsystems_osh.py
=====================================
@@ -16,308 +16,338 @@ from owslib.ogcapi.connectedsystems import Commands, ControlChannels, Datastream
from owslib.util import Authentication
-class OSHFixtures:
- NODE_TEST_OK_URL = 'http://34.67.197.57:8585/sensorhub/test'
- # Directs to OSH hosted test server
- TEST_URL = 'http://34.67.197.57:8585/sensorhub/api/'
- auth = Authentication('auto_test', 'automated_tester24')
- sml_headers = {'Content-Type': 'application/sml+json'}
- json_headers = {'Content-Type': 'application/json'}
- geojson_headers = {'Content-Type': 'application/geo+json'}
- omjson_headers = {'Content-Type': 'application/om+json'}
-
- system_definitions = [
- {
+@pytest.fixture(scope="session")
+def fixtures():
+ class OSHFixtures:
+ NODE_TEST_OK_URL = 'http://34.67.197.57:8585/sensorhub/test'
+ # Directs to OSH hosted test server
+ TEST_URL = 'http://34.67.197.57:8585/sensorhub/api/'
+ auth = Authentication('auto_test', 'automated_tester24')
+ sml_headers = {'Content-Type': 'application/sml+json'}
+ json_headers = {'Content-Type': 'application/json'}
+ geojson_headers = {'Content-Type': 'application/geo+json'}
+ omjson_headers = {'Content-Type': 'application/om+json'}
+
+ system_definitions = [
+ {
+ "type": "SimpleProcess",
+ "uniqueId": "urn:osh:sensor:testsmlsensor:001",
+ "label": "Test SML Sensor",
+ "description": "A Sensor created from an SML document",
+ "definition": "http://www.w3.org/ns/ssn/Sensor"
+ },
+ {
+ "type": "SimpleProcess",
+ "uniqueId": "urn:osh:sensor:testsmlsensor:002",
+ "label": "Test SML Sensor #2",
+ "description": "A Sensor created from an SML document",
+ "definition": "http://www.w3.org/ns/ssn/Sensor"
+ }
+ ]
+
+ sys_sml_to_update = {
"type": "SimpleProcess",
"uniqueId": "urn:osh:sensor:testsmlsensor:001",
"label": "Test SML Sensor",
"description": "A Sensor created from an SML document",
"definition": "http://www.w3.org/ns/ssn/Sensor"
- },
- {
+ }
+
+ sys_sml_def = {
"type": "SimpleProcess",
- "uniqueId": "urn:osh:sensor:testsmlsensor:002",
- "label": "Test SML Sensor #2",
+ "uniqueId": "urn:osh:sensor:testsmlsensor:solo",
+ "label": "Test SML Sensor - Created on its own",
"description": "A Sensor created from an SML document",
"definition": "http://www.w3.org/ns/ssn/Sensor"
}
- ]
-
- sys_sml_to_update = {
- "type": "SimpleProcess",
- "uniqueId": "urn:osh:sensor:testsmlsensor:001",
- "label": "Test SML Sensor",
- "description": "A Sensor created from an SML document",
- "definition": "http://www.w3.org/ns/ssn/Sensor"
- }
-
- sys_sml_def = {
- "type": "SimpleProcess",
- "uniqueId": "urn:osh:sensor:testsmlsensor:solo",
- "label": "Test SML Sensor - Created on its own",
- "description": "A Sensor created from an SML document",
- "definition": "http://www.w3.org/ns/ssn/Sensor"
- }
-
- sml_component = {
- "type": "SimpleProcess",
- "uniqueId": "urn:osh:sensor:testcomponent:001",
- "label": "Test Component",
- "description": "Test Component Description",
- "definition": "http://www.w3.org/ns/ssn/Sensor"
- }
-
- sml_procedure_test_system = {"type": "SimpleProcess",
- "uniqueId": "urn:osh:sensor:testsensorwithcomponents:001",
- "label": "Test Process/Datastream Sensor",
- "description": "A Sensor created to test procedure/datastream creation",
- "definition": "http://www.w3.org/ns/ssn/Sensor"}
- sml_procedure = {
- "type": "SimpleProcess",
- "id": "123456789",
- "description": "Test Procedure inserted via OWSLib",
- "uniqueId": "urn:osh:sensor:testprocedureows:001",
- "label": "Test Procedure - OWSLib",
- "definition": "http://www.w3.org/ns/sosa/Procedure"
- }
-
- deployment_definition = {
- "type": "Feature",
- "properties": {
- "featureType": "http://www.w3.org/ns/sosa/Deployment",
- "uid": "urn:osh:sensor:testdeployment:001",
- "name": "Test Deployment 001",
- "description": "A test deployment",
- "validTime": ["2024-01-01T00:00:00Z", "2024-12-31T23:59:59Z"]
- },
- # "geometry": "POINT(-80.0 35.0)"
- }
- system_id = 'blid74chqmses'
- deployment_expected_id = "vssamsrio5eb2"
- weatherstation_id = '0s2lbn2n1bnc8'
- datastream_id = 'etbrve0msmrre'
-
- feature_def = {
- "geometry": {
- "type": "Point",
- "coordinates": [-80.0, 35.0]
- },
- "type": "Feature",
- "properties": {
- "featureType": "http://www.w3.org/ns/sosa/Station",
- "uid": "urn:osh:sensor:teststation:001",
- "name": "Test Station 001",
- "description": "A test station",
- "parentSystem@link": {"href": "http://localhost:8585/sensorhub/api/systems/blid74chqmses"},
- "sampledFeature@link": {
- "href": "https://data.example.com/link/to/resource",
- "rel": "alternate",
- "type": "application/json",
- "hreflang": "en-US",
- "title": "Resource Name",
- "uid": "urn:x-org:resourceType:0001",
- "rt": "http://www.example.org/uri/of/concept",
- "if": "http://www.opengis.net/spec/spec-id/version"}
+
+ sml_component = {
+ "type": "SimpleProcess",
+ "uniqueId": "urn:osh:sensor:testcomponent:001",
+ "label": "Test Component",
+ "description": "Test Component Description",
+ "definition": "http://www.w3.org/ns/ssn/Sensor"
}
- }
-
- ds_definition = {
- "name": "Test Datastream",
- "outputName": "Test Output #1",
- "schema": {
- "obsFormat": "application/swe+json",
- "encoding": {
- "type": "JSONEncoding",
- "vectorAsArrays": False
+
+ sml_procedure_test_system = {"type": "SimpleProcess",
+ "uniqueId": "urn:osh:sensor:testsensorwithcomponents:001",
+ "label": "Test Process/Datastream Sensor",
+ "description": "A Sensor created to test procedure/datastream creation",
+ "definition": "http://www.w3.org/ns/ssn/Sensor"}
+ sml_procedure = {
+ "type": "SimpleProcess",
+ "id": "123456789",
+ "description": "Test Procedure inserted via OWSLib",
+ "uniqueId": "urn:osh:sensor:testprocedureows:001",
+ "label": "Test Procedure - OWSLib",
+ "definition": "http://www.w3.org/ns/sosa/Procedure"
+ }
+
+ deployment_definition = {
+ "type": "Feature",
+ "properties": {
+ "featureType": "http://www.w3.org/ns/sosa/Deployment",
+ "uid": "urn:osh:sensor:testdeployment:001",
+ "name": "Test Deployment 001",
+ "description": "A test deployment",
+ "validTime": ["2024-01-01T00:00:00Z", "2024-12-31T23:59:59Z"]
+ },
+ # "geometry": "POINT(-80.0 35.0)"
+ }
+ system_id = 'blid74chqmses'
+ deployment_expected_id = "vssamsrio5eb2"
+ weatherstation_id = '0s2lbn2n1bnc8'
+ datastream_id = 'etbrve0msmrre'
+
+ feature_def = {
+ "geometry": {
+ "type": "Point",
+ "coordinates": [-80.0, 35.0]
},
- "recordSchema": {
- "type": "DataRecord",
- "label": "Test Datastream Record",
- "updatable": False,
- "optional": False,
- "definition": "http://test.com/Record",
- "fields": [
- {
- "type": "Time",
- "label": "Test Datastream Time",
- "updatable": False,
- "optional": False,
- "definition": "http://test.com/Time",
- "name": "timestamp",
- "uom": {
- "href": "http://test.com/TimeUOM"
+ "type": "Feature",
+ "properties": {
+ "featureType": "http://www.w3.org/ns/sosa/Station",
+ "uid": "urn:osh:sensor:teststation:001",
+ "name": "Test Station 001",
+ "description": "A test station",
+ "parentSystem@link": {"href": "http://localhost:8585/sensorhub/api/systems/blid74chqmses"},
+ "sampledFeature@link": {
+ "href": "https://data.example.com/link/to/resource",
+ "rel": "alternate",
+ "type": "application/json",
+ "hreflang": "en-US",
+ "title": "Resource Name",
+ "uid": "urn:x-org:resourceType:0001",
+ "rt": "http://www.example.org/uri/of/concept",
+ "if": "http://www.opengis.net/spec/spec-id/version"}
+ }
+ }
+
+ ds_definition = {
+ "name": "Test Datastream",
+ "outputName": "Test Output #1",
+ "schema": {
+ "obsFormat": "application/swe+json",
+ "encoding": {
+ "type": "JSONEncoding",
+ "vectorAsArrays": False
+ },
+ "recordSchema": {
+ "type": "DataRecord",
+ "label": "Test Datastream Record",
+ "updatable": False,
+ "optional": False,
+ "definition": "http://test.com/Record",
+ "fields": [
+ {
+ "type": "Time",
+ "label": "Test Datastream Time",
+ "updatable": False,
+ "optional": False,
+ "definition": "http://test.com/Time",
+ "name": "timestamp",
+ "uom": {
+ "href": "http://test.com/TimeUOM"
+ }
+ },
+ {
+ "type": "Boolean",
+ "label": "Test Datastream Boolean",
+ "updatable": False,
+ "optional": False,
+ "definition": "http://test.com/Boolean",
+ "name": "testboolean"
}
- },
- {
- "type": "Boolean",
- "label": "Test Datastream Boolean",
- "updatable": False,
- "optional": False,
- "definition": "http://test.com/Boolean",
- "name": "testboolean"
- }
- ]
+ ]
+ }
}
}
- }
-
- systems_api = Systems(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
- procedure_api = Systems(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
- deployment_api = Deployments(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
- sampling_feature_api = SamplingFeatures(TEST_URL, auth=auth, headers=geojson_headers,
- alternate_sampling_feature_url='featuresOfInterest')
- properties_api = Properties(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
- datastream_api = Datastreams(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
- observations_api = Observations(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
- control_channels_api = ControlChannels(TEST_URL, auth=auth,
- headers={'Content-Type': 'application/json'})
- commands_api = Commands(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
- system_events_api = SystemEvents(TEST_URL, auth=auth, headers=omjson_headers)
- system_history_api = SystemHistory(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
-
- def update_dsid(self, ds_id):
- self.datastream_id = ds_id
+ systems_api = Systems(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+ procedure_api = Systems(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+ deployment_api = Deployments(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+ sampling_feature_api = SamplingFeatures(TEST_URL, auth=auth, headers=geojson_headers,
+ alternate_sampling_feature_url='featuresOfInterest')
+ properties_api = Properties(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+ datastream_api = Datastreams(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+ observations_api = Observations(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+ control_channels_api = ControlChannels(TEST_URL, auth=auth,
+ headers={'Content-Type': 'application/json'})
+ commands_api = Commands(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+ system_events_api = SystemEvents(TEST_URL, auth=auth, headers=omjson_headers)
+ system_history_api = SystemHistory(TEST_URL, auth=auth, headers={'Content-Type': 'application/json'})
+
+ def update_dsid(self, ds_id):
+ self.datastream_id = ds_id
+
+ def create_single_system(self):
+ sys_api = Systems(self.TEST_URL, auth=self.auth, headers=self.sml_headers)
+ sys_create_res = sys_api.system_create(json.dumps(self.system_definitions[0]))
+ sys_id = sys_api.response_headers['Location'].split('/')[-1]
+ return sys_id
+
+ def create_single_datastream(self, system_id: str):
+ ds_api = Datastreams(self.TEST_URL, auth=self.auth, headers=self.json_headers)
+ result = ds_api.datastream_create_in_system(system_id, json.dumps(self.ds_definition))
+ ds_id = ds_api.response_headers['Location'].split('/')[-1]
+ return ds_id
+
+ def delete_all_systems(self):
+ # delete datastreams first
+ self.delete_all_datastreams()
+ self.delete_all_sampling_features()
+ sys_api = Systems(self.TEST_URL, auth=self.auth, headers=self.sml_headers)
+ systems = sys_api.systems()
+ for system in systems['items']:
+ self.systems_api.system_delete(system['id'])
+
+ def delete_all_datastreams(self):
+ datastreams = self.datastream_api.datastreams()
+ for ds in datastreams['items']:
+ self.datastream_api.datastream_delete(ds['id'])
+
+ def delete_all_sampling_features(self):
+ sampling_features = self.sampling_feature_api.sampling_features(use_fois=True)
+ for sf in sampling_features['items']:
+ self.sampling_feature_api.sampling_feature_delete(sf['id'], use_fois=True)
+
+ yield OSHFixtures()
-class TestSystems:
- fixtures = OSHFixtures()
- def test_system_readonly(self):
+class TestSystems:
+ @pytest.mark.online
+ def test_system_readonly(self, fixtures):
# get all systems
- res = self.fixtures.systems_api.systems()
+ res = fixtures.systems_api.systems()
assert len(res['items']) > 0
check_ids = ["0s2lbn2n1bnc8", "94n1f19ld7tlc"]
assert [any(sys_id == item['id'] for item in res['items']) for sys_id in check_ids]
# get a single system
- res = self.fixtures.systems_api.system(check_ids[0])
+ res = fixtures.systems_api.system(check_ids[0])
assert res is not None
assert res['id'] == check_ids[0]
@pytest.mark.skip(reason="Skip transactional test")
- def test_system_functions(self):
+ def test_system_functions(self, fixtures):
# insertion of systems
- self.fixtures.systems_api.headers = self.fixtures.sml_headers
- sys_create_res = self.fixtures.systems_api.system_create(json.dumps(self.fixtures.system_definitions))
+ fixtures.systems_api.headers = fixtures.sml_headers
+ sys_create_res = fixtures.systems_api.system_create(json.dumps(fixtures.system_definitions))
assert sys_create_res is not None
# update of system and retrieval
- sml_desc_copy = self.fixtures.sys_sml_to_update.copy()
+ sml_desc_copy = fixtures.sys_sml_to_update.copy()
sml_desc_copy['description'] = 'Updated Description'
sml_str = json.dumps(sml_desc_copy)
- post_systems = self.fixtures.systems_api.system_update('blid74chqmses', sml_str)
+ post_systems = fixtures.systems_api.system_update('blid74chqmses', sml_str)
- check_result = self.fixtures.systems_api.system('blid74chqmses')
+ check_result = fixtures.systems_api.system('blid74chqmses')
assert check_result['properties']['description'] == 'Updated Description'
# deletion of system
- all_systems = self.fixtures.systems_api.systems()
+ all_systems = fixtures.systems_api.systems()
# clear datastreams
- delete_all_datastreams()
+ fixtures.delete_all_datastreams()
for system in all_systems['items']:
- res = self.fixtures.systems_api.system_delete(system['id'])
+ res = fixtures.systems_api.system_delete(system['id'])
assert res == {}
class TestDeployments:
- fixtures = OSHFixtures()
-
@pytest.mark.skip(reason="Skip transactional test")
- def test_deployment_create(self):
- res1 = self.fixtures.deployment_api.deployment_create(json.dumps(self.fixtures.deployment_definition))
+ def test_deployment_create(self, fixtures):
+ res1 = fixtures.deployment_api.deployment_create(json.dumps(fixtures.deployment_definition))
assert res1
- res2 = self.fixtures.deployment_api.deployments()
- assert self.fixtures.deployment_expected_id in [x['id'] for x in res2['items']]
- res3 = self.fixtures.deployment_api.deployment(self.fixtures.deployment_expected_id)
+ res2 = fixtures.deployment_api.deployments()
+ assert fixtures.deployment_expected_id in [x['id'] for x in res2['items']]
+ res3 = fixtures.deployment_api.deployment(fixtures.deployment_expected_id)
assert res3['properties']['name'] == 'Test Deployment 001' and res3[
- 'id'] == self.fixtures.deployment_expected_id
+ 'id'] == fixtures.deployment_expected_id
@pytest.mark.skip(reason="Skip transactional test")
- def test_deployment_update(self):
- self.fixtures.deployment_definition['properties']['description'] = 'Updated Description of Deployment 001'
- res = self.fixtures.deployment_api.deployment_update(self.fixtures.deployment_expected_id,
- json.dumps(self.fixtures.deployment_definition))
+ def test_deployment_update(self, fixtures):
+ fixtures.deployment_definition['properties']['description'] = 'Updated Description of Deployment 001'
+ res = fixtures.deployment_api.deployment_update(fixtures.deployment_expected_id,
+ json.dumps(fixtures.deployment_definition))
assert res is not None
@pytest.mark.skip(reason="Skip transactional test")
- def test_deployment_delete(self):
- res = self.fixtures.deployment_api.deployment_delete(self.fixtures.deployment_expected_id)
+ def test_deployment_delete(self, fixtures):
+ res = fixtures.deployment_api.deployment_delete(fixtures.deployment_expected_id)
assert res is not None
class TestSamplingFeatures:
- fixtures = OSHFixtures()
-
- def test_sampling_features_readonly(self):
- all_features = self.fixtures.sampling_feature_api.sampling_features(use_fois=True)
+ @pytest.mark.online
+ def test_sampling_features_readonly(self, fixtures):
+ all_features = fixtures.sampling_feature_api.sampling_features(use_fois=True)
assert len(all_features['items']) == 51
feature_id = "c4nce3peo8hvc"
- feature = self.fixtures.sampling_feature_api.sampling_feature(feature_id, use_fois=True)
+ feature = fixtures.sampling_feature_api.sampling_feature(feature_id, use_fois=True)
assert feature['id'] == feature_id
assert feature['properties']['name'] == 'Station WS013'
@pytest.mark.skip(reason="Skip transactional test")
- def test_sampling_features_all(self):
+ def test_sampling_features_all(self, fixtures):
# setup
- delete_all_systems()
- system_id = create_single_system()
+ fixtures.delete_all_systems()
+ system_id = fixtures.create_single_system()
# create a sampling feature
- self.fixtures.sampling_feature_api.headers = self.fixtures.geojson_headers
- res = self.fixtures.sampling_feature_api.sampling_feature_create(system_id,
- json.dumps(self.fixtures.feature_def), True)
- assert self.fixtures.sampling_feature_api.response_headers['Location'] is not None
- sampling_feature_id = self.fixtures.sampling_feature_api.response_headers['Location'].split('/')[-1]
+ fixtures.sampling_feature_api.headers = fixtures.geojson_headers
+ res = fixtures.sampling_feature_api.sampling_feature_create(system_id,
+ json.dumps(fixtures.feature_def), True)
+ assert fixtures.sampling_feature_api.response_headers['Location'] is not None
+ sampling_feature_id = fixtures.sampling_feature_api.response_headers['Location'].split('/')[-1]
# get all sampling features
- res = self.fixtures.sampling_feature_api.sampling_features(use_fois=True)
+ res = fixtures.sampling_feature_api.sampling_features(use_fois=True)
assert len(res['items']) > 0
assert any(x['id'] == sampling_feature_id for x in res['items'])
# get the sampling feature we created
- res = self.fixtures.sampling_feature_api.sampling_feature(sampling_feature_id, use_fois=True)
+ res = fixtures.sampling_feature_api.sampling_feature(sampling_feature_id, use_fois=True)
assert res['properties']['name'] == 'Test Station 001'
assert res['properties']['featureType'] == 'http://www.w3.org/ns/sosa/Station'
# get sampling features from a system
- res = self.fixtures.sampling_feature_api.sampling_features_from_system(system_id, use_fois=True)
+ res = fixtures.sampling_feature_api.sampling_features_from_system(system_id, use_fois=True)
assert len(res['items']) > 0
assert any(x['id'] == sampling_feature_id for x in res['items'])
# delete the sampling feature
- res = self.fixtures.sampling_feature_api.sampling_feature_delete(sampling_feature_id, use_fois=True)
- res = self.fixtures.sampling_feature_api.sampling_features(use_fois=True)
+ res = fixtures.sampling_feature_api.sampling_feature_delete(sampling_feature_id, use_fois=True)
+ res = fixtures.sampling_feature_api.sampling_features(use_fois=True)
assert res == {'items': []}
class TestDatastreams:
- fixtures = OSHFixtures()
-
- def test_datastreams_readonly(self):
+ @pytest.mark.online
+ def test_datastreams_readonly(self, fixtures):
ds_id = 'kjg2qrcm40rfk'
- datastreams = self.fixtures.datastream_api.datastreams()
+ datastreams = fixtures.datastream_api.datastreams()
assert len(datastreams['items']) > 0
assert any(x['id'] == ds_id for x in datastreams['items'])
- datastream = self.fixtures.datastream_api.datastream(ds_id)
+ datastream = fixtures.datastream_api.datastream(ds_id)
assert datastream['id'] == ds_id
assert datastream['name'] == "Simulated Weather Station Network - weather"
@pytest.mark.skip(reason="Skip transactional test")
- def test_all_ds_functions(self):
+ def test_all_ds_functions(self, fixtures):
# preflight cleanup
- delete_all_systems()
+ fixtures.delete_all_systems()
# setup systems needed
- self.fixtures.systems_api.headers = self.fixtures.sml_headers
- # systems = self.fixtures.systems_api.system_create(json.dumps(self.fixtures.system_definitions))
- system = create_single_system()
+ fixtures.systems_api.headers = fixtures.sml_headers
+ # systems = fixtures.systems_api.system_create(json.dumps(fixtures.system_definitions))
+ system = fixtures.create_single_system()
# insert a datastream
- ds_def_str = json.dumps(self.fixtures.ds_definition)
- ds_api = Datastreams(self.fixtures.TEST_URL, auth=self.fixtures.auth, headers=self.fixtures.json_headers)
+ ds_def_str = json.dumps(fixtures.ds_definition)
+ ds_api = Datastreams(fixtures.TEST_URL, auth=fixtures.auth, headers=fixtures.json_headers)
datastream_create = ds_api.datastream_create_in_system(system, ds_def_str)
# get the datastream id from Location header
@@ -340,25 +370,24 @@ class TestDatastreams:
class TestObservations:
- fixtures = OSHFixtures()
-
- def test_observations_readonly(self):
+ @pytest.mark.online
+ def test_observations_readonly(self, fixtures):
ds_id = 'kjg2qrcm40rfk'
- observations = self.fixtures.observations_api.observations_of_datastream(ds_id)
+ observations = fixtures.observations_api.observations_of_datastream(ds_id)
assert len(observations['items']) > 0
assert 'result' in observations['items'][0]
- observation_of_ds = self.fixtures.observations_api.observations_of_datastream(ds_id)
+ observation_of_ds = fixtures.observations_api.observations_of_datastream(ds_id)
assert observation_of_ds['items'][0]['result']['stationID'] == "WS013"
keys = ['stationID', 'temperature', 'pressure', 'humidity', 'windSpeed', 'windDirection']
assert [key in observation_of_ds['items'][0]['result'] for key in keys]
@pytest.mark.skip(reason="Skip transactional test")
- def test_observations(self):
+ def test_observations(self, fixtures):
# setup
- delete_all_systems()
- system = create_single_system()
- ds = create_single_datastream(system)
+ fixtures.delete_all_systems()
+ system = fixtures.create_single_system()
+ ds = fixtures.create_single_datastream(system)
the_time = datetime.utcnow().isoformat() + 'Z'
observation = {
@@ -369,60 +398,23 @@ class TestObservations:
"testboolean": True
}
}
- self.fixtures.observations_api.headers = {'Content-Type': 'application/om+json'}
- res = self.fixtures.observations_api.observations_create_in_datastream(ds, json.dumps(observation))
- obs = self.fixtures.observations_api.observations_of_datastream(ds)
+ fixtures.observations_api.headers = {'Content-Type': 'application/om+json'}
+ res = fixtures.observations_api.observations_create_in_datastream(ds, json.dumps(observation))
+ obs = fixtures.observations_api.observations_of_datastream(ds)
assert obs['items'][0]['phenomenonTime'] == the_time
obs_id = obs['items'][0]['id']
- res = self.fixtures.observations_api.observations_delete(obs_id)
- obs = self.fixtures.observations_api.observations_of_datastream(ds)
+ res = fixtures.observations_api.observations_delete(obs_id)
+ obs = fixtures.observations_api.observations_of_datastream(ds)
assert obs['items'] == []
- delete_all_systems()
+ fixtures.delete_all_systems()
class TestSystemHistory:
- fixtures = OSHFixtures()
-
- def test_system_history(self):
+ @pytest.mark.online
+ def test_system_history(self, fixtures):
sys_id = '0s2lbn2n1bnc8'
- res = self.fixtures.system_history_api.system_history(sys_id)
+ res = fixtures.system_history_api.system_history(sys_id)
assert len(res['items']) > 0
history_id = res['items'][0]['properties']['validTime'][0]
- res = self.fixtures.system_history_api.system_history_by_id(system_id=sys_id, history_id=history_id)
+ res = fixtures.system_history_api.system_history_by_id(system_id=sys_id, history_id=history_id)
assert res['id'] == sys_id
-
-
-def create_single_system():
- sys_api = Systems(OSHFixtures.TEST_URL, auth=OSHFixtures.auth, headers=OSHFixtures.sml_headers)
- sys_create_res = sys_api.system_create(json.dumps(OSHFixtures.system_definitions[0]))
- sys_id = sys_api.response_headers['Location'].split('/')[-1]
- return sys_id
-
-
-def create_single_datastream(system_id: str):
- ds_api = Datastreams(OSHFixtures.TEST_URL, auth=OSHFixtures.auth, headers=OSHFixtures.json_headers)
- result = ds_api.datastream_create_in_system(system_id, json.dumps(OSHFixtures.ds_definition))
- ds_id = ds_api.response_headers['Location'].split('/')[-1]
- return ds_id
-
-
-def delete_all_systems():
- # delete datastreams first
- delete_all_datastreams()
- delete_all_sampling_features()
- sys_api = Systems(OSHFixtures.TEST_URL, auth=OSHFixtures.auth, headers=OSHFixtures.sml_headers)
- systems = sys_api.systems()
- for system in systems['items']:
- OSHFixtures.systems_api.system_delete(system['id'])
-
-
-def delete_all_datastreams():
- datastreams = OSHFixtures.datastream_api.datastreams()
- for ds in datastreams['items']:
- OSHFixtures.datastream_api.datastream_delete(ds['id'])
-
-
-def delete_all_sampling_features():
- sampling_features = OSHFixtures.sampling_feature_api.sampling_features(use_fois=True)
- for sf in sampling_features['items']:
- OSHFixtures.sampling_feature_api.sampling_feature_delete(sf['id'], use_fois=True)
=====================================
tests/test_ogcapi_coverages.py
=====================================
@@ -0,0 +1,114 @@
+import json
+import pytest
+
+from owslib.ogcapi.coverages import Coverages
+
+
+class MockCoverages(Coverages):
+ def __init__(self, *args, **kwargs):
+ kwargs["json_"] = '{}' # avoid init API request
+ super(MockCoverages, self).__init__(*args, **kwargs)
+
+ def _request(self, **kwargs):
+ json_args = json.dumps(kwargs)
+ return json_args.encode("utf-8")
+
+
+@pytest.mark.parametrize(
+ ["kwargs", "expect"],
+ [
+ (
+ {"unknown": "dropped-param"},
+ {}
+ ),
+ (
+ {"properties": ["B04"]},
+ {"properties": "B04"},
+ ),
+ (
+ {"properties": ["B04", "B08"]},
+ {"properties": "B04,B08"},
+ ),
+ (
+ {"scale_axes": [("Lat", 1), ("Lon", 2)]},
+ {"scale-axes": "Lat(1),Lon(2)"},
+ ),
+ (
+ {"scale_axes": (("Lat", 1), ("Lon", 2))},
+ {"scale-axes": "Lat(1),Lon(2)"},
+ ),
+ (
+ {"scale_axes": [["Lat", 1], ["Lon", 2]]},
+ {"scale-axes": "Lat(1),Lon(2)"},
+ ),
+ (
+ {"scale_axes": {"Lat": 1, "Lon": 2}},
+ {"scale-axes": "Lat(1),Lon(2)"},
+ ),
+ (
+ {"scale_size": [("Lat", 100), ("Lon", 200)]},
+ {"scale-size": "Lat(100),Lon(200)"},
+ ),
+ (
+ {"scale_size": (("Lat", 100), ("Lon", 200))},
+ {"scale-size": "Lat(100),Lon(200)"},
+ ),
+ (
+ {"scale_size": [["Lat", 100], ["Lon", 200]]},
+ {"scale-size": "Lat(100),Lon(200)"},
+ ),
+ (
+ {"scale_size": {"Lat": 100, "Lon": 200}},
+ {"scale-size": "Lat(100),Lon(200)"},
+ ),
+ (
+ {"scale_factor": 1.23},
+ {"scale-factor": 1.23},
+ ),
+ (
+ {"scale_factor": 2},
+ {"scale-factor": 2},
+ ),
+ (
+ {"scale_factor": 0.5},
+ {"scale-factor": 0.5},
+ ),
+ (
+ {"subset": {"Lat": [10, 20], "Lon": [30, 40]}},
+ {"subset": "Lat(10:20),Lon(30:40)"},
+ ),
+ (
+ {"subset": {"Lat": (10, 20), "Lon": (30, 40)}},
+ {"subset": "Lat(10:20),Lon(30:40)"},
+ ),
+ (
+ {"subset": [("Lat", 10, 20), ("Lon", 30, 40)]},
+ {"subset": "Lat(10:20),Lon(30:40)"},
+ ),
+ (
+ {"subset": [["Lat", 10, 20], ["Lon", 30, 40]]},
+ {"subset": "Lat(10:20),Lon(30:40)"},
+ ),
+ (
+ {"datetime": ("2025-01-01", "2025-01-02")},
+ {"datetime": "2025-01-01/2025-01-02"},
+ ),
+ (
+ {"datetime": ["2025-01-01", "2025-01-02"]},
+ {"datetime": "2025-01-01/2025-01-02"},
+ ),
+ (
+ {"datetime": "2025-01-01/2025-01-02"},
+ {"datetime": "2025-01-01/2025-01-02"},
+ ),
+ ]
+)
+def test_coverages_coverage_kwargs(kwargs, expect):
+ """
+ Validate that additional keywords for coverages are parsed as intended.
+ """
+ cov = MockCoverages("")
+ result = cov.coverage("test", **kwargs)
+ args = result.read()
+ params = json.loads(args)
+ assert params["kwargs"] == expect
=====================================
tests/test_ogcapi_records_pycsw.py
=====================================
@@ -40,7 +40,7 @@ def test_ogcapi_records_pycsw():
assert isinstance(w.response, dict)
pycsw_cite_demo_queryables = w.collection_queryables('metadata:main')
- assert len(pycsw_cite_demo_queryables['properties'].keys()) == 13
+ assert len(pycsw_cite_demo_queryables['properties'].keys()) == 14
# Minimum of limit param is 1
with pytest.raises(RuntimeError):
@@ -67,3 +67,13 @@ def test_ogcapi_records_pycsw():
assert pycsw_cite_demo_query['numberMatched'] == 1
assert pycsw_cite_demo_query['numberReturned'] == 1
assert len(pycsw_cite_demo_query['features']) == 1
+
+
+@pytest.mark.online
+@pytest.mark.parametrize("path, expected", [
+ ('collections/foo/1', 'https://demo.pycsw.org/cite/collections/foo/1'),
+ ('collections/foo/https://example.org/11', 'https://demo.pycsw.org/cite/collections/foo/https://example.org/11') # noqa
+])
+def test_ogcapi_build_url(path, expected):
+ w = Records(SERVICE_URL)
+ assert w._build_url(path) == expected
=====================================
tests/test_wfs_generic.py
=====================================
@@ -177,6 +177,7 @@ def test_schema_wfs_200():
@pytest.mark.online
+@pytest.mark.skip(reason='HTTP 403 issue. See issue #956')
@pytest.mark.skipif(not service_ok(SERVICE_URL),
reason="WFS service is unreachable")
def test_xmlfilter_wfs_110():
@@ -193,6 +194,7 @@ def test_xmlfilter_wfs_110():
@pytest.mark.online
+@pytest.mark.skip(reason='HTTP 403 issue. See issue #956')
@pytest.mark.skipif(not service_ok(SERVICE_URL),
reason="WFS service is unreachable")
def test_xmlfilter_wfs_200():
=====================================
tests/test_wms_getmap.py
=====================================
@@ -4,6 +4,7 @@ import pytest
from tests.utils import service_ok
from owslib.wms import WebMapService
+from owslib.map.wms130 import WebMapService_1_3_0
from owslib.util import ServiceException
from owslib.util import ResponseWrapper
@@ -12,6 +13,31 @@ SERVICE_URL = 'http://mesonet.agron.iastate.edu/cgi-bin/wms/nexrad/n0r-t.cgi'
NCWMS2_URL = "http://wms.stccmop.org:8080/ncWMS2/wms"
+@pytest.fixture
+def wms():
+ return WebMapService_1_3_0(SERVICE_URL, version='1.3.0')
+
+
+@pytest.mark.parametrize("version", ["1.3.0", "1.1.1"])
+def test_build_getmap_request_bbox_precision(version):
+ bbox = (-126.123456789, 24.123456789, -66.123456789, 50.123456789)
+ bbox_yx = (bbox[1], bbox[0], bbox[3], bbox[2])
+
+ m = mock.Mock()
+ type(m).version = mock.PropertyMock(return_value=version)
+
+ request = WebMapService_1_3_0._WebMapService_1_3_0__build_getmap_request(m,
+ layers=['layer1'],
+ styles=['default'],
+ srs='EPSG:4326',
+ bbox=bbox,
+ format='image/jpeg',
+ size=(250, 250),
+ transparent=True
+ )
+ assert request['bbox'] == ','.join(map(str, bbox_yx))
+
+
@pytest.mark.online
@pytest.mark.skipif(not service_ok(SERVICE_URL),
reason="WMS service is unreachable")
=====================================
tests/test_wmts.py
=====================================
@@ -105,7 +105,7 @@ def test_wmts_without_serviceprovider_tag():
_ = WebMapTileService(EXAMPLE_SERVICE_URL)
-SERVICE_URL_REST = 'https://www.basemap.at/wmts/1.0.0/WMTSCapabilities.xml'
+SERVICE_URL_REST = 'https://mapsneu.wien.gv.at/basemapneu/1.0.0/WMTSCapabilities.xml'
@pytest.mark.online
=====================================
tox.ini
=====================================
@@ -1,5 +1,16 @@
[pytest]
-addopts = -v -rxs -s --color=yes --tb=native --ignore=setup.py --doctest-modules --doctest-glob 'tests/**/*.txt' --cov-report term-missing --cov owslib
+addopts =
+ -v
+ -rxs
+ -s
+ --color=yes
+ --tb=native
+ --ignore=setup.py
+ --doctest-modules
+ --doctest-glob='tests/**/*.txt'
+ --cov-report=term-missing
+ --cov=owslib
+
norecursedirs = .git docs examples etc cov* *.egg* pytest* .tox _broken
markers =
online: test requires online resources.
View it on GitLab: https://salsa.debian.org/debian-gis-team/owslib/-/compare/505a90d107a6a53930185f22f9ca5ec016ddb7bb...f588d3671fe804e8f418b02bc6e2436b31424f14
--
View it on GitLab: https://salsa.debian.org/debian-gis-team/owslib/-/compare/505a90d107a6a53930185f22f9ca5ec016ddb7bb...f588d3671fe804e8f418b02bc6e2436b31424f14
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20250123/9749901a/attachment-0001.htm>
More information about the Pkg-grass-devel
mailing list