diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..fb68add83194a69446dc8dbde992a15a5d7cbb77 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,20 @@ +--- +variables: + SONAR_TOKEN: "9864e0b53830ce6c7917fdc17ebe7e486d0eeba0" + SONAR_PROJECTKEY: "$CI_PROJECT_NAME" + SONAR_HOST_URL: "https://sonarqube.neogeo.fr" + GIT_DEPTH: 0 +# ---------------------------------------------------------------------------- # +stages: + - sonarqube +# ---------------------------------------------------------------------------- # +sonarqube-check: + image: + name: sonarsource/sonar-scanner-cli:latest + entrypoint: [""] + stage: sonarqube + script: + - sonar-scanner -Dsonar.qualitygate.wait=true -Dsonar.projectKey=$CI_PROJECT_NAME -Dsonar.projectName=$CI_PROJECT_NAME -Dsonar.projectVersion=$CI_COMMIT_BRANCH + allow_failure: true + only: + - rc/1.1.0 diff --git a/docs/mra-reference.md b/docs/mra-reference.md index fcafd8883f7bf1cc15bee429448ba407ba2124cb..1a98b656df4f9140ff553499845812150dcb42ef 100644 --- a/docs/mra-reference.md +++ b/docs/mra-reference.md @@ -28,7 +28,7 @@ A data store contains one or more feature types which are vector based spatial r In cases of a _shapefile_, the feature type is unique and corresponds to the data store. In cases of a _PostGIS_ database, feature types corresponds to tables. -A coverage store is a source of spatial data that is raster based. +A coverage store is a source of spatial data that is raster based. It can be a _GeoTIFF_ file and for this format file, the coverage is unique and corresponds to the coverage store. The concept of _workspace_ comes from GeoServer and it does not exist in MapServer. diff --git a/plugins/__init__.py b/plugins/__init__.py index b32a3a31f87ff20e20bbe24801658d09cd4f5ab2..686e9947db76ca4fb70442d13ebcaccf7f35a19e 100644 --- a/plugins/__init__.py +++ b/plugins/__init__.py @@ -25,7 +25,10 @@ # this folder and they will automatically be loaded if this folder is # present in the mra.yaml configuration under plugins/loadpaths -import os, os.path + +import os +import os.path + __all__ = [] for module in os.listdir(os.path.dirname(__file__)): diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..f9bfb48a31e1d9eeea6f2b0971860b41153caa18 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +web.py>=0.50,<0.60 +gdal<2.5.0 +pyyaml diff --git a/setup.py b/setup.py index d1231ed93bd0a9053b3db49353f6ad64f68dd3ab..45895dc737ae3bc0c80bef40277c00d0b2b87438 100644 --- a/setup.py +++ b/setup.py @@ -21,39 +21,49 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -import sys -from distutils.core import setup + +import os.path +from setuptools import setup + + +version = '1.1.0' + + +def parse_requirements(filename): + with open(filename) as f: + lines = (line.strip() for line in f) + return [line for line in lines if line and not line.startswith('#')] + + +dirname = os.path.dirname(__file__) + +reqs_filename = os.path.join(dirname, 'requirements.txt') +reqs = [str(req) for req in parse_requirements(reqs_filename)] setup( - name='MapServer Rest API', - version='1.0.2', - author='Neogeo Technologies', - author_email='contact@neogeo-online.net', - description='A RESTFul interface for MapServer', - long_description=open('README.md', 'r').read(), - keywords='neogeo mapserver rest restful', + name="MapServer Rest API", + version=version, + description="A RESTFul interface for MapServer", + author="Neogeo Technologies", + 
author_email="contact@neogeo.fr", license="GPLv3", - #url='', + url="https://github.com/neogeo-technologies/mra", classifiers=[ - 'Development Status :: Beta', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: GPLv3', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python3', - 'Natural Language :: English', - 'Topic :: Scientific/Engineering :: GIS', - ], - #packages=, - #package_dir={'':'src'}, - #namespace_packages=['mra'], - install_requires=[ - 'web.py>=0.50,<0.60', - 'pyyaml', - 'gdal<2.5.0', + "Operating System :: OS Independent", + "Development Status :: Beta", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GPLv3", + "Programming Language :: Python", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Scientific/Engineering :: GIS", ], + install_requires=reqs, scripts=[ - 'src/server.py', + os.path.join(dirname, 'src/server.py'), ] ) diff --git a/src/extensions.py b/src/extensions.py index 73bfd49578b08cac104e6f05ce15597e24c976b4..959e66433e26daf5f3871786f8e29fc4f1e295ac 100644 --- a/src/extensions.py +++ b/src/extensions.py @@ -21,14 +21,16 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ Module for managing add-ons (experimental). """ -import sys -import os.path + import logging +import os.path +import sys class ExtensionManager(object): diff --git a/src/metadata.py b/src/metadata.py index 807eb19bcf42d2f191182c730a693c8b5e24376f..5911742b1b37e099f173a3eb134cbcd2e95e472a 100644 --- a/src/metadata.py +++ b/src/metadata.py @@ -21,6 +21,7 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ Wrapper for managing metadata of mapfiles. @@ -29,13 +30,14 @@ """ -import yaml + import contextlib -from mapscript import MapServerError import logging +from mapscript import MapServerError +import yaml -METADATA_NAME="mra" +METADATA_NAME = "mra" def get_metadata(obj, key, *args): @@ -69,7 +71,7 @@ def iter_metadata_keys(obj): keys = [] key = obj.getFirstMetaDataKey() - while key != None: + while key is not None: keys.append(key) key = obj.getNextMetaDataKey(key) @@ -81,9 +83,10 @@ def get_metadata_keys(obj): def set_metadata(obj, key, value): + # TODO: Fix this with upgrade to py3 try: obj.setMetaData(key, value) - except UnicodeEncodeError as e: + except UnicodeEncodeError: obj.setMetaData(key, value.encode('utf8')) @@ -121,7 +124,7 @@ def __get_mra(obj): try: metadata = yaml.load(text) except yaml.parser.ParserError: - raise IOError("File has corrupted MRA metadata for entry \"%s\"." % key) + raise IOError("File has corrupted MRA metadata.") return metadata @@ -169,7 +172,7 @@ def set_mra_metadata(obj, key, value): def del_mra_metadata(obj, key, value): - with mra_metadata(obj) as mra_metadata: + with mra_metadata(obj) as metadata: del metadata[key] diff --git a/src/mra.py b/src/mra.py index 8eb91ba443ab6dee4a9811d1e623844a2e076576..d41f3b75e4e12cff13437a89e284faa5d14db8f8 100644 --- a/src/mra.py +++ b/src/mra.py @@ -21,6 +21,7 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ Module for managing MapFiles in MRA conceptual model. 
@@ -33,22 +34,22 @@ """ + +import functools +import logging +import mapscript import os import os.path -import string import urllib.parse -import functools -import web import yaml -import mapscript -import tools +import xml.etree.ElementTree as ET + from extensions import plugins +import metadata +import stores +import tools import webapp from webapp import KeyExists -import stores -import metadata -import xml.etree.ElementTree as ET -import logging yaml.warnings({'YAMLLoadWarning': False}) @@ -177,8 +178,8 @@ class Layer(MetadataMixin): def get_latlon_extent(self): rect = mapscript.rectObj(*self.get_extent()) - res = rect.project(mapscript.projectionObj(self.get_proj4()), - mapscript.projectionObj("+init=epsg:4326")) + rect.project(mapscript.projectionObj(self.get_proj4()), + mapscript.projectionObj("+init=epsg:4326")) return stores.Extent(rect.minx, rect.miny, rect.maxx, rect.maxy) def get_fields(self): @@ -186,7 +187,7 @@ class Layer(MetadataMixin): if fields == "all": # TODO: Get fields from feature type - raise NotImplemented() + raise NotImplementedError("TODO") elif not fields: return [] else: @@ -263,7 +264,7 @@ class Layer(MetadataMixin): self.ms.tolerance = 0 self.ms.toleranceunits = 6 else: - return + return None try: style = open(os.path.join(os.path.dirname(__file__), "%s.sld" % s_name), encoding="utf-8").read() @@ -318,7 +319,7 @@ class LayerGroup(object): def remove(self, *args): for layer in args: if isinstance(layer, str): - layer = mapfile.get_layer(layer) + layer = self.mapfile.get_layer(layer) self.remove_layer(layer) def clear(self): @@ -394,8 +395,8 @@ class Mapfile(MetadataMixin): def check(f, v): return f(v) if callable(f) else f == v - for l in range(self.ms.numlayers): - ms_layer = self.ms.getLayer(l) + for lay in range(self.ms.numlayers): + ms_layer = self.ms.getLayer(lay) if not all(check(checker, getattr(ms_layer, k, None)) for k, checker in attr.items()): continue if not all(check(checker, metadata.get_metadata(ms_layer, k, None)) for k, checker in meta.items()): @@ -542,7 +543,7 @@ class FeatureTypeModel(LayerModel): # self.set_metadata("ows_extent", "%s %s %s %s" % # (ft.get_extent().minX(), ft.get_extent().minY(), # ft.get_extent().maxX(), ft.get_extent().maxY())) - #elif cpram["dbtype"] in ["shp", "shapefile"]: + # elif cpram["dbtype"] in ["shp", "shapefile"]: # TODO: clean up this fallback. else: self.ms.connectiontype = mapscript.MS_SHAPEFILE @@ -641,13 +642,21 @@ class CoverageModel(LayerModel): info = ws.get_coveragestore_info(cs_name) cparam = info["connectionParameters"] - #if cparam["dbtype"] in ["tif", "tiff"]: self.ms.connectiontype = mapscript.MS_RASTER url = urllib.parse.urlparse(cparam["url"]) - self.ms.data = self.ws.mra.get_file_path(url.path) + filename = self.ws.mra.get_file_path(url.path) + if cs.tindex is None: + #if cparam["dbtype"] in ["tif", "tiff"]: + self.ms.data = filename + self.ms.tileindex = None + self.ms.tileitem = None # TODO: strip extention. - #else: - # raise ValueError("Unhandled type \"%s\"." % cparam["dbtype"]) + #else: + # raise ValueError("Unhandled type \"%s\"." % cparam["dbtype"]) + else: + self.ms.data = None + self.ms.tileindex = cs.get_tileindex() + self.ms.tileitem = cs.get_tileitem() # Update mra metadatas, and make sure the mandatory ones are left untouched. 
self.update_mra_metadatas(metadata) @@ -672,6 +681,8 @@ class CoverageModel(LayerModel): layer.ms.data = self.ms.data layer.ms.connectiontype = self.ms.connectiontype layer.ms.connection = self.ms.connection + layer.ms.tileindex = self.ms.tileindex + layer.ms.tileitem = self.ms.tileitem layer_name = self.get_mra_metadata("name") @@ -888,20 +899,20 @@ class Workspace(Mapfile): def create_layermodel(self, st_type, store, name, metadata={}): if self.has_layermodel(st_type, store, name): raise KeyExists((st_type, store, name)) - ft = self.__ms2model(mapscript.layerObj(self.ms), st_type=st_type) + lm = self.__ms2model(mapscript.layerObj(self.ms), st_type=st_type) - ft.update(store, name, metadata) - return ft + lm.update(store, name, metadata) + return lm def update_layermodel(self, st_type, store, name, metadata={}): - ft = self.get_layermodel(st_type, store, name) - ft.update(store, name, metadata) + lm = self.get_layermodel(st_type, store, name) + lm.update(store, name, metadata) def delete_layermodel(self, st_type, ds_name, ft_name): - model = self.get_layermodel(st_type, ds_name, ft_name) - if model.get_mra_metadata("layers", []): + lm = self.get_layermodel(st_type, ds_name, ft_name) + if lm.get_mra_metadata("layers", []): raise ValueError("The %s \"%s\" can't be delete because it is used." % (st_type, ft_name)) - self.ms.removeLayer(model.ms.index) + self.ms.removeLayer(lm.ms.index) # Featuretypes @@ -1114,11 +1125,12 @@ class MRA(object): path = self.get_available_path("%s.ws.map" % name) try: return Workspace(self, path) - except IOError as OSError: + except IOError: raise KeyError(name) def delete_workspace(self, name): - path = self.get_available_path("%s.ws.map" % name) + # path = self.get_available_path("%s.ws.map" % name) + raise NotImplementedError("Method 'delete_workspace' is not yet available. (TODO)") # Services: diff --git a/src/mralogs.py b/src/mralogs.py index aad56854ac49f6c095244dc2c9a07f40c96ea296..ce9cdfbe80d84e7530f6284a824e6f86e27a44cd 100644 --- a/src/mralogs.py +++ b/src/mralogs.py @@ -21,16 +21,18 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ Module for managing the logs. """ -import sys -import string + +import functools import inspect import logging -import functools +import sys +import string def setup(log_level, log_file=None, format="%(asctime)s %(levelname)7s: %(message)s"): @@ -79,7 +81,7 @@ class Reccord(logging.Handler): return iter(self.records) def __del__(self): - logger.removeHandler(logging.getLogger(self.logger)) + logging.removeHandler(logging.getLogger(self.logger)) logging.Handler.__del__(self) @@ -92,10 +94,10 @@ def short_str(obj, length=15, delta=5, tail="..."): s = str(obj) if len(s) < length: return s - return s[:length+1-delta+min(s[length-delta:length+delta].find(c) for c in string.punctuation)] + tail + return s[:length + 1 - delta + min(s[length - delta:length + delta].find(c) for c in string.punctuation)] + tail -def logIn(level="debug", filter=(lambda *a, **kw:True)): +def logIn(level="debug", filter=(lambda *a, **kw: True)): """Decorator factory used to log when the function is called. The log level can be specified using level. filter can be used to specify a function that should be used to @@ -121,7 +123,7 @@ def logIn(level="debug", filter=(lambda *a, **kw:True)): return decorator -def logOut(level="debug", filter=(lambda *a, **kw:True)): +def logOut(level="debug", filter=(lambda *a, **kw: True)): """Decorator factory used to log when the function returns. 
The log level can be specified using level. filter can be used to specify a function that should be used to diff --git a/src/pyhtml.py b/src/pyhtml.py index 7421c0953cc39f69b72405d4984036c231d61d3e..077ce8d46ac769df4f9d9e3fe1968caf33c37584 100644 --- a/src/pyhtml.py +++ b/src/pyhtml.py @@ -21,16 +21,17 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ HTML interface of the REST API. """ -import pyxml + import io -import urllib.parse -from xml.etree import ElementTree as etree from cgi import escape +import pyxml +import urllib.parse from xml.sax.saxutils import unescape @@ -60,7 +61,7 @@ def dump_xml(xml, fp, indent=0, indent_depth=2, reinit=True): def line(fmt, *args): """Writes a line to fp with corect indentation.""" - fp.write(('%s' + fmt + '\n') % tuple([indent*' ']+list(args))) + fp.write(('%s' + fmt + '\n') % tuple([indent * ' '] + list(args))) if reinit: # We need to reset element ID if we where nto called recursivly. @@ -89,7 +90,7 @@ def dump_xml(xml, fp, indent=0, indent_depth=2, reinit=True): escape(child.tag), '' if not child.attrib else escape(" %s" % (child.attrib))) line('<td><div id="entry_%d">', id) - dump_xml(child, fp, indent=indent+indent_depth, reinit=False) + dump_xml(child, fp, indent=indent + indent_depth, reinit=False) line('</div></td>') indent -= indent_depth line('</tr>') diff --git a/src/pyxml.py b/src/pyxml.py index 27cd8f4dc5aa118857964a93c6c0490c3447af30..5720c3dd963dc6b2eb01afcf9f6c9cd8520adc3f 100644 --- a/src/pyxml.py +++ b/src/pyxml.py @@ -21,13 +21,16 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ XML interface of the REST API. """ -import xml.etree.ElementTree as etree + +import logging from xml.etree.ElementTree import Element +import xml.etree.ElementTree as etree from xml.sax.saxutils import escape @@ -183,7 +186,7 @@ def xml_href(parent, obj, hint=None, xml_mapper=default_xml_mapper, def xml_string(parent, obj, _=None, xml_mapper=default_xml_mapper, - dict_mapper=default_xml_dict_mapper, list_mapper=default_xml_list_mapper): + dict_mapper=default_xml_dict_mapper, list_mapper=default_xml_list_mapper): """Adds obj to parent as if it is a string.""" parent.text = escape(str(obj)) @@ -197,7 +200,7 @@ def xml_dict(parent, obj, hint=None, xml_mapper=default_xml_mapper, """ for k, v in obj.items(): if hint: - child = etree.Element(hint[0], attrib={hint[1]:k}) + child = etree.Element(hint[0], attrib={hint[1]: k}) else: child = etree.Element(k, attrib={}) xml(v, parent=child, xml_mapper=xml_mapper, dict_mapper=dict_mapper, list_mapper=list_mapper) diff --git a/src/server.py b/src/server.py index 551ee7807c353f0bb96801452ceb4bf069014edc..0f2539ac33d97c17d1a890096bb380c20554acbb 100755 --- a/src/server.py +++ b/src/server.py @@ -21,27 +21,29 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ URL mapping infrastructure and HTTP methods used by the REST API. """ + import os.path import sys import web -import json -import urllib.parse as urlparse -import logging -import mralogs + +from extensions import plugins from mra import MRA -import webapp -from webapp import HTTPCompatible, urlmap, get_data -import tools -from tools import href, assert_is_empty +import mralogs from pyxml import Entries -from extensions import plugins -import mapscript +import tools +from tools import assert_is_empty +from tools import href +import webapp +from webapp import get_data +from webapp import HTTPCompatible +from webapp import urlmap # Some helper functions first. 
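
Most of the server.py changes below only re-indent the dictionaries returned by the GET handlers; the handlers themselves follow web.py's class-per-resource convention, with the project's `HTTPCompatible` decorator (in webapp.py) serializing the returned dict to XML, JSON or HTML. For readers unfamiliar with web.py, a minimal sketch of that convention, independent of MRA's real URL map (the routes and class bodies here are invented):

```python
import web

# Hypothetical two-resource app: the regex/class pairs play the same role as
# the urlmap in MRA's server.py, but these names and routes are placeholders.
urls = (
    "/version", "version",
    "/workspaces/(.+)", "workspace",
)


class version(object):
    def GET(self):
        return "1.1.0"


class workspace(object):
    def GET(self, name):
        return "workspace: %s" % name


if __name__ == "__main__":
    web.application(urls, globals()).run()
```
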
@@ -80,11 +82,14 @@ class version(object): """ @HTTPCompatible() def GET(self, format): - return {"about": {"resources": [ - {"name": "MapServer", "version": tools.ms_version()}, - {"name": "GDAL", "version": tools.gdal_version()}, - ]} + return { + "about": { + "resources": [ + {"name": "MapServer", "version": tools.ms_version()}, + {"name": "GDAL", "version": tools.gdal_version()}, + ] } + } class workspaces(object): @@ -99,11 +104,12 @@ class workspaces(object): def GET(self, format, *args, **kwargs): """List all workspaces.""" - return {"workspaces": [{ - "name": ws_name, - "href": "%s/workspaces/%s.%s" % (web.ctx.home, ws_name, format) - } for ws_name in mra.list_workspaces()] - } + return { + "workspaces": [{ + "name": ws_name, + "href": "%s/workspaces/%s.%s" % (web.ctx.home, ws_name, format) + } for ws_name in mra.list_workspaces()] + } @HTTPCompatible() def POST(self, format): @@ -144,14 +150,15 @@ class workspace(object): """Return workspace <ws>.""" ws = get_workspace(ws_name) - return {"workspace": ({ - "name": ws.name, - "dataStores": - href("%s/workspaces/%s/datastores.%s" % (web.ctx.home, ws.name, format)), - "coverageStores": - href("%s/workspaces/%s/coveragestores.%s" % (web.ctx.home, ws.name, format)), - }) - } + return { + "workspace": ({ + "name": ws.name, + "dataStores": href( + "%s/workspaces/%s/datastores.%s" % (web.ctx.home, ws.name, format)), + "coverageStores": href( + "%s/workspaces/%s/coveragestores.%s" % (web.ctx.home, ws.name, format)), + }) + } # TODO: def PUT(... # TODO: def DELETE(... @@ -170,12 +177,13 @@ class datastores(object): """List all data stores in workspace <ws>.""" ws = get_workspace(ws_name) - return {"dataStores": [{ - "name": ds_name, - "href": "%s/workspaces/%s/datastores/%s.%s" % ( - web.ctx.home, ws.name, ds_name, format) - } for ds_name in ws.iter_datastore_names()] - } + return { + "dataStores": [{ + "name": ds_name, + "href": "%s/workspaces/%s/datastores/%s.%s" % ( + web.ctx.home, ws.name, ds_name, format) + } for ds_name in ws.iter_datastore_names()] + } @HTTPCompatible() def POST(self, ws_name, format): @@ -215,21 +223,20 @@ class datastore(object): info = ws.get_datastore_info(ds_name) connectionParameters = info.get("connectionParameters", {}) - return {"dataStore": { - "name": info["name"], - "enabled": True, # Always enabled - # TODO: Handle enabled/disabled states - "workspace": { - "name": ws.name, - "href": "%s/workspaces/%s.%s" % ( - web.ctx.home, ws.name, format), - }, - "featureTypes": href("%s/workspaces/%s/datastores/%s/featuretypes.%s" % ( - web.ctx.home, ws.name, ds_name, format) - ), - "connectionParameters": Entries(connectionParameters, tag_name="entry"), - } + return { + "dataStore": { + "name": info["name"], + "enabled": True, # Always enabled + # TODO: Handle enabled/disabled states + "workspace": { + "name": ws.name, + "href": "%s/workspaces/%s.%s" % (web.ctx.home, ws.name, format), + }, + "featureTypes": href( + "%s/workspaces/%s/datastores/%s/featuretypes.%s" % (web.ctx.home, ws.name, ds_name, format)), + "connectionParameters": Entries(connectionParameters, tag_name="entry"), } + } @HTTPCompatible() def PUT(self, ws_name, ds_name, format): @@ -274,12 +281,13 @@ class featuretypes(object): """List all feature types in selected data store <ds>.""" ws = get_workspace(ws_name) - return {"featureTypes": [{ - "name": ft.name, - "href": "%s/workspaces/%s/datastores/%s/featuretypes/%s.%s" % ( - web.ctx.home, ws.name, ds_name, ft.name, format) - } for ft in ws.iter_featuretypemodels(ds_name)] - } + return { + 
"featureTypes": [{ + "name": ft.name, + "href": "%s/workspaces/%s/datastores/%s/featuretypes/%s.%s" % ( + web.ctx.home, ws.name, ds_name, ft.name, format) + } for ft in ws.iter_featuretypemodels(ds_name)] + } @HTTPCompatible() def POST(self, ws_name, ds_name, format): @@ -316,7 +324,7 @@ class featuretypes(object): wsmf.save() webapp.Created("%s/workspaces/%s/datastores/%s/featuretypes/%s.%s" % ( - web.ctx.home, ws.name, ds_name, data["name"], format)) + web.ctx.home, ws.name, ds_name, data["name"], format)) class featuretype(object): @@ -344,7 +352,6 @@ class featuretype(object): extent = ft.get_extent() latlon_extent = ft.get_latlon_extent() - # About attributs, we apply just values handled by # MapServer in a GetFeature response... attributes = [{ @@ -374,46 +381,47 @@ class featuretype(object): # binding? }) - return {"featureType": ({ - # Why the name would it be different from nativeName? - "name": ft.name, - "nativeName": ft.name, - "title": ft.get_mra_metadata("title", ft.name), - "abstract": ft.get_mra_metadata("abstract", None), - # TODO: keywords - "nativeCRS": ft.get_wkt(), - "attributes": attributes, - "nativeBoundingBox": { - "minx": extent.minX(), - "miny": extent.minY(), - "maxx": extent.maxX(), - "maxy": extent.maxY(), - "crs": "%s:%s" % (ft.get_authority_name(), ft.get_authority_code()), - }, - "latLonBoundingBox": { - "minx": latlon_extent.minX(), - "miny": latlon_extent.minY(), - "maxx": latlon_extent.maxX(), - "maxy": latlon_extent.maxY(), - "crs": "EPSG:4326", - }, - # "srs": "%s:%s" % (ft.get_authority()[0], ft.get_authority()[1]), - "projectionPolicy": "NONE", - # About srs & projectionPolicy: (TODO: Handle the other cases) - # In MRA, it is easier (or more logical?) to keep native CRS, - # Or there is a problem of understanding on our part. - # So, i prefer to comment 'srs' entry cause we force the - # value of 'projectionPolicy' to 'NONE'... but it is something - # we should investigate... - "enabled": True, # Always enabled => TODO - "store": { # TODO: add key: class="dataStore" - "name": ds_name, - "href": "%s/workspaces/%s/datastores/%s.%s" % ( - web.ctx.home, ws_name, ds_name, format) - }, - # TODO: maxFeatures - }) - } + return { + "featureType": ({ + # Why the name would it be different from nativeName? + "name": ft.name, + "nativeName": ft.name, + "title": ft.get_mra_metadata("title", ft.name), + "abstract": ft.get_mra_metadata("abstract", None), + # TODO: keywords + "nativeCRS": ft.get_wkt(), + "attributes": attributes, + "nativeBoundingBox": { + "minx": extent.minX(), + "miny": extent.minY(), + "maxx": extent.maxX(), + "maxy": extent.maxY(), + "crs": "%s:%s" % (ft.get_authority_name(), ft.get_authority_code()), + }, + "latLonBoundingBox": { + "minx": latlon_extent.minX(), + "miny": latlon_extent.minY(), + "maxx": latlon_extent.maxX(), + "maxy": latlon_extent.maxY(), + "crs": "EPSG:4326", + }, + # "srs": "%s:%s" % (ft.get_authority()[0], ft.get_authority()[1]), + "projectionPolicy": "NONE", + # About srs & projectionPolicy: (TODO: Handle the other cases) + # In MRA, it is easier (or more logical?) to keep native CRS, + # Or there is a problem of understanding on our part. + # So, i prefer to comment 'srs' entry cause we force the + # value of 'projectionPolicy' to 'NONE'... but it is something + # we should investigate... 
+ "enabled": True, # Always enabled => TODO + "store": { # TODO: add key: class="dataStore" + "name": ds_name, + "href": "%s/workspaces/%s/datastores/%s.%s" % ( + web.ctx.home, ws_name, ds_name, format) + }, + # TODO: maxFeatures + }) + } @HTTPCompatible() def PUT(self, ws_name, ds_name, ft_name, format): @@ -439,8 +447,9 @@ class featuretype(object): # We need to check if there are any layers using this. mf = mra.get_available() - assert_is_empty(mf.iter_layers(mra={"name":ft_name, "workspace":ws_name, "storage":ds_name, - "type":"featuretype"}),"featuretype", ft_name) + assert_is_empty( + mf.iter_layers(mra={"name": ft_name, "workspace": ws_name, "storage": ds_name, "type": "featuretype"}), + "featuretype", ft_name) with webapp.mightNotFound("featureType", datastore=ds_name): ws.delete_featuretypemodel(ds_name, ft_name) @@ -460,12 +469,13 @@ class coveragestores(object): """List all coverage stores in workspace.""" ws = get_workspace(ws_name) - return {"coverageStores": [{ - "name": cs_name, - "href": "%s/workspaces/%s/coveragestores/%s.%s" % ( - web.ctx.home, ws.name, cs_name, format) - } for cs_name in ws.iter_coveragestore_names()] - } + return { + "coverageStores": [{ + "name": cs_name, + "href": "%s/workspaces/%s/coveragestores/%s.%s" % ( + web.ctx.home, ws.name, cs_name, format) + } for cs_name in ws.iter_coveragestore_names()] + } @HTTPCompatible() def POST(self, ws_name, format): @@ -487,7 +497,7 @@ class coveragestores(object): # mf.save() webapp.Created("%s/workspaces/%s/coveragestores/%s.%s" % ( - web.ctx.home, ws_name, cs_name, format)) + web.ctx.home, ws_name, cs_name, format)) class coveragestore(object): @@ -507,25 +517,25 @@ class coveragestore(object): info = ws.get_coveragestore_info(cs_name) connectionParameters = info.get("connectionParameters", {}) - return {"coverageStore": { - "name": info["name"], - "enabled": True, # Always enabled - # TODO: Handle enabled/disabled states - "workspace": { - "name": ws.name, - "href": "%s/workspaces/%s.%s" % ( - web.ctx.home, ws.name, format), - }, - "coverages": href("%s/workspaces/%s/coveragestores/%s/coverages.%s" % ( - web.ctx.home, ws.name, cs_name, format) - ), - "connectionParameters": connectionParameters and Entries({ - "url": info["connectionParameters"]["url"], - # "namespace": None, # TODO - }, tag_name="entry"), - # TODO: type - } + return { + "coverageStore": { + "name": info["name"], + "enabled": True, # Always enabled + # TODO: Handle enabled/disabled states + "workspace": { + "name": ws.name, + "href": "%s/workspaces/%s.%s" % ( + web.ctx.home, ws.name, format), + }, + "coverages": href("%s/workspaces/%s/coveragestores/%s/coverages.%s" % ( + web.ctx.home, ws.name, cs_name, format)), + "connectionParameters": connectionParameters and Entries({ + "url": info["connectionParameters"]["url"], + # "namespace": None, # TODO + }, tag_name="entry"), + # TODO: type } + } @HTTPCompatible() def PUT(self, ws_name, cs_name, format): @@ -569,12 +579,13 @@ class coverages(object): """List all coverages in selected coverages store <cs>.""" ws = get_workspace(ws_name) - return {"coverages": [{ - "name": c.name, - "href": "%s/workspaces/%s/coveragestores/%s/coverages/%s.%s" % ( - web.ctx.home, ws.name, cs_name, c.name, format) - } for c in ws.iter_coveragemodels(cs_name)] - } + return { + "coverages": [{ + "name": c.name, + "href": "%s/workspaces/%s/coveragestores/%s/coverages/%s.%s" % ( + web.ctx.home, ws.name, cs_name, c.name, format) + } for c in ws.iter_coveragemodels(cs_name)] + } @HTTPCompatible() def POST(self, ws_name, 
cs_name, format): @@ -609,7 +620,7 @@ class coverages(object): wsmf.save() webapp.Created("%s/workspaces/%s/coveragestores/%s/coverages/%s.%s" % ( - web.ctx.home, ws.name, cs_name, data["name"], format)) + web.ctx.home, ws.name, cs_name, data["name"], format)) class coverage(object): @@ -633,61 +644,62 @@ class coverage(object): extent = c.get_extent() latlon_extent = c.get_latlon_extent() - return {"coverage": ({ - "name": c.name, - "nativeName": c.name, - "title": c.get_mra_metadata("title", c.name), - "abstract": c.get_mra_metadata("abstract", None), - # TODO: Keywords - "nativeCRS": c.get_wkt(), # TODO: Add key class="projected" if projected... - "srs": "%s:%s" % (c.get_authority_name(), c.get_authority_code()), - "nativeBoundingBox": { - "minx": extent.minX(), - "miny": extent.minY(), - "maxx": extent.maxX(), - "maxy": extent.maxY(), - "crs": "%s:%s" % (c.get_authority_name(), c.get_authority_code()), # TODO: Add key class="projected" if projected... - }, - "latLonBoundingBox":{ - "minx": latlon_extent.minX(), - "miny": latlon_extent.minY(), - "maxx": latlon_extent.maxX(), - "maxy": latlon_extent.maxY(), - "crs": "EPSG:4326" - }, - "enabled": True, # Always enabled => TODO - "store": { # TODO: Add attr class="coverageStore" - "name": cs_name, - "href": "%s/workspaces/%s/coveragestores/%s.%s" % ( - web.ctx.home, ws_name, cs_name, format) - }, - # TODO: - # "nativeFormat": None, - # "grid": { - # "range": { - # "low": None, - # "high": None, - # }, - # "transform": { - # "scaleX": None, - # "scaleY": None, - # "shearX": None, - # "shearY": None, - # "translateX": None, - # "translateY": None, - # }, - # "crs": None, - # }, - # "supportedFormats": [], - # "interpolationMethods": [], - # "defaultInterpolationMethod": None, - # "dimensions": [], - # "projectionPolicy": None, - # "requestSRS": None, - # "responseSRS": None, - # "parameters": None, - }) - } + return { + "coverage": ({ + "name": c.name, + "nativeName": c.name, + "title": c.get_mra_metadata("title", c.name), + "abstract": c.get_mra_metadata("abstract", None), + # TODO: Keywords + "nativeCRS": c.get_wkt(), # TODO: Add key class="projected" if projected... + "srs": "%s:%s" % (c.get_authority_name(), c.get_authority_code()), + "nativeBoundingBox": { + "minx": extent.minX(), + "miny": extent.minY(), + "maxx": extent.maxX(), + "maxy": extent.maxY(), + "crs": "%s:%s" % (c.get_authority_name(), c.get_authority_code()), # TODO: Add key class="projected" if projected... 
+ }, + "latLonBoundingBox":{ + "minx": latlon_extent.minX(), + "miny": latlon_extent.minY(), + "maxx": latlon_extent.maxX(), + "maxy": latlon_extent.maxY(), + "crs": "EPSG:4326" + }, + "enabled": True, # Always enabled => TODO + "store": { # TODO: Add attr class="coverageStore" + "name": cs_name, + "href": "%s/workspaces/%s/coveragestores/%s.%s" % ( + web.ctx.home, ws_name, cs_name, format) + }, + # TODO: + # "nativeFormat": None, + # "grid": { + # "range": { + # "low": None, + # "high": None, + # }, + # "transform": { + # "scaleX": None, + # "scaleY": None, + # "shearX": None, + # "shearY": None, + # "translateX": None, + # "translateY": None, + # }, + # "crs": None, + # }, + # "supportedFormats": [], + # "interpolationMethods": [], + # "defaultInterpolationMethod": None, + # "dimensions": [], + # "projectionPolicy": None, + # "requestSRS": None, + # "responseSRS": None, + # "parameters": None, + }) + } @HTTPCompatible() def PUT(self, ws_name, cs_name, c_name, format): @@ -712,8 +724,9 @@ class coverage(object): ws = get_workspace(ws_name) # We need to check if there are any layers using this. mf = mra.get_available() - assert_is_empty(mf.iter_layers(mra={"name":c_name, "workspace":ws_name, "storage":cs_name, - "type":"coverage"}), "coverage", c_name) + assert_is_empty( + mf.iter_layers(mra={"name": c_name, "workspace": ws_name, "storage": cs_name, "type": "coverage"}), + "coverage", c_name) with webapp.mightNotFound("coverage", coveragestore=cs_name): ws.delete_coveragemodel(c_name, cs_name) @@ -842,11 +855,12 @@ class styles(object): def GET(self, format): """List all SLD styles.""" - return {"styles": [{ - "name": s_name, - "href": "%s/styles/%s.%s" % (web.ctx.home, s_name, format) - } for s_name in mra.list_styles()] - } + return { + "styles": [{ + "name": s_name, + "href": "%s/styles/%s.%s" % (web.ctx.home, s_name, format) + } for s_name in mra.list_styles()] + } @HTTPCompatible(authorize=["sld"]) def POST(self, format): @@ -887,12 +901,13 @@ class style(object): if format == "sld": return style - return {"style": { - "name": s_name, - "sldVersion": Entries(["1.0.0"], tag_name="version"), - "filename": s_name + ".sld", - } + return { + "style": { + "name": s_name, + "sldVersion": Entries(["1.0.0"], tag_name="version"), + "filename": s_name + ".sld", } + } @HTTPCompatible(authorize=["sld"]) def PUT(self, s_name, format): @@ -918,11 +933,12 @@ class layers(object): """List all layers.""" mf = mra.get_available() - return {"layers": [{ - "name": layer.ms.name, - "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), - } for layer in mf.iter_layers()] - } + return { + "layers": [{ + "name": layer.ms.name, + "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), + } for layer in mf.iter_layers()] + } @HTTPCompatible() def POST(self, format): @@ -941,7 +957,7 @@ class layers(object): except ValueError: raise webapp.NotFound(message="resource \"%s\" was not found." % href) - st_type, r_type = st_type[:-1], r_type[:-1] # Remove trailing s. + st_type, r_type = st_type[:-1], r_type[:-1] # Remove trailing s. 
ws = get_workspace(ws_name) with webapp.mightNotFound(r_type, workspace=ws_name): @@ -995,11 +1011,12 @@ class workspaceLayers(object): """List all layers.""" wsmf = mra.get_service(ws_name) - return {"layers": [{ - "name": layer.ms.name, - "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), - } for layer in wsmf.iter_layers()] - } + return { + "layers": [{ + "name": layer.ms.name, + "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), + } for layer in wsmf.iter_layers()] + } class layer(object): @@ -1115,11 +1132,13 @@ class layer(object): raise KeyError(r_type) if layer.get_mra_metadata("type") != r_type: - raise webapp.BadRequest("Can't change a \"%s\" layer into a \"%s\"." - % (layer.get_mra_metadata("type"), r_type)) + raise webapp.BadRequest( + "Can't change a \"%s\" layer into a \"%s\"." + % (layer.get_mra_metadata("type"), r_type)) if wslayer.get_mra_metadata("type") != r_type: - raise webapp.BadRequest("Can't change a \"%s\" layer into a \"%s\"." - % (wslayer.get_mra_metadata("type"), r_type)) + raise webapp.BadRequest( + "Can't change a \"%s\" layer into a \"%s\"." + % (wslayer.get_mra_metadata("type"), r_type)) model.configure_layer(layer, l_enabled) model.configure_layer(wslayer, l_enabled) @@ -1265,11 +1284,12 @@ class layerstyles(object): if format == "sld": return layer.getSLD() - return {"styles": [{ - "name": s_name, - "href": "%s/styles/%s.%s" % (web.ctx.home, s_name, format), - } for s_name in layer.iter_styles()], - } + return { + "styles": [{ + "name": s_name, + "href": "%s/styles/%s.%s" % (web.ctx.home, s_name, format), + } for s_name in layer.iter_styles()], + } class layerstyle(object): @@ -1340,11 +1360,12 @@ class layergroups(object): mf = mra.get_available() - return {"layerGroups" : [{ - "name": lg.name, - "href": "%s/layergroups/%s.%s" % (web.ctx.home, lg.name, format) - } for lg in mf.iter_layergroups()] - } + return { + "layerGroups": [{ + "name": lg.name, + "href": "%s/layergroups/%s.%s" % (web.ctx.home, lg.name, format) + } for lg in mf.iter_layergroups()] + } @HTTPCompatible() def POST(self, format): @@ -1391,18 +1412,19 @@ class layergroup(object): "crs": "EPSG:4326", } - return {"layerGroup": ({ - "name": lg.name, - "mode": "NAMED", # Only available mode in MRA. - "publishables": Entries([{ - "name": layer.ms.name, - "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), - } for layer in lg.iter_layers()], tag_name="published"), - "bounds": Entries(bounds), - # TODO: Styles - # "styles": [], - }) - } + return { + "layerGroup": ({ + "name": lg.name, + "mode": "NAMED", # Only available mode in MRA. 
+ "publishables": Entries([{ + "name": layer.ms.name, + "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), + } for layer in lg.iter_layers()], tag_name="published"), + "bounds": Entries(bounds), + # TODO: Styles + # "styles": [], + }) + } @HTTPCompatible() def PUT(self, lg_name, format): @@ -1447,11 +1469,12 @@ class workspaceLayergroups(object): """List all layer groups in a workspace.""" wsmf = mra.get_service(ws_name) - return {"layerGroups": [{ - "name": lg.name, - "href": "%s/workspaces/%s/layergroups/%s.%s" % (web.ctx.home, wsmf.name, lg.name, format) - } for lg in wsmf.iter_layergroups()] - } + return { + "layerGroups": [{ + "name": lg.name, + "href": "%s/workspaces/%s/layergroups/%s.%s" % (web.ctx.home, wsmf.name, lg.name, format) + } for lg in wsmf.iter_layergroups()] + } @HTTPCompatible() def POST(self, ws_name, format): @@ -1498,18 +1521,19 @@ class workspaceLayergroup(object): "crs": "EPSG:4326", } - return {"layerGroup": ({ - "name": lg.name, - "mode": "NAMED", # Only available mode in MRA. - "publishables": Entries([{ - "name": layer.ms.name, - "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), - } for layer in lg.iter_layers()], tag_name="published"), - "bounds": Entries(bounds), - # TODO: Styles - # "styles": [], - }) - } + return { + "layerGroup": ({ + "name": lg.name, + "mode": "NAMED", # Only available mode in MRA. + "publishables": Entries([{ + "name": layer.ms.name, + "href": "%s/layers/%s.%s" % (web.ctx.home, layer.ms.name, format), + } for layer in lg.iter_layers()], tag_name="published"), + "bounds": Entries(bounds), + # TODO: Styles + # "styles": [], + }) + } @HTTPCompatible() def PUT(self, ws_name, lg_name, format): @@ -1560,8 +1584,8 @@ class OWSGlobalSettings(object): "enabled": mf.get_metadata("%s_enable_request" % ows) == "*" and True or False, "name": ows, "schemaBaseURL": mf.get_metadata("ows_schemas_location", "http://schemas.opengis.net"), - } - )} + }) + } @HTTPCompatible() def PUT(self, ows, format): @@ -1597,8 +1621,8 @@ class OWSWorkspaceSettings(object): "title": mf.get_metadata("%s_title" % ows, None), "abstract": mf.get_metadata("%s_abstract" % ows, None), "schemaBaseURL": mf.get_metadata("ows_schemas_location", "http://schemas.opengis.net"), - } - )} + }) + } @HTTPCompatible() def PUT(self, ows, ws_name, format): diff --git a/src/stores.py b/src/stores.py index 088c1fff71dab7b330a871c025c8b4706c891305..87a16aeff4544bf0e7ffa970f1268556abeda558 100644 --- a/src/stores.py +++ b/src/stores.py @@ -21,14 +21,19 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ A field implementation of georeferenced data (both vector and raster) backed by the GDAL/OGR library. 
""" -from osgeo import ogr, osr, gdal + import mapscript +from osgeo import gdal +from osgeo import ogr +from osgeo import osr + import tools @@ -102,7 +107,7 @@ class Field(object): if type in (4, 5): return "Character" if type in (6, 7): - return "Unknown" # :) + return "Unknown" # :) if type in (9, 10): return "Date" else: @@ -127,7 +132,7 @@ class Feature(object): """Backend should be a ogr.Feature object which will be used to retrieve data.""" self.backend = backend - self.layer + self.layer = layer def __getattr__(self, attr): return self[attr] @@ -135,14 +140,15 @@ class Feature(object): def __getitem__(self, idx): if not isinstance(idx, int): idx = self.backend.GetFieldIndex(idx) - if idx < 0: raise KeyError() + if idx < 0: + raise KeyError() return self.backend.GetField(idx) def get_id(self): return self.backend.GetFID() def get_field(self): - return Field(self.backend.GetFieldDefn(), layer) + return Field(self.backend.GetFieldDefn(), self.layer) class Featuretype(object): @@ -252,15 +258,15 @@ class Featuretype(object): def get_latlon_extent(self): rect = mapscript.rectObj(*self.get_extent()) - res = rect.project(mapscript.projectionObj(self.get_proj4()), - mapscript.projectionObj("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")) + rect.project(mapscript.projectionObj(self.get_proj4()), + mapscript.projectionObj("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")) return Extent(rect.minx, rect.miny, rect.maxx, rect.maxy) def get_native(self): return str(self.backend.GetSpatialRef()) def fieldindex(self, field): - idx = GetLayerDefn().GetFieldIndex(field) + idx = self.backend.GetLayerDefn().GetFieldIndex(field) if idx < 0: raise AttributeError() return idx @@ -281,7 +287,7 @@ class Featuretype(object): def iterfeatures(self, what=[], when={}): if what != [] or when != {}: raise NotImplementedError("Iterfeature doesn't support filters yet.") - for i in range(self.backend.GetFeatureCount()): + for i in range(self.nbfeatures()): yield Feature(self.backend.GetFeature(i), self) def get_aditional_info(self): @@ -290,10 +296,11 @@ class Featuretype(object): tokens.insert(0, "public") schema, table = tokens - result = self.ds.backend.ExecuteSQL("SELECT column_name, is_nullable FROM INFORMATION_SCHEMA.COLUMNS " - "WHERE table_schema = '%s' AND table_name = '%s'" % - (schema, table)) - if not result: return + result = self.ds.backend.ExecuteSQL( + "SELECT column_name, is_nullable FROM INFORMATION_SCHEMA.COLUMNS " + "WHERE table_schema = '%s' AND table_name = '%s'" % (schema, table)) + if not result: + return None for i in range(result.GetFeatureCount()): feature = result.GetFeature(i) @@ -313,7 +320,7 @@ class Datastore(object): """ self.schema = schema self.backend = path if isinstance(path, ogr.DataSource) else ogr.Open(path) - if self.backend == None: + if self.backend is None: raise ValueError("Datastore backend could not be opened using \"%s\"." 
% path) def __len__(self): @@ -324,7 +331,7 @@ class Datastore(object): def __contains__(self, key): try: - self[item] + self[key] except LookupError: return False return True @@ -332,12 +339,14 @@ class Datastore(object): def __getitem__(self, key): if isinstance(key, int): item = self.backend.GetLayerByIndex(key) - if item == None: raise IndexError("No layer \"%s\"" % key) + if item is None: + raise IndexError("No layer \"%s\"" % key) else: if self.schema: key = "%s.%s" % (self.schema, key) item = self.backend.GetLayerByName(key) - if item == None: raise KeyError(key) + if item is None: + raise KeyError(key) return Featuretype(item, self) def nblayers(self): @@ -360,6 +369,8 @@ class Band(object): class Coveragestore(object): """A coveragestore implementation backed by gdal.""" + tindex = None + def __init__(self, path): """Path will be used to open the store, it can be a simple filesystem path or something more complex used by gdal/ogr to access databases for example. @@ -368,7 +379,15 @@ class Coveragestore(object): """ self.backend = path if isinstance(path, gdal.Dataset) else gdal.Open(path) - if self.backend == None: + if self.backend is None: + ds = Datastore(path if isinstance(path, ogr.DataSource) else ogr.Open(path)) + if ds: + self.tindex = Featuretype(ds.backend.GetLayerByIndex(0), ds) + path = getattr( + self.tindex.backend.GetFeature(0), self.get_tileitem()) + self.backend = gdal.Open(str(path)) + + if self.backend is None: raise ValueError("Coveragestore backend could not be opened. \"%s\"." % path) def __len__(self): @@ -378,11 +397,11 @@ class Coveragestore(object): return self.iterbands() def __contains__(self, idx): - return 0 < idx and idx < self.backend.RasterCount + return idx > 0 and idx < self.backend.RasterCount def __getitem__(self, idx): band = self.backend.GetRasterBand(idx) - if band == None: + if band is None: raise IndexError() return band @@ -394,22 +413,25 @@ class Coveragestore(object): corners = set() for x in (0, self.backend.RasterXSize): for y in (0, self.backend.RasterYSize): - corners.add((gt[0]+(x*gt[1])+(y*gt[2]), gt[3]+(x*gt[4])+(y*gt[5]))) + corners.add( + (gt[0] + (x * gt[1]) + (y * gt[2]), gt[3] + (x * gt[4]) + (y * gt[5]))) return corners def get_extent(self): + if self.tindex is not None: + return self.tindex.get_extent() + #else: corners = self.get_corners() - minX = min(x for x, y in corners) - minY = min(y for x, y in corners) - maxX = max(x for x, y in corners) - maxY = max(y for x, y in corners) - return Extent(minX, minY, maxX, maxY) + minx = min(x for x, y in corners) + miny = min(y for x, y in corners) + maxx = max(x for x, y in corners) + maxy = max(y for x, y in corners) + return Extent(minx, miny, maxx, maxy) def get_latlon_extent(self): rect = mapscript.rectObj(*self.get_extent()) - res = rect.project(mapscript.projectionObj(self.get_proj4()), - mapscript.projectionObj("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")) - + rect.project(mapscript.projectionObj(self.get_proj4()), + mapscript.projectionObj("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")) return Extent(rect.minx, rect.miny, rect.maxx, rect.maxy) def get_projection(self): @@ -427,3 +449,16 @@ class Coveragestore(object): def iterbands(self): for i in range(1, self.backend.RasterCount + 1): yield Band(self.backend.GetRasterBand(i)) + + def get_tileindex(self): + """Return the path to the index file.""" + if self.tindex is None: + return None + return self.tindex.ds.backend.GetName() + + def get_tileitem(self): + """Return the field in the shapefile which contains 
+ the filenames referenced by the index.""" + if self.tindex is None: + return None + return self.tindex.backend.GetLayerDefn().GetFieldDefn(0).name diff --git a/src/templates/response.html b/src/templates/response.html index 597a470e65ecc69ae9a16923eab10774bf989a5f..447943a2c6e174752d10a27f7b3cff7f859feff8 100644 --- a/src/templates/response.html +++ b/src/templates/response.html @@ -1,18 +1,15 @@ $def with (base, path, links, code) <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> - <html> <head> <title>MRA</title> <link rel="StyleSheet" type="text/css" href="$base/static/bootstrap.css" /> - <style> .key:hover { - text-decoration:underline; + text-decoration:underline; } </style> </head> - <body style="margin: 1em"> <div class="alert alert-block"> <h4>This data is also available in other formats!</h4> @@ -20,13 +17,11 @@ $def with (base, path, links, code) $for comma, (type, link) in ((',' if i < len(links)-1 else '.', data) for i, data in enumerate(links)): <b><a href="$base$link">$type</a></b>$comma </div> - <ul class="breadcrumb"> <li><a href="$base/">mra</a></li> $for name in path[:-1]: <li><a href="$base$('/'.join(path[:loop.index]))">$name </a><span class="divider">/</span></li> <li class="active">$path[-1]</li> </ul> - <script> function toggle(id) { e = document.getElementById(id); @@ -47,8 +42,6 @@ $def with (base, path, links, code) } } </script> - $:code </body> - </html> diff --git a/src/tools.py b/src/tools.py index 9dbe4d0a3b32bfc1dc7fa93e23b9ccac8151134e..a1b3c56ad71db2ce6759e466eee1b8f1b99c7c90 100644 --- a/src/tools.py +++ b/src/tools.py @@ -21,19 +21,20 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + """ Miscellaneous functions. """ + +import mapscript import os -import yaml -import sys +from osgeo import gdal +from osgeo import osr import pyxml import webapp -import xml.etree.ElementTree as etree -from osgeo import osr, gdal -import mapscript + __config = None @@ -52,7 +53,7 @@ def assert_is_empty(generator, tname, iname): try: next(generator) except (StopIteration, SystemError): - pass # Everything is ok. + pass # Everything is ok. else: raise webapp.Forbidden(message="Can't remove \"%s\" because it is an non-empty %s." % (iname, tname)) @@ -76,11 +77,11 @@ def is_hidden(path): def get_dflt_sld_name(type): # TODO: Names should be changed... - if type == 0: # point + if type == 0: # point return "default_point" - elif type == 1: # line + elif type == 1: # line return "default_line" - elif type == 2: # polygon + elif type == 2: # polygon return "default_polygon" else: return None @@ -114,7 +115,7 @@ def wkt_to_authority(wkt): elif srs.GetAuthorityCode("GEOGCS") is not None: return srs.GetAuthorityName("GEOGCS"), srs.GetAuthorityCode("GEOGCS") else: - return "Unknown", "Unknown" # :s it could be improved... (TODO) + return "Unknown", "Unknown" # Bad, it could be improved... (TODO) def get_units(value): diff --git a/src/webapp.py b/src/webapp.py index 6d03ad41c4ac9e23b88f5852ac7d1f433d3f7ad1..69e060771baa646272ab873e13c98188e5fb48e2 100644 --- a/src/webapp.py +++ b/src/webapp.py @@ -21,21 +21,17 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -""" - ... 
-""" - -import web -import json -import pyxml -import pyhtml -import inspect import functools +import inspect +import json +import logging import os.path -import itertools +import web + import mralogs -import logging +import pyxml +import pyhtml class KeyExists(KeyError): @@ -133,7 +129,7 @@ class exceptionManager(object): class exceptionsToHTTPError(exceptionManager): def __init__(self, message=None, exceptions=None, **kwargs): - if message != None: + if message is not None: self.message = message self.msg_args = kwargs exceptionManager.__init__(self, exceptions or self.exceptions) @@ -478,10 +474,12 @@ def get_data(name=None, mandatory=[], authorized=[], forbidden=[]): data, dname = pyxml.loads(data, retname=True) print("received \"%s\"" % dname) print(data) - if name and dname != name: data = None + if name and dname != name: + data = None elif "application/json" in ctype: data = json.loads(data.decode()) - if name: data = data.get(name, None) + if name: + data = data.get(name, None) else: raise web.badrequest("Content-type \"%s\" is not allowed." % ctype) except (AttributeError, ValueError): diff --git a/tests/__init__.py b/tests/__init__.py index 27b06b41f8163aea594658c73b1cc1286ed07a50..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,22 +0,0 @@ -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# # -# MapServer REST API is a python wrapper around MapServer which # -# allows to manipulate a mapfile in a RESTFul way. It has been # -# developped to match as close as possible the way the GeoServer # -# REST API acts. # -# # -# Copyright (C) 2011-2020 Neogeo Technologies. # -# # -# This file is part of MapServer Rest API. # -# # -# MapServer Rest API is free software: you can redistribute it # -# and/or modify it under the terms of the GNU General Public License # -# as published by the Free Software Foundation, either version 3 of # -# the License, or (at your option) any later version. # -# # -# MapServer Rest API is distributed in the hope that it will be # -# useful, but WITHOUT ANY WARRANTY; without even the implied warranty # -# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # -# GNU General Public License for more details. 
# -# # -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # diff --git a/tests/files/style.sld b/tests/files/style.sld index fd2f1bd579e7aa8dfeceb37c5a9e2e3af509ffe4..b475cb8d9f00ee721fbf0096f39b7615ef3ea0d6 100644 --- a/tests/files/style.sld +++ b/tests/files/style.sld @@ -1,21 +1,28 @@ -<StyledLayerDescriptor version="1.0.0" xmlns="http://www.opengis.net/sld" xmlns:gml="http://www.opengis.net/gml" xmlns:ogc="http://www.opengis.net/ogc" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd"> -<NamedLayer> -<Name>countries</Name> -<UserStyle> -<FeatureTypeStyle> -<Rule> -<Name>Class given in default</Name> -<PolygonSymbolizer> -<Fill> -<CssParameter name="fill">#dcdcdc</CssParameter> -</Fill> -<Stroke> -<CssParameter name="stroke">#606060</CssParameter> -<CssParameter name="stroke-width">1.00</CssParameter> -</Stroke> -</PolygonSymbolizer> -</Rule> -</FeatureTypeStyle> -</UserStyle> -</NamedLayer> -</StyledLayerDescriptor> +<StyledLayerDescriptor + version="1.0.0" + xmlns="http://www.opengis.net/sld" + xmlns:gml="http://www.opengis.net/gml" + xmlns:ogc="http://www.opengis.net/ogc" + xmlns:xlink="http://www.w3.org/1999/xlink" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd"> + <NamedLayer> + <Name>countries</Name> + <UserStyle> + <FeatureTypeStyle> + <Rule> + <Name>Class given in default</Name> + <PolygonSymbolizer> + <Fill> + <CssParameter name="fill">#dcdcdc</CssParameter> + </Fill> + <Stroke> + <CssParameter name="stroke">#606060</CssParameter> + <CssParameter name="stroke-width">1.00</CssParameter> + </Stroke> + </PolygonSymbolizer> + </Rule> + </FeatureTypeStyle> + </UserStyle> + </NamedLayer> +</StyledLayerDescriptor> \ No newline at end of file diff --git a/tests/testScenario.py b/tests/testScenario.py index 5fc5a4e966002fd6d5fc071930e7d656990ab656..1f2c960246c576e8366c5db0a17beda1aa599b3c 100644 --- a/tests/testScenario.py +++ b/tests/testScenario.py @@ -21,11 +21,10 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -import utils -from utils import APIRequest -import sys import random +from utils import APIRequest + def _test_workspaces(target, map_name, delete=True): @@ -52,8 +51,9 @@ def _test_workspaces(target, map_name, delete=True): # POST a datastore and GET it name, title = "testDS1", "test datastore 1" - _, r = APIRequest("POST", ws["dataStores"]["href"], {"dataStore":{"name":name, "title":title}}, - get_response=True) + _, r = APIRequest( + "POST", ws["dataStores"]["href"], {"dataStore": {"name": name, "title": title}}, + get_response=True) ds_link = r.getheader("Location").rsplit(".", 1)[0] ds = APIRequest("GET", ds_link)["dataStore"] @@ -64,7 +64,7 @@ def _test_workspaces(target, map_name, delete=True): ds["title"] = title.upper() del ds["href"] - APIRequest("PUT", ds_link, {"dataStore":ds}) + APIRequest("PUT", ds_link, {"dataStore": ds}) ds = APIRequest("GET", ds_link)["dataStore"] assert ds["title"] == title.upper() @@ -74,7 +74,6 @@ def _test_workspaces(target, map_name, delete=True): fts = APIRequest("GET", ds["href"])["featureTypes"] assert len(fts) == 0 - # PUT file, and check if datastore is updated. 
APIRequest("PUT", ds_link + "/file.shp", open("./files/timezones_shp.zip", "rb"), @@ -86,8 +85,9 @@ def _test_workspaces(target, map_name, delete=True): # POST a featuretype and GET it name, title = "timezones", "test feature type 1" - _, r = APIRequest("POST", ds["href"], {"featureType":{"name":name, "title":title}}, - get_response=True) + _, r = APIRequest( + "POST", ds["href"], {"featureType": {"name": name, "title": title}}, + get_response=True) ft_link = r.getheader("Location").rsplit(".", 1)[0] ft = APIRequest("GET", ft_link)["featureType"] @@ -97,7 +97,7 @@ def _test_workspaces(target, map_name, delete=True): # PUT a featuretype ft["title"] = title.upper() - APIRequest("PUT", ft_link, {"featureType":ft}) + APIRequest("PUT", ft_link, {"featureType": ft}) ft = APIRequest("GET", ft_link)["featureType"] assert ft["title"] == title.upper() @@ -114,8 +114,6 @@ def _test_workspaces(target, map_name, delete=True): dss = APIRequest("GET", ws["dataStores"]["href"])["dataStores"] assert len(dss) == 0 - - # # Test CoverageStores. # @@ -128,8 +126,9 @@ def _test_workspaces(target, map_name, delete=True): # POST a coverageStore and GET it name, title = "testCS1", "test coverageStore 1" - _, r = APIRequest("POST", ws["coverageStores"]["href"], {"coverageStore":{"name":name, "title":title}}, - get_response=True) + _, r = APIRequest( + "POST", ws["coverageStores"]["href"], {"coverageStore": {"name": name, "title": title}}, + get_response=True) cs_link = r.getheader("Location").rsplit(".", 1)[0] cs = APIRequest("GET", cs_link)["coverageStore"] @@ -140,7 +139,7 @@ def _test_workspaces(target, map_name, delete=True): cs["title"] = title.upper() del cs["href"] - APIRequest("PUT", cs_link, {"coverageStore":cs}) + APIRequest("PUT", cs_link, {"coverageStore": cs}) cs = APIRequest("GET", cs_link)["coverageStore"] assert cs["title"] == title.upper() @@ -150,7 +149,6 @@ def _test_workspaces(target, map_name, delete=True): fts = APIRequest("GET", cs["href"])["coverages"] assert len(fts) == 0 - # PUT file, and check if coverageStore is updated. APIRequest("PUT", cs_link + "/file.tif", open("./files/HYP_LR.zip", "rb"), @@ -162,8 +160,9 @@ def _test_workspaces(target, map_name, delete=True): # POST a coverage and GET it name, title = "HYP_LR", "test coverage 1" - _, r = APIRequest("POST", cs["href"], {"coverage":{"name":name, "title":title}}, - get_response=True) + _, r = APIRequest( + "POST", cs["href"], {"coverage": {"name": name, "title": title}}, + get_response=True) c_link = r.getheader("Location").rsplit(".", 1)[0] ft = APIRequest("GET", c_link)["coverage"] @@ -173,14 +172,13 @@ def _test_workspaces(target, map_name, delete=True): # PUT a coverage ft["title"] = title.upper() - APIRequest("PUT", c_link, {"coverage":ft}) + APIRequest("PUT", c_link, {"coverage": ft}) ft = APIRequest("GET", c_link)["coverage"] assert ft["title"] == title.upper() # DELETE stuff - if delete: APIRequest("DELETE", c_link) @@ -191,9 +189,9 @@ def _test_workspaces(target, map_name, delete=True): css = APIRequest("GET", ws["coverageStores"]["href"])["coverageStores"] assert len(css) == 0 - return ws + def _test_styles(target, map_name): # Lets DELETE all the styles. @@ -207,11 +205,12 @@ def _test_styles(target, map_name): # Lets POST a style and GET it. name = "__test_awesome_style_name" data = open("./files/style.sld").read() - noise = "".join(map(str, random.sample(xrange(10000000), 60))) + noise = "".join(map(str, random.sample(range(10000000), 60))) # We add some noise, so we can check PUT later. 
-    styles = APIRequest("POST", target + "/maps/" + map_name + "/styles?name=" + name,
-                        encode=None, content_type="application/vnd.ogc.sld+xml", data=data+noise)
+    styles = APIRequest(
+        "POST", target + "/maps/" + map_name + "/styles?name=" + name,
+        encode=None, content_type="application/vnd.ogc.sld+xml", data=data + noise)
 
     styles = APIRequest("GET", target + "/maps/" + map_name + "/styles")["styles"]
     assert len(styles) == 1
@@ -221,7 +220,7 @@ def _test_styles(target, map_name):
     style = APIRequest("GET", st_link)
 
     content = APIRequest("GET", style["href"], encode=None, decode=None)
-    assert content == data+noise
+    assert content == data + noise
 
     # Use PUT to remove the noise in the file.
     styles = APIRequest("PUT", style["href"], encode=None, content_type="application/vnd.ogc.sld+xml", data=data)
@@ -237,8 +236,9 @@ def _test_layers(target, map_name):
     ws = APIRequest("GET", wss[0]["href"])["workspace"]
     # DataStores.
     name, title = "testDS1", "test datastore 1"
-    _, r = APIRequest("POST", ws["dataStores"]["href"], {"dataStore":{"name":name, "title":title}},
-                      get_response=True)
+    _, r = APIRequest(
+        "POST", ws["dataStores"]["href"], {"dataStore": {"name": name, "title": title}},
+        get_response=True)
     ds_link = r.getheader("Location").rsplit(".", 1)[0]
     # PUT file
     APIRequest("PUT", ds_link + "/file.shp", open("./files/timezones_shp.zip", "rb"),
@@ -246,13 +246,15 @@ def _test_layers(target, map_name):
     ds = APIRequest("GET", ds_link)["dataStore"]
     # POST a featuretype
     name, title = "timezones", "test feature type 1"
-    _, r = APIRequest("POST", ds["href"], {"featureType":{"name":name, "title":title}},
-                      get_response=True)
+    _, r = APIRequest(
+        "POST", ds["href"], {"featureType": {"name": name, "title": title}},
+        get_response=True)
     ft_link = r.getheader("Location").rsplit(".", 1)[0]
     # CoverageStores.
name, title = "testCS1", "test coverageStore 1" - _, r = APIRequest("POST", ws["coverageStores"]["href"], {"coverageStore":{"name":name, "title":title}}, - get_response=True) + _, r = APIRequest( + "POST", ws["coverageStores"]["href"], {"coverageStore": {"name": name, "title": title}}, + get_response=True) cs_link = r.getheader("Location").rsplit(".", 1)[0] # PUT file APIRequest("PUT", cs_link + "/file.tif", open("./files/HYP_LR.zip", "rb"), @@ -260,8 +262,9 @@ def _test_layers(target, map_name): cs = APIRequest("GET", cs_link)["coverageStore"] # POST a coverage name, title = "HYP_LR", "test coverage 1" - _, r = APIRequest("POST", cs["href"], {"coverage":{"name":name, "title":title}}, - get_response=True) + _, r = APIRequest( + "POST", cs["href"], {"coverage": {"name": name, "title": title}}, + get_response=True) c_link = r.getheader("Location").rsplit(".", 1)[0] # @@ -275,7 +278,7 @@ def _test_layers(target, map_name): name = "FTlayerTest" _, r = APIRequest("POST", target + "/maps/" + map_name + "/layers", - {"layer":{"name":name, "resource":{"href":ft_link}}}, + {"layer": {"name": name, "resource": {"href": ft_link}}}, get_response=True) ftl_link = r.getheader("Location").rsplit(".", 1)[0] @@ -292,13 +295,13 @@ def _test_layers(target, map_name): styles = APIRequest("GET", ftl_link + "/styles")["styles"] assert len(styles) == 1 - APIRequest("POST", ftl_link + "/styles", {"style":{"resource":{"href":styles[0]["href"]}}}) + APIRequest("POST", ftl_link + "/styles", {"style": {"resource": {"href": styles[0]["href"]}}}) # A second layer for coverage name = "ClayerTest" _, r = APIRequest("POST", target + "/maps/" + map_name + "/layers", - {"layer":{"name":name, "resource":{"href":c_link}}}, + {"layer": {"name": name, "resource": {"href": c_link}}}, get_response=True) cl_link = r.getheader("Location").rsplit(".", 1)[0] @@ -319,7 +322,6 @@ def _test_layers(target, map_name): fields = APIRequest("GET", cl_link + "/styles")["styles"] assert len(fields) == 0 - # Now lets try layer groups. layers = APIRequest("GET", target + "/maps/" + map_name + "/layergroups")["layerGroups"] @@ -327,7 +329,9 @@ def _test_layers(target, map_name): # POST an empty group name = "test_group" - _, r = APIRequest("POST", target + "/maps/" + map_name + "/layergroups", {"layerGroup":{"name":name}}, get_response=True) + _, r = APIRequest( + "POST", target + "/maps/" + map_name + "/layergroups", + {"layerGroup": {"name": name}}, get_response=True) g_link = r.getheader("Location").rsplit(".", 1)[0] # GET it. @@ -338,7 +342,7 @@ def _test_layers(target, map_name): # PUT some new layers in it. group["layers"] = [x["name"] for x in group["layers"]] + [ftl["name"]] del group["bounds"] - APIRequest("PUT", g_link, {"layerGroup":group}) + APIRequest("PUT", g_link, {"layerGroup": group}) # GET it. group = APIRequest("GET", g_link)["layerGroup"] @@ -347,7 +351,7 @@ def _test_layers(target, map_name): # PUT some new layers in it. group["layers"] = [x["name"] for x in group["layers"]] + [cl["name"]] del group["bounds"] - APIRequest("PUT", g_link, {"layerGroup":group}) + APIRequest("PUT", g_link, {"layerGroup": group}) # GET it. group = APIRequest("GET", g_link)["layerGroup"] @@ -363,7 +367,7 @@ def test_scenario(): # Clean the test file, now we are sure it is empty. 
APIRequest("DELETE", target + "/maps/" + map_name) - APIRequest("POST", target + "/maps", {"mapfile":{"name":map_name}}) + APIRequest("POST", target + "/maps", {"mapfile": {"name": map_name}}) # _test_workspaces(target, map_name) # _test_styles(target, map_name) diff --git a/tests/utils.py b/tests/utils.py index 41ac318b63f9f87d81153fc5be8babe579038d1d..1985ec9cffeb56821aae6cec8298dd0fbf518d60 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -21,15 +21,16 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -import httplib -import pyxml +import httplib import json - +import pyxml import sys + default_encoding = "json" + def deduce_content_type(type): if type == "json": return "application/json" @@ -45,7 +46,7 @@ def APIRequest(method, url, data=None, encode=default_encoding, decode=default_e elif encode == "xml": data = pyxml.dumps(data) - if content_type == None: + if content_type is None: content_type = deduce_content_type(encode) surl = httplib.urlsplit(url) @@ -58,13 +59,13 @@ def APIRequest(method, url, data=None, encode=default_encoding, decode=default_e if surl.query: url += "?" + surl.query - print >>sys.stderr, method, surl.geturl().replace(surl.path, url) + print(sys.stderr, method, surl.geturl().replace(surl.path, url)) conn = httplib.HTTPConnection(surl.hostname, surl.port) - conn.request(method, url, body=data, headers={"Content-Type":content_type}) + conn.request(method, url, body=data, headers={"Content-Type": content_type}) r = conn.getresponse() - if expected_type == None: + if expected_type is None: expected_type = deduce_content_type(decode) # TODO: enable this test once it is suported. @@ -78,10 +79,10 @@ def APIRequest(method, url, data=None, encode=default_encoding, decode=default_e recv = json.loads(recv) elif decode == "xml": recv = pyxml.loads(recv) - except: + except Exception: pass - print >>sys.stderr, r.status, r.reason + print(sys.stderr, r.status, r.reason) assert 200 <= r.status < 300, recv return (recv, r) if get_response else recv