desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
def test_zipped_upload_xml_sidecar(self):
    """Test uploading a zipped shapefile with xml sidecar."""
    fd, abspath = self.temp_file('.zip')
    handle = os.fdopen(fd, 'wb')
    archive = ZipFile(handle, 'w')
    pattern = os.path.join(GOOD_DATA, 'vector', 'Air_Runways.*')
    # Pack every sidecar (shp/shx/dbf/prj/xml...) flat into the zip.
    for member in glob.glob(pattern):
        archive.write(member, os.path.basename(member))
    archive.close()
    self.upload_file(abspath, self.complete_upload, check_name='Air_Runways')
|
def test_invalid_layer_upload(self):
    """Tests the layers that are invalid and should not be uploaded."""
    invalid_path = os.path.join(BAD_DATA)
    # Every file in the bad-data folder must fail projection validation.
    self.upload_folder_of_files(invalid_path, self.check_invalid_projection)
|
def test_coherent_importer_session(self):
    """Tests that the upload computes correctly next session IDs."""
    session_ids = []
    raster = os.path.join(GOOD_DATA, 'raster', 'relief_san_andres.tif')
    self.upload_file(raster, self.complete_raster_upload, session_ids=session_ids)
    # A batch of failing uploads in between must not disturb the ordering.
    self.upload_folder_of_files(os.path.join(BAD_DATA), self.check_invalid_projection, session_ids=session_ids)
    self.upload_file(raster, self.complete_raster_upload, session_ids=session_ids)
    self.assertTrue(len(session_ids) > 1)
    # Session ids must be strictly increasing across uploads.
    self.assertTrue(int(session_ids[0]) < int(session_ids[1]))
|
def test_extension_not_implemented(self):
    """Verify an error is returned when an unsupported layer is uploaded."""
    # Use this very source file as a guaranteed-unsupported upload.
    unsupported_path = __file__
    if unsupported_path.endswith('.pyc'):
        unsupported_path = unsupported_path.rstrip('c')
    with self.assertRaises(HTTPError):
        self.client.upload_file(unsupported_path)
|
def test_csv(self):
    """Make sure a csv upload fails gracefully/normally when not activated."""
    csv_file = self.make_csv(['lat', 'lon', 'thing'], ['-100', '-40', 'foo'])
    resp, data = self.client.upload_file(csv_file)
    self.assertTrue('success' in data)
    self.assertTrue(data['success'])
    # BUG FIX: the original used assertTrue(data['redirect_to'], '/upload/csv');
    # the second argument of assertTrue is a failure *message*, so no
    # comparison ever happened. Compare the redirect target explicitly.
    self.assertEquals(data['redirect_to'], '/upload/csv')
|
def test_csv(self):
    """Override the baseclass test and verify a correct CSV upload."""
    csv_file = self.make_csv(['lat', 'lon', 'thing'], ['-100', '-40', 'foo'])
    layer_name, ext = os.path.splitext(os.path.basename(csv_file))
    resp, form_data = self.client.upload_file(csv_file)
    self.check_save_step(resp, form_data)
    csv_step = form_data['redirect_to']
    self.assertEquals(csv_step, upload_step('csv'))
    # Tell the importer which columns carry the coordinates.
    form_data = dict(lat='lat', lng='lon', csrfmiddlewaretoken=self.client.get_csrf_token())
    resp = self.client.make_request(csv_step, form_data)
    content = json.loads(resp.read())
    # Follow the redirect chain to the final layer page.
    resp = self.client.get(content.get('redirect_to'))
    content = json.loads(resp.read())
    url = content.get('url')
    self.assertTrue(url.endswith(layer_name), ('expected url to end with %s, but got %s' % (layer_name, url)))
    self.assertEquals(resp.code, 200)
    self.check_layer_complete(url, layer_name)
|
def test_time(self):
    """Verify that uploading a time-based shapefile works properly."""
    cascading_delete(self.catalog, 'boxes_with_date')
    timedir = os.path.join(GOOD_DATA, 'time')
    layer_name = 'boxes_with_date'
    shp = os.path.join(timedir, ('%s.shp' % layer_name))
    resp, data = self.client.upload_file(shp)
    self.wait_for_progress(data.get('progress'))
    self.assertEquals(resp.code, 200)
    self.assertTrue(data['success'])
    # BUG FIX: assertTrue(x, msg) never compared against upload_step('time');
    # the second argument is only a failure message. Use an equality check.
    self.assertEquals(data['redirect_to'], upload_step('time'))
    resp, data = self.client.get_html(upload_step('time'))
    self.assertEquals(resp.code, 200)
    # Configure the time dimension on the 'date' attribute.
    data = dict(csrfmiddlewaretoken=self.client.get_csrf_token(), time_attribute='date', presentation_strategy='LIST')
    resp = self.client.make_request(upload_step('time'), data)
    url = json.loads(resp.read())['redirect_to']
    resp = self.client.make_request(url, data)
    url = json.loads(resp.read())['url']
    self.assertTrue(url.endswith(layer_name), ('expected url to end with %s, but got %s' % (layer_name, url)))
    self.assertEquals(resp.code, 200)
    url = urllib.unquote(url)
    self.check_layer_complete(url, layer_name)
    # The layer must expose its 100 time positions through WMS.
    wms = get_wms(type_name=('geonode:%s' % layer_name))
    layer_info = wms.items()[0][1]
    self.assertEquals(100, len(layer_info.timepositions))
|
def test_payload_creation(self):
    """Test formation of the REST call to geoserver's geogig API."""
    author_name = 'test'
    author_email = '[email protected]'
    # File-backed repository: only author info plus the parent directory.
    settings.OGC_SERVER['default']['PG_GEOGIG'] = False
    fb_message = {'authorName': author_name, 'authorEmail': author_email, 'parentDirectory': settings.OGC_SERVER['default']['GEOGIG_DATASTORE_DIR']}
    fb_payload = make_geogig_rest_payload(author_name, author_email)
    self.assertDictEqual(fb_message, fb_payload)
    self.assertEquals(json.dumps(fb_message, sort_keys=True), json.dumps(fb_payload, sort_keys=True))
    # Postgres-backed repository: payload must carry the DB connection info.
    settings.OGC_SERVER['default']['PG_GEOGIG'] = True
    settings.DATABASES['test-pg'] = {'HOST': 'localhost', 'PORT': '5432', 'NAME': 'repos', 'SCHEMA': 'public', 'USER': 'geogig', 'PASSWORD': 'geogig'}
    settings.OGC_SERVER['default']['DATASTORE'] = 'test-pg'
    pg_message = {'authorName': author_name, 'authorEmail': author_email, 'dbHost': settings.DATABASES['test-pg']['HOST'], 'dbPort': settings.DATABASES['test-pg']['PORT'], 'dbName': settings.DATABASES['test-pg']['NAME'], 'dbSchema': settings.DATABASES['test-pg']['SCHEMA'], 'dbUser': settings.DATABASES['test-pg']['USER'], 'dbPassword': settings.DATABASES['test-pg']['PASSWORD']}
    pg_payload = make_geogig_rest_payload(author_name, author_email)
    self.assertDictEqual(pg_message, pg_payload)
    self.assertEquals(json.dumps(pg_message, sort_keys=True), json.dumps(pg_payload, sort_keys=True))
|
def test_scan_file(self):
    """Tests the scan_file function."""
    # Case 1: a loose shapefile with all of its sidecar files on disk.
    exts = ('.shp', '.shx', '.sld', '.xml', '.prj', '.dbf')
    with create_files(map((lambda s: 'san_andres_y_providencia_location{0}'.format(s)), exts)) as tests:
        shp = filter((lambda s: s.endswith('.shp')), tests)[0]
        spatial_files = scan_file(shp)
        self.assertTrue(isinstance(spatial_files, SpatialFiles))
        spatial_file = spatial_files[0]
        self.assertEqual(shp, spatial_file.base_file)
        self.assertTrue(spatial_file.file_type.matches('shp'))
        # shx/prj/dbf are auxiliary; xml and sld are tracked separately.
        self.assertEqual(len(spatial_file.auxillary_files), 3)
        self.assertEqual(len(spatial_file.xml_files), 1)
        self.assertTrue(all(map((lambda s: s.endswith('xml')), spatial_file.xml_files)))
        self.assertEqual(len(spatial_file.sld_files), 1)
        self.assertTrue(all(map((lambda s: s.endswith('sld')), spatial_file.sld_files)))
    # Case 2: the same set of files delivered inside a zip archive.
    file_names = ['109029_23.shp', '109029_23.shx', '109029_23.dbf', '109029_23.prj', '109029_23.xml', '109029_23.sld']
    with create_files(file_names, zipped=True) as tests:
        spatial_files = scan_file(tests[0])
        self.assertTrue(isinstance(spatial_files, SpatialFiles))
        spatial_file = spatial_files[0]
        self.assertTrue(spatial_file.file_type.matches('shp'))
        self.assertEqual(len(spatial_file.auxillary_files), 3)
        self.assertEqual(len(spatial_file.xml_files), 1)
        self.assertEqual(len(spatial_file.sld_files), 1)
        self.assertTrue(all(map((lambda s: s.endswith('xml')), spatial_file.xml_files)))
        # Extraction must have renamed every member with a leading underscore.
        basedir = os.path.dirname(spatial_file.base_file)
        for f in file_names:
            path = os.path.join(basedir, ('_%s' % f))
            self.assertTrue(os.path.exists(path))
|
def delete(self, request, *args, **kwargs):
    """Delete the database record only.

    This does not actually delete the file, only the database record —
    but that is easy to implement.
    """
    self.object = self.get_object()
    self.object.delete()
    if not request.is_ajax():
        return HttpResponseRedirect(reverse('data_upload_new'))
    # AJAX callers get a JSON acknowledgement instead of a redirect.
    response = JSONResponse(True, {}, response_content_type(self.request))
    response['Content-Disposition'] = 'inline; filename=files.json'
    return response
|
def viewer_json(self, user, access_token, *added_layers):
    """Convert this map to a nested dictionary structure matching the JSON
    configuration for GXP Viewers.

    The ``added_layers`` parameter list allows a list of extra MapLayer
    instances to append to the Map's layer list when generating the
    configuration. These are not persisted; if you want to add layers you
    should use ``.layer_set.create()``.
    """
    # Serve a cached config for persisted maps with no extra layers
    # (added_layers are not part of the cache key, so skip the cache then).
    if self.id and len(added_layers) == 0:
        cached = cache.get('viewer_json_' + str(self.id) + '_' + str(0 if user is None else user.id))
        if cached is not None:
            return cached

    layers = list(self.layers)
    layers.extend(added_layers)
    server_lookup = {}
    sources = {}

    def uniqify(seq):
        """Get a list of unique items from the input sequence.

        This relies only on equality tests, so you can use it on most
        things. If you have a sequence of hashables, list(set(seq)) is
        better.
        """
        seen = []
        for item in seq:
            if item not in seen:
                seen.append(item)
        return seen

    # Number each distinct layer source, skipping keys already taken.
    configs = [l.source_config(access_token) for l in layers]
    counter = 0
    for source in uniqify(configs):
        while str(counter) in sources:
            counter += 1
        sources[str(counter)] = source
        server_lookup[json.dumps(source)] = str(counter)

    def source_lookup(source):
        # Reverse lookup: key under which this source config is stored.
        for k, v in sources.iteritems():
            if v == source:
                return k
        return None

    def layer_config(l, user=None):
        cfg = l.layer_config(user=user)
        src_cfg = l.source_config(access_token)
        source = source_lookup(src_cfg)
        if source:
            cfg['source'] = source
        return cfg

    source_urls = [source['url'] for source in sources.values() if 'url' in source]
    if 'geonode.geoserver' in settings.INSTALLED_APPS:
        # Ensure the local GeoServer is always present as a source.
        if len(sources.keys()) > 0 and not (settings.MAP_BASELAYERS[0]['source']['url'] in source_urls):
            keys = sorted(sources.keys())
            settings.MAP_BASELAYERS[0]['source']['title'] = 'Local Geoserver'
            sources[str(int(keys[-1]) + 1)] = settings.MAP_BASELAYERS[0]['source']

        def _base_source(source):
            # Compare sources while ignoring presentation-only keys.
            base_source = copy.deepcopy(source)
            for key in ['id', 'baseParams', 'title']:
                if key in base_source:
                    del base_source[key]
            return base_source

        # Add any configured base layer sources that are still missing.
        for idx, lyr in enumerate(settings.MAP_BASELAYERS):
            if _base_source(lyr['source']) not in map(_base_source, sources.values()):
                if len(sources.keys()) > 0:
                    sources[str(int(max(sources.keys(), key=int)) + 1)] = lyr['source']

        # Append remote cascading-WMS services not already referenced.
        from geonode.services.models import Service
        index = int(max(sources.keys())) if len(sources.keys()) > 0 else 0
        for service in Service.objects.all():
            remote_source = {'url': service.base_url, 'remote': True, 'ptype': 'gxp_wmscsource', 'name': service.name}
            if remote_source['url'] not in source_urls:
                index += 1
                sources[index] = remote_source

    config = {'id': self.id, 'about': {'title': self.title, 'abstract': self.abstract}, 'aboutUrl': '../about', 'defaultSourceType': 'gxp_wmscsource', 'sources': sources, 'map': {'layers': [layer_config(l, user=user) for l in layers], 'center': [self.center_x, self.center_y], 'projection': self.projection, 'zoom': self.zoom}}
    if any(layers):
        # Mark the topmost layer as selected in the viewer.
        config['map']['layers'][len(layers) - 1]['selected'] = True
    else:
        def_map_config, def_map_layers = default_map_config(None)
        config = def_map_config
        layers = def_map_layers
    config['map'].update(_get_viewer_projection_info(self.projection))
    # NOTE(review): `self is not None` is always true for a bound method;
    # the guard looks like it was meant to test `self.id` — confirm.
    if self is not None:
        cache.set('viewer_json_' + str(self.id) + '_' + str(0 if user is None else user.id), config)
    return config
|
def source_config(self, access_token):
    """Generate a dict that can be serialized to a GXP layer source
    configuration suitable for loading this layer."""
    try:
        cfg = json.loads(self.source_params)
    except Exception:
        # Fall back to a plain WMS-C source when source_params is unusable.
        cfg = dict(ptype='gxp_wmscsource', restUrl='/gs/rest')
    if self.ows_url:
        # Only attach the access token for OGC servers declared in
        # OGC_SERVER; any token already in the URL is replaced.
        known_hosts = []
        for name, server in settings.OGC_SERVER.iteritems():
            split = urlparse.urlsplit(server['PUBLIC_LOCATION'])
            known_hosts.append(split.netloc)
        own_url = urlparse.urlsplit(self.ows_url)
        if access_token and (own_url.netloc in known_hosts):
            query = urlparse.parse_qs(own_url.query)
            if 'access_token' in query:
                del query['access_token']
            query['access_token'] = [access_token]
            encoded = urllib.urlencode(query, doseq=True)
            rebuilt = urlparse.SplitResult(own_url.scheme, own_url.netloc, own_url.path, encoded, own_url.fragment)
            cfg['url'] = rebuilt.geturl()
        else:
            cfg['url'] = self.ows_url
    return cfg
|
def layer_config(self, user=None):
    """Generate a dict that can be serialized to a GXP layer configuration
    suitable for loading this layer.

    The "source" property will be left unset; the layer is not aware of the
    name assigned to its source plugin. See
    geonode.maps.models.Map.viewer_json for an example of
    generating a full map configuration.
    """
    try:
        cfg = json.loads(self.layer_params)
    except Exception:
        cfg = dict()
    # Only truthy presentation attributes are copied into the config.
    for attr in ('format', 'name', 'opacity', 'styles'):
        value = getattr(self, attr)
        if value:
            cfg[attr] = value
    if self.transparent:
        cfg['transparent'] = True
    # 'fixed' and 'visibility' are always emitted, truthy or not.
    cfg['fixed'] = self.fixed
    if self.group:
        cfg['group'] = self.group
    cfg['visibility'] = self.visibility
    return cfg
|
def remove_record(self, uuid):
    """Remove record from the catalogue. Subclasses must implement this."""
    raise NotImplementedError()
|
def create_record(self, item):
    """Create record in the catalogue. Subclasses must implement this."""
    raise NotImplementedError()
|
def get_record(self, uuid):
    """Get record from the catalogue. Subclasses must implement this."""
    raise NotImplementedError()
|
def search_records(self, keywords, start, limit, bbox):
    """Search for records from the catalogue. Subclasses must implement this."""
    raise NotImplementedError()
|
def __init__(self, *args, **kwargs):
    """Initialize the pycsw HTTP CSW backend."""
    super(CatalogueBackend, self).__init__(*args, **kwargs)
    # Metadata formats this backend can emit for a record.
    self.catalogue.formats = ['Atom', 'DIF', 'Dublin Core', 'ebRIM', 'FGDC', 'ISO']
|
def urls_for_uuid(self, uuid):
    """Return the list of valid GetRecordById URLs for a given record."""
    # One (mimetype, format-name, url) triple per supported metadata format.
    return [('text/xml', mformat, self.url_for_uuid(uuid, METADATA_FORMATS[mformat][1]))
            for mformat in self.formats]
|
def csw_gen_anytext(self, xml):
    """Get all element text content from an XML document as one string."""
    tree = etree.fromstring(xml)
    fragments = [value.strip() for value in tree.xpath('//text()')]
    return ' '.join(fragments)
|
def set_metadata_privs(self, uuid, privileges):
    """Set the full set of geonetwork privileges on the item with the
    specified uuid based on a dictionary of the form:
        'group_name1': {'operation1': True, 'operation2': True, ...},
        'group_name2': ...

    All unspecified operations and operations for unspecified groups
    are set to False.
    """
    if self.type != 'geonetwork':
        return
    # Resolve the record uuid to geonetwork's internal database id.
    get_dbid_url = ('%sgeonetwork/srv/en/portal.search.present?%s' % (self.base, urllib.urlencode({'uuid': uuid})))
    response = self.urlopen(urllib2.Request(get_dbid_url))
    doc = etree.fromstring(response.read())
    data_dbid = doc.find('metadata/{http://www.fao.org/geonetwork}info/id').text
    # Lazily populate the group/operation id caches.
    if len(self._group_ids) == 0:
        self._group_ids = self._geonetwork_get_group_ids()
    if len(self._operation_ids) == 0:
        self._operation_ids = self._geonetwork_get_operation_ids()
    # Build the '_<group>_<op>=on' parameters for every granted privilege.
    priv_params = {'id': data_dbid}
    for group, privs in privileges.items():
        group_id = self._group_ids[group.lower()]
        for op, state in privs.items():
            if state is not True:
                continue
            op_id = self._operation_ids[op.lower()]
            priv_params[('_%s_%s' % (group_id, op_id))] = 'on'
    update_privs_url = ('%sgeonetwork/srv/en/metadata.admin?%s' % (self.base, urllib.urlencode(priv_params)))
    response = self.urlopen(urllib2.Request(update_privs_url))
|
def _geonetwork_get_group_ids(self):
    """Helper to fetch the set of geonetwork groups as {name: id}."""
    get_groups_url = ('%sgeonetwork/srv/en/xml.info?%s' % (self.base, urllib.urlencode({'type': 'groups'})))
    response = self.urlopen(urllib2.Request(get_groups_url))
    doc = etree.fromstring(response.read())
    # Map lower-cased group name to its id attribute.
    groups = {}
    for gp in doc.findall('groups/group'):
        groups[gp.find('name').text.lower()] = gp.attrib['id']
    return groups
|
def _geonetwork_get_operation_ids(self):
    """Helper to fetch the set of geonetwork 'operations' (privileges)."""
    get_ops_url = ('%sgeonetwork/srv/en/xml.info?%s' % (self.base, urllib.urlencode({'type': 'operations'})))
    response = self.urlopen(urllib2.Request(get_ops_url))
    doc = etree.fromstring(response.read())
    # Map lower-cased operation name to its id attribute.
    ops = {}
    for op in doc.findall('operations/operation'):
        ops[op.find('name').text.lower()] = op.attrib['id']
    return ops
|
def search(self, keywords, startposition, maxrecords, bbox):
    """CSW search wrapper around getrecords for the supported formats."""
    typenames = ' '.join(METADATA_FORMATS[f][0] for f in self.formats)
    return self.getrecords(typenames=typenames, keywords=keywords, startposition=startposition, maxrecords=maxrecords, bbox=bbox, outputschema='http://www.isotc211.org/2005/gmd', esn='full')
|
def normalize_bbox(self, bbox):
    """Fix bbox axis order: GeoNetwork accepts x/y, pycsw accepts y/x."""
    if self.type == 'geonetwork':
        return bbox
    # Swap each coordinate pair for pycsw.
    return [bbox[1], bbox[0], bbox[3], bbox[2]]
|
def metadatarecord2dict(self, rec):
    """Accept a node representing a catalogue result record and build a
    POD structure representing the search result."""
    if rec is None:
        return None
    # Flatten all keyword groups into one list.
    keywords = []
    for kw in rec.identification.keywords:
        keywords.extend(kw['keywords'])
    result = {
        'uuid': rec.identifier,
        'title': rec.identification.title,
        'abstract': rec.identification.abstract,
        'keywords': keywords,
        'attribution': {'title': '', 'href': ''},
    }
    result['name'] = result['uuid']
    bbox = rec.identification.bbox
    result['bbox'] = {'minx': bbox.minx, 'maxx': bbox.maxx, 'miny': bbox.miny, 'maxy': bbox.maxy}
    result['download_links'] = self.extract_links(rec)
    result['metadata_links'] = [('text/xml', 'ISO', self.url_for_uuid(rec.identifier, 'http://www.isotc211.org/2005/gmd'))]
    return result
|
def _csw_local_dispatch(self, keywords=None, start=0, limit=10, bbox=None, identifier=None):
    """HTTP-less CSW: dispatch a request directly into the pycsw server."""
    # Merge the project's pycsw configuration over the local defaults.
    mdict = dict(settings.PYCSW['CONFIGURATION'], **CONFIGURATION)
    if 'server' in settings.PYCSW['CONFIGURATION']:
        mdict['server'].update(settings.PYCSW['CONFIGURATION']['server'])
    config = SafeConfigParser()
    for section, options in mdict.iteritems():
        config.add_section(section)
        for option, value in options.iteritems():
            config.set(section, option, value)
    # pycsw inspects QUERY_STRING even for programmatic calls.
    os.environ['QUERY_STRING'] = ''
    csw = server.Csw(config, version='2.0.2')
    csw.requesttype = 'GET'
    if identifier is None:
        # Paged GetRecords over every supported typename.
        formats = []
        for f in self.catalogue.formats:
            formats.append(METADATA_FORMATS[f][0])
        csw.kvp = {'service': 'CSW', 'version': '2.0.2', 'elementsetname': 'full', 'typenames': formats, 'resulttype': 'results', 'constraintlanguage': 'CQL_TEXT', 'outputschema': 'http://www.isotc211.org/2005/gmd', 'constraint': None, 'startposition': start, 'maxrecords': limit}
        response = csw.getrecords()
    else:
        # Single-record GetRecordById lookup.
        csw.kvp = {'service': 'CSW', 'version': '2.0.2', 'request': 'GetRecordById', 'id': identifier, 'outputschema': 'http://www.isotc211.org/2005/gmd'}
        try:
            response = csw.dispatch()
        except ReadingError:
            return []
    # pycsw may return (headers, body); keep only the body.
    if isinstance(response, list):
        response = response[1]
    return response
|
def test_get_catalog(self):
    """Tests that the get_catalogue function works."""
    c = get_catalogue()
    # Previously the result was fetched but never checked; assert that a
    # catalogue instance was actually returned.
    self.assertIsNotNone(c)
|
def test_data_json(self):
    """Test that the data.json representation behaves correctly."""
    body = self.client.get(reverse('data_json')).content
    data_json = json.loads(body)
    # Every ResourceBase record must appear exactly once in data.json.
    len1 = len(ResourceBase.objects.all())
    len2 = len(data_json)
    self.assertEquals(len1, len2, 'Expected equality of json and repository lengths')
    record_keys = ['accessLevel', 'contactPoint', 'description', 'distribution', 'identifier', 'keyword', 'mbox', 'modified', 'publisher', 'title']
    for record in data_json:
        self.assertEquals(record_keys, record.keys(), 'Expected specific list of fields to output')
|
@property
def credentials(self):
    """Return a named tuple of the server's (username, password)."""
    creds = namedtuple('OGC_SERVER_CREDENTIALS', ['username', 'password'])
    return creds(username=self.USER, password=self.PASSWORD)
|
'Returns the server\'s datastore dict or None.'
| @property
def datastore_db(self):
| if (self.DATASTORE and settings.DATABASES.get(self.DATASTORE, None)):
return settings.DATABASES.get(self.DATASTORE, dict())
else:
return dict()
|
@property
def ows(self):
    """The Open Web Service url for the server."""
    if self.OWS_LOCATION:
        return self.OWS_LOCATION
    # Prefer the public location; fall back to the internal one.
    base = self.PUBLIC_LOCATION if self.PUBLIC_LOCATION else self.LOCATION
    return base + 'ows'
|
@property
def rest(self):
    """The REST endpoint for the server."""
    if self.REST_LOCATION:
        return self.REST_LOCATION
    return self.LOCATION + 'rest'
|
@property
def public_url(self):
    """The global public endpoint for the server."""
    if self.PUBLIC_LOCATION:
        return self.PUBLIC_LOCATION
    return self.LOCATION
|
@property
def internal_ows(self):
    """The Open Web Service url for the server used by GeoNode internally."""
    return self.LOCATION + 'ows'
|
@property
def internal_rest(self):
    """The internal REST endpoint for the server."""
    return self.LOCATION + 'rest'
|
def ensure_valid_configuration(self, alias):
    """Ensure the settings for the given server alias are valid.

    Raises ServerDoesNotExist for an unknown alias and
    ImproperlyConfigured for inconsistent importer/datastore settings.
    """
    try:
        server = self.servers[alias]
    except KeyError:
        raise ServerDoesNotExist("The server %s doesn't exist" % alias)
    datastore = server.get('DATASTORE')
    uploader_backend = getattr(settings, 'UPLOADER', dict()).get('BACKEND', 'geonode.rest')
    # The importer backend needs a datastore with connection parameters.
    if (uploader_backend == 'geonode.importer') and datastore and (not settings.DATABASES.get(datastore)):
        raise ImproperlyConfigured('The OGC_SERVER setting specifies a datastore but no connection parameters are present.')
    if (uploader_backend == 'geonode.importer') and (not datastore):
        raise ImproperlyConfigured('The UPLOADER BACKEND is set to geonode.importer but no DATASTORE is specified.')
    # Reject the removed/renamed setting outright.
    if 'PRINTNG_ENABLED' in server:
        raise ImproperlyConfigured("The PRINTNG_ENABLED setting has been removed, use 'PRINT_NG_ENABLED' instead.")
|
def ensure_defaults(self, alias):
    """Put the defaults into the settings dictionary for a given connection
    where no settings are provided."""
    try:
        server = self.servers[alias]
    except KeyError:
        raise ServerDoesNotExist("The server %s doesn't exist" % alias)
    defaults = {
        'BACKEND': 'geonode.geoserver',
        'LOCATION': 'http://localhost:8080/geoserver/',
        'USER': 'admin',
        'PASSWORD': 'geoserver',
        'DATASTORE': str(),
        'GEOGIG_DATASTORE_DIR': str(),
    }
    for key, value in defaults.items():
        server.setdefault(key, value)
    # Feature flags enabled by default.
    for option in ['MAPFISH_PRINT_ENABLED', 'PRINT_NG_ENABLED', 'GEONODE_SECURITY_ENABLED', 'BACKEND_WRITE_ENABLED']:
        server.setdefault(option, True)
    # Feature flags disabled by default.
    for option in ['GEOGIG_ENABLED', 'WMST_ENABLED', 'WPS_ENABLED']:
        server.setdefault(option, False)
|
def test_style_manager(self):
    """Ensures the layer_style_manage route returns a 200."""
    layer = Layer.objects.all()[0]
    bob = get_user_model().objects.get(username='bobby')
    # Bobby needs the style permission before the page is reachable.
    assign_perm('change_layer_style', bob, layer)
    logged_in = self.client.login(username='bobby', password='bob')
    self.assertEquals(logged_in, True)
    response = self.client.get(reverse('layer_style_manage', args=(layer.alternate,)))
    self.assertEqual(response.status_code, 200)
|
def test_feature_edit_check(self):
    """Verify that the feature_edit_check view is behaving as expected."""
    valid_layer_typename = Layer.objects.all()[0].alternate
    Layer.objects.all()[0].set_default_permissions()
    invalid_layer_typename = 'n0ch@nc3'
    # Unknown layer -> 404.
    response = self.client.post(reverse('feature_edit_check', args=(invalid_layer_typename,)))
    self.assertEquals(response.status_code, 404)
    # Anonymous user -> not authorized.
    response = self.client.post(reverse('feature_edit_check', args=(valid_layer_typename,)))
    response_json = json.loads(response.content)
    self.assertEquals(response_json['authorized'], False)
    # Regular user without edit permission -> not authorized.
    logged_in = self.client.login(username='bobby', password='bob')
    self.assertEquals(logged_in, True)
    response = self.client.post(reverse('feature_edit_check', args=(valid_layer_typename,)))
    response_json = json.loads(response.content)
    self.assertEquals(response_json['authorized'], False)
    # Admin, but the layer is not (yet) a vector datastore layer.
    logged_in = self.client.login(username='admin', password='admin')
    self.assertEquals(logged_in, True)
    response = self.client.post(reverse('feature_edit_check', args=(valid_layer_typename,)))
    response_json = json.loads(response.content)
    self.assertEquals(response_json['authorized'], False)
    # Flip the layer to a vector datastore; admin should now be authorized.
    layer = Layer.objects.all()[0]
    layer.storeType = 'dataStore'
    layer.save()
    if settings.OGC_SERVER['default']['DATASTORE']:
        response = self.client.post(reverse('feature_edit_check', args=(valid_layer_typename,)))
        response_json = json.loads(response.content)
        self.assertEquals(response_json['authorized'], True)
|
def test_layer_acls(self):
    """Verify that the layer_acls view is behaving as expected."""
    valid_uname_pw = ('%s:%s' % ('bobby', 'bob'))
    invalid_uname_pw = ('%s:%s' % ('n0t', 'v@l1d'))
    valid_auth_headers = {'HTTP_AUTHORIZATION': ('basic ' + base64.b64encode(valid_uname_pw))}
    invalid_auth_headers = {'HTTP_AUTHORIZATION': ('basic ' + base64.b64encode(invalid_uname_pw))}
    bob = get_user_model().objects.get(username='bobby')
    layer_ca = Layer.objects.get(alternate='geonode:CA')
    assign_perm('change_layer_data', bob, layer_ca)
    # Valid basic auth -> bobby's read-only and read-write layer lists.
    expected_result = {u'email': u'[email protected]', u'fullname': u'bobby', u'is_anonymous': False, u'is_superuser': False, u'name': u'bobby', u'ro': [u'geonode:layer2', u'geonode:mylayer', u'geonode:foo', u'geonode:whatever', u'geonode:fooey', u'geonode:quux', u'geonode:fleem'], u'rw': [u'geonode:CA']}
    response = self.client.get(reverse('layer_acls'), **valid_auth_headers)
    response_json = json.loads(response.content)
    self.assertEquals(sorted(expected_result), sorted(response_json))
    # Invalid credentials -> 401.
    response = self.client.get(reverse('layer_acls'), **invalid_auth_headers)
    self.assertEquals(response.status_code, 401)
    # Session auth works too.
    self.client.login(username='admin', password='admin')
    response = self.client.get(reverse('layer_acls'))
    response_json = json.loads(response.content)
    self.assertEquals('admin', response_json['fullname'])
    self.assertEquals('', response_json['email'])
|
def test_resolve_user(self):
    """Verify that the resolve_user view is behaving as expected."""
    valid_uname_pw = ('%s:%s' % ('admin', 'admin'))
    invalid_uname_pw = ('%s:%s' % ('n0t', 'v@l1d'))
    valid_auth_headers = {'HTTP_AUTHORIZATION': ('basic ' + base64.b64encode(valid_uname_pw))}
    invalid_auth_headers = {'HTTP_AUTHORIZATION': ('basic ' + base64.b64encode(invalid_uname_pw))}
    # Valid basic auth -> full identity payload.
    response = self.client.get(reverse('layer_resolve_user'), **valid_auth_headers)
    response_json = json.loads(response.content)
    self.assertEquals({'geoserver': False, 'superuser': True, 'user': 'admin', 'fullname': 'admin', 'email': ''}, response_json)
    # Invalid credentials -> 401.
    response = self.client.get(reverse('layer_acls'), **invalid_auth_headers)
    self.assertEquals(response.status_code, 401)
    # Session auth works too.
    self.client.login(username='admin', password='admin')
    response = self.client.get(reverse('layer_resolve_user'))
    response_json = json.loads(response.content)
    self.assertEquals('admin', response_json['user'])
    self.assertEquals('admin', response_json['fullname'])
    self.assertEquals('', response_json['email'])
|
def test_ogc_server_settings(self):
    """Tests the OGC Servers Handler class."""
    with override_settings(OGC_SERVER=self.OGC_DEFAULT_SETTINGS, UPLOADER=self.UPLOADER_DEFAULT_SETTINGS):
        OGC_SERVER = self.OGC_DEFAULT_SETTINGS.copy()
        OGC_SERVER.update({'PUBLIC_LOCATION': 'http://localhost:8080/geoserver/'})
        ogc_settings = OGC_Servers_Handler(OGC_SERVER)['default']
        default = OGC_SERVER.get('default')
        # Plain settings should be exposed as attributes verbatim.
        self.assertEqual(ogc_settings.server, default)
        self.assertEqual(ogc_settings.BACKEND, default.get('BACKEND'))
        self.assertEqual(ogc_settings.LOCATION, default.get('LOCATION'))
        self.assertEqual(ogc_settings.PUBLIC_LOCATION, default.get('PUBLIC_LOCATION'))
        self.assertEqual(ogc_settings.USER, default.get('USER'))
        self.assertEqual(ogc_settings.PASSWORD, default.get('PASSWORD'))
        self.assertEqual(ogc_settings.DATASTORE, str())
        self.assertEqual(ogc_settings.credentials, ('admin', 'geoserver'))
        # Default feature flags.
        self.assertTrue(ogc_settings.MAPFISH_PRINT_ENABLED)
        self.assertTrue(ogc_settings.PRINT_NG_ENABLED)
        self.assertTrue(ogc_settings.GEONODE_SECURITY_ENABLED)
        self.assertFalse(ogc_settings.GEOGIG_ENABLED)
        self.assertFalse(ogc_settings.WMST_ENABLED)
        self.assertTrue(ogc_settings.BACKEND_WRITE_ENABLED)
        self.assertFalse(ogc_settings.WPS_ENABLED)
|
def test_ogc_server_defaults(self):
    """Tests that OGC_SERVER_SETTINGS are built if they do not exist in
    the settings."""
    OGC_SERVER = {'default': dict()}
    defaults = self.OGC_DEFAULT_SETTINGS.get('default')
    ogc_settings = OGC_Servers_Handler(OGC_SERVER)['default']
    self.assertEqual(ogc_settings.server, defaults)
    self.assertEqual(ogc_settings.rest, (defaults['LOCATION'] + 'rest'))
    self.assertEqual(ogc_settings.ows, (defaults['LOCATION'] + 'ows'))
    # Deliberately nonsensical attribute: missing settings resolve to None.
    self.assertIsNone(ogc_settings.SFDSDFDSF)
|
def test_importer_configuration(self):
    """Tests that the OGC_Servers_Handler throws an ImproperlyConfigured
    exception when using the importer backend without a vector database
    and a datastore configured."""
    database_settings = self.DATABASE_DEFAULT_SETTINGS.copy()
    ogc_server_settings = self.OGC_DEFAULT_SETTINGS.copy()
    uploader_settings = self.UPLOADER_DEFAULT_SETTINGS.copy()
    uploader_settings['BACKEND'] = 'geonode.importer'
    # BUG FIX: the original wrapped the membership test in a one-element
    # list literal, so assertTrue always passed; assert the test itself.
    self.assertTrue('geonode_imports' not in database_settings)
    # No DATASTORE set: the importer backend must be rejected.
    with self.settings(UPLOADER=uploader_settings, OGC_SERVER=ogc_server_settings, DATABASES=database_settings):
        with self.assertRaises(ImproperlyConfigured):
            OGC_Servers_Handler(ogc_server_settings)['default']
    # DATASTORE named but no matching DATABASES entry: still rejected.
    ogc_server_settings['default']['DATASTORE'] = 'geonode_imports'
    with self.settings(UPLOADER=uploader_settings, OGC_SERVER=ogc_server_settings, DATABASES=database_settings):
        with self.assertRaises(ImproperlyConfigured):
            OGC_Servers_Handler(ogc_server_settings)['default']
    # With a matching database configured, the handler must succeed.
    database_settings['geonode_imports'] = database_settings['default'].copy()
    database_settings['geonode_imports'].update({'NAME': 'geonode_imports'})
    with self.settings(UPLOADER=uploader_settings, OGC_SERVER=ogc_server_settings, DATABASES=database_settings):
        OGC_Servers_Handler(ogc_server_settings)['default']
|
def test_login_middleware(self):
    """Tests the Geonode login required authentication middleware."""
    from geonode.security.middleware import LoginRequiredMiddleware
    middleware = LoginRequiredMiddleware()
    # Paths reachable without authentication.
    white_list = [reverse('account_ajax_login'), reverse('account_confirm_email', kwargs=dict(key='test')), reverse('account_login'), reverse('account_password_reset'), reverse('forgot_username'), reverse('layer_acls'), reverse('layer_resolve_user')]
    # Paths that must redirect anonymous users to login.
    black_list = [reverse('account_signup'), reverse('document_browse'), reverse('maps_browse'), reverse('layer_browse'), reverse('layer_detail', kwargs=dict(layername='geonode:Test')), reverse('layer_remove', kwargs=dict(layername='geonode:Test')), reverse('profile_browse')]
    request = HttpRequest()
    request.user = get_anonymous_user()
    for path in black_list:
        request.path = path
        response = middleware.process_request(request)
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response.get('Location').startswith(middleware.redirect_to))
    for path in white_list:
        request.path = path
        response = middleware.process_request(request)
        self.assertIsNone(response, msg='Middleware activated for white listed path: {0}'.format(path))
    # An authenticated admin must pass through everywhere.
    self.client.login(username='admin', password='admin')
    self.assertTrue(self.admin.is_authenticated())
    request.user = self.admin
    for path in black_list:
        request.path = path
        response = middleware.process_request(request)
        self.assertIsNone(response)
|
def test_layer_upload(self):
    """Try uploading a layer and verify that the user can administrate his
    own layer despite not being a site administrator."""
    self.client.login(username='norman', password='norman')
    norman = get_user_model().objects.get(username='norman')
    saved_layer = file_upload(os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp'), name='san_andres_y_providencia_poi_by_norman', user=norman, overwrite=True)
    saved_layer.set_default_permissions()
    # The owner must be able to open the metadata editing page.
    url = reverse('layer_metadata', args=[saved_layer.service_typename])
    resp = self.client.get(url)
    self.assertEquals(resp.status_code, 200)
|
def test_raster_upload(self):
    """Test that the wcs links are correctly created for a raster."""
    filename = os.path.join(gisdata.GOOD_DATA, 'raster/test_grid.tif')
    uploaded = file_upload(filename)
    # At least one of the layer's links must advertise a GeoTIFF download.
    wcs_link = False
    for link in uploaded.link_set.all():
        if link.mime == 'image/tiff':
            wcs_link = True
    self.assertTrue(wcs_link)
|
def test_layer_upload(self):
    """Test that layers can be uploaded to a running GeoNode/GeoServer.

    Uploads the whole gisdata directory, then verifies that every
    compatible file produced a Layer both in GeoNode and in GeoServer,
    and finally cleans the uploaded layers up.
    """
    layers = {}
    expected_layers = []
    not_expected_layers = []
    # Files with these extensions are expected to upload successfully.
    for filename in os.listdir(gisdata.GOOD_DATA):
        (basename, extension) = os.path.splitext(filename)
        if (extension.lower() in ['.tif', '.shp', '.zip']):
            expected_layers.append(os.path.join(gisdata.GOOD_DATA, filename))
    for filename in os.listdir(gisdata.BAD_DATA):
        not_expected_layers.append(os.path.join(gisdata.BAD_DATA, filename))
    uploaded = upload(gisdata.DATA_DIR, console=None)
    for item in uploaded:
        if ('error' in item):
            if (item['file'] in not_expected_layers):
                continue
            else:
                # BUG FIX: the original did `assert errors, msg` here, but
                # `errors` is always truthy in this branch, so the test could
                # never fail.  Fail explicitly instead.
                msg = ('Could not upload file "%s", and it is not in %s' % (item['file'], not_expected_layers))
                self.fail(msg)
        else:
            msg = ('Upload should have returned either "name" or "errors" for file %s.' % item['file'])
            assert ('name' in item), msg
            layers[item['file']] = item['name']
    msg = ('There were %s compatible layers in the directory, but only %s were sucessfully uploaded' % (len(expected_layers), len(layers)))
    for layer in expected_layers:
        # Typo fix in the failure message ("uploadedbut" -> "uploaded but").
        msg = ('The following file should have been uploaded but was not: %s. ' % layer)
        assert (layer in layers), msg
        layer_name = layers[layer]
        # Raises DoesNotExist if GeoNode did not register the layer.
        Layer.objects.get(name=layer_name)
        found = False
        (gs_username, gs_password) = (settings.OGC_SERVER['default']['USER'], settings.OGC_SERVER['default']['PASSWORD'])
        page = get_web_page(os.path.join(settings.OGC_SERVER['default']['LOCATION'], 'rest/layers'), username=gs_username, password=gs_password)
        if (page.find(('rest/layers/%s.html' % layer_name)) > 0):
            found = True
        if (not found):
            msg = ('Upload could not be verified, the layer %s is not in geoserver %s, but GeoNode did not raise any errors, this should never happen.' % (layer_name, settings.OGC_SERVER['default']['LOCATION']))
            raise GeoNodeException(msg)
    # Clean up every uploaded layer.
    for layer in expected_layers:
        layer_name = layers[layer]
        Layer.objects.get(name=layer_name).delete()
def test_extension_not_implemented(self):
    """Verify a GeoNodeException is raised for incompatible extensions."""
    sampletxt = os.path.join(gisdata.VECTOR_DATA, 'points_epsg2249_no_prj.dbf')
    # BUG FIX: the original try/except silently passed whether or not the
    # exception was raised, so the test could never detect a regression.
    # assertRaises makes the expectation explicit and enforced.
    with self.assertRaises(GeoNodeException):
        file_upload(sampletxt)
def test_layer_upload_metadata(self):
    """Test uploading a layer with XML metadata sidecar file."""
    thelayer = os.path.join(gisdata.PROJECT_ROOT, 'both', 'good', 'sangis.org', 'Airport', 'Air_Runways.shp')
    # BUG FIX: the original asserted a non-empty string
    # (assertTrue('%s.xml' % thelayer)), which is always true.  Actually
    # verify the XML sidecar exists on disk.
    self.assertTrue(os.path.exists('%s.xml' % thelayer), 'Expected layer XML metadata to exist')
    uploaded = file_upload(thelayer, overwrite=True)
    self.assertEqual(uploaded.title, 'Air Runways', 'Expected specific title from uploaded layer XML metadata')
    self.assertEqual(uploaded.abstract, 'Airport Runways', 'Expected specific abstract from uploaded layer XML metadata')
    self.assertEqual(uploaded.purpose, 'To show the location of Public Airports and Runways within San Diego County', 'Expected specific purpose from uploaded layer XML metadata')
    self.assertEqual(uploaded.supplemental_information, 'No information provided', 'Expected specific supplemental information from uploaded layer XML metadata')
    self.assertEqual(len(uploaded.keyword_list()), 5, 'Expected specific number of keywords from uploaded layer XML metadata')
    self.assertEqual(uploaded.keyword_csv, u'Airport,Airports,Landing Strips,Runway,Runways', 'Expected CSV of keywords from uploaded layer XML metadata')
    self.assertTrue(('Landing Strips' in uploaded.keyword_list()), 'Expected specific keyword from uploaded layer XML metadata')
    self.assertEqual(uploaded.constraints_other, 'None', 'Expected specific constraint from uploaded layer XML metadata')
    self.assertEqual(uploaded.date, datetime.datetime(2010, 8, 3, 0, 0), 'Expected specific date from uploaded layer XML metadata')
    # Clean up the uploaded layer.
    uploaded.delete()
def test_shapefile(self):
    """Test uploading a good shapefile."""
    shp_path = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    layer = file_upload(shp_path, overwrite=True)
    check_layer(layer)
    # Clean up.
    layer.delete()
def test_bad_shapefile(self):
    """Verify GeoNode complains about a shapefile without a .prj file."""
    thefile = os.path.join(gisdata.BAD_DATA, 'points_epsg2249_no_prj.shp')
    # BUG FIX: the original swallowed UploadError/GeoNodeException and so
    # passed even when no error was raised at all.  Require one of the
    # two expected exception types explicitly.
    with self.assertRaises((UploadError, GeoNodeException)):
        file_upload(thefile, overwrite=True)
def test_tiff(self):
    """Test uploading a good .tiff raster."""
    tif_path = os.path.join(gisdata.RASTER_DATA, 'test_grid.tif')
    layer = file_upload(tif_path, overwrite=True)
    check_layer(layer)
    # Clean up.
    layer.delete()
def test_repeated_upload(self):
    """Upload the same file more than once.

    With overwrite=True the layer name must stay the same; with
    overwrite=False a new, distinct name must be generated.
    """
    tif_path = os.path.join(gisdata.RASTER_DATA, 'test_grid.tif')
    first = file_upload(tif_path, overwrite=True)
    check_layer(first)
    second = file_upload(tif_path, overwrite=True)
    check_layer(second)
    third = file_upload(tif_path, overwrite=False)
    check_layer(third)
    self.assertEqual(
        first.name, second.name,
        'Expected %s but got %s' % (first.name, second.name))
    self.assertNotEqual(
        first.name, third.name,
        'Expected a different name when uploading %s using overwrite=False but got %s' % (tif_path, third.name))
    # Clean up: second overwrote first, so deleting second and third
    # removes everything created here.
    second.delete()
    third.delete()
def test_layer_delete_from_geoserver(self):
    """Verify that deleting a layer also removes it from GeoServer."""
    gs_cat = gs_catalog
    # Vector case: deleting the layer must remove its GeoServer store.
    shp_file = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    shp_layer = file_upload(shp_file, overwrite=True)
    workspace = gs_cat.get_workspace(shp_layer.workspace)
    shp_store = gs_cat.get_store(shp_layer.store, workspace)
    shp_store_name = shp_store.name
    shp_layer.delete()
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_store(shp_store_name))
    # Raster case: deleting the layer must remove its GeoServer resource.
    tif_file = os.path.join(gisdata.RASTER_DATA, 'test_grid.tif')
    tif_layer = file_upload(tif_file)
    workspace = gs_cat.get_workspace(tif_layer.workspace)
    tif_store = gs_cat.get_store(tif_layer.store, workspace)
    tif_layer.delete()
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_resource(shp_layer.name, store=tif_store))
def test_delete_layer(self):
    """Verify that the 'delete_layer' pre_delete hook is functioning."""
    gs_cat = gs_catalog
    shp_file = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    shp_layer = file_upload(shp_file)
    # Remember identifiers before deleting so we can probe afterwards.
    shp_layer_id = shp_layer.pk
    workspace = gs_cat.get_workspace(shp_layer.workspace)
    store_name = gs_cat.get_store(shp_layer.store, workspace).name
    uuid = shp_layer.uuid
    shp_layer.delete()
    # The GeoServer store must be gone...
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_store(store_name))
    # ...and so must the Django record.
    self.assertRaises(
        ObjectDoesNotExist,
        lambda: Layer.objects.get(pk=shp_layer_id))
    # If a catalogue backend is configured, its record must be gone too.
    if ('geonode.catalogue' in settings.INSTALLED_APPS):
        from geonode.catalogue import get_catalogue
        catalogue = get_catalogue()
        self.assertIsNone(catalogue.get_record(uuid))
def test_cascading_delete(self):
    """Verify that the helpers.cascading_delete() method works properly.

    After a cascading delete, the layer's styles, resource and store must
    all be gone from GeoServer.
    """
    gs_cat = gs_catalog
    shp_file = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    shp_layer = file_upload(shp_file)
    resource_name = shp_layer.name
    workspace = gs_cat.get_workspace(shp_layer.workspace)
    store = gs_cat.get_store(shp_layer.store, workspace)
    store_name = store.name
    gs_layer = gs_cat.get_layer(resource_name)
    styles = (gs_layer.styles + [gs_layer.default_style])
    cascading_delete(gs_cat, shp_layer.alternate)
    # Every style attached to the layer must have been removed.
    for style in styles:
        self.assertIsNone(gs_cat.get_style(style.name))
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_resource(resource_name, store=store))
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_store(store_name))
    # Clean up the Django side.
    shp_layer.delete()
def test_keywords_upload(self):
    """Check that keywords can be passed to file_upload."""
    shp_path = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    uploaded = file_upload(shp_path, keywords=['foo', 'bar'], overwrite=True)
    keywords = uploaded.keyword_list()
    self.assertTrue(
        len(keywords) > 0,
        'No keywords found in layer %s' % uploaded.name)
    # Both supplied keywords must have been attached to the layer.
    self.assertIn('foo', keywords, 'Could not find "foo" in %s' % keywords)
    self.assertIn('bar', keywords, 'Could not find "bar" in %s' % keywords)
def test_empty_bbox(self):
    """Regression test for failures caused by zero-width bounding boxes."""
    shp_path = os.path.join(gisdata.VECTOR_DATA, 'single_point.shp')
    uploaded = file_upload(shp_path, overwrite=True)
    uploaded.set_default_permissions()
    self.client.login(username='norman', password='norman')
    # The detail page must still render even though the bbox is degenerate.
    response = self.client.get(uploaded.get_absolute_url())
    self.assertEquals(response.status_code, 200)
def test_layer_replace(self):
    """Test the layer replace functionality.

    Checks the replace form for raster and vector layers, rejects a raster
    file posted to a vector layer, accepts a valid vector replacement, and
    verifies an unauthorized user gets a 401.
    """
    vector_file = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_administrative.shp')
    vector_layer = file_upload(vector_file, overwrite=True)
    raster_file = os.path.join(gisdata.RASTER_DATA, 'test_grid.tif')
    raster_layer = file_upload(raster_file, overwrite=True)
    self.client.login(username='admin', password='admin')
    # The raster replace form must not be flagged as a featuretype.
    raster_replace_url = reverse('layer_replace', args=[raster_layer.service_typename])
    response = self.client.get(raster_replace_url)
    self.assertEquals(response.status_code, 200)
    self.assertEquals(response.context['is_featuretype'], False)
    # The vector replace form must be flagged as a featuretype.
    vector_replace_url = reverse('layer_replace', args=[vector_layer.service_typename])
    response = self.client.get(vector_replace_url)
    self.assertEquals(response.status_code, 200)
    self.assertEquals(response.context['is_featuretype'], True)
    # Posting a raster file to a vector layer must be rejected.
    response = self.client.post(vector_replace_url, {'base_file': open(raster_file, 'rb')})
    self.assertEquals(response.status_code, 400)
    rejected = json.loads(response.content)
    self.assertEquals(rejected['success'], False)
    # Posting a complete shapefile set must succeed.
    new_vector_file = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    layer_path, _ext = os.path.splitext(new_vector_file)
    layer_base = open(layer_path + '.shp', 'rb')
    layer_dbf = open(layer_path + '.dbf', 'rb')
    layer_shx = open(layer_path + '.shx', 'rb')
    layer_prj = open(layer_path + '.prj', 'rb')
    response = self.client.post(
        vector_replace_url,
        {'base_file': layer_base,
         'dbf_file': layer_dbf,
         'shx_file': layer_shx,
         'prj_file': layer_prj})
    self.assertEquals(response.status_code, 200)
    accepted = json.loads(response.content)
    self.assertEquals(accepted['success'], True)
    # The replacement must have changed every bbox coordinate.
    new_vector_layer = Layer.objects.get(pk=vector_layer.pk)
    self.assertNotEqual(vector_layer.bbox_x0, new_vector_layer.bbox_x0)
    self.assertNotEqual(vector_layer.bbox_x1, new_vector_layer.bbox_x1)
    self.assertNotEqual(vector_layer.bbox_y0, new_vector_layer.bbox_y0)
    self.assertNotEqual(vector_layer.bbox_y1, new_vector_layer.bbox_y1)
    # norman does not have replace permission on this layer -> 401.
    self.client.logout()
    self.client.login(username='norman', password='norman')
    response = self.client.post(
        vector_replace_url,
        {'base_file': layer_base,
         'dbf_file': layer_dbf,
         'shx_file': layer_shx,
         'prj_file': layer_prj})
    self.assertEquals(response.status_code, 401)
def test_importlayer_mgmt_command(self):
    """Test the 'importlayers' management command."""
    vector_file = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_administrative.shp')
    call_command(
        'importlayers',
        vector_file,
        overwrite=True,
        keywords='test, import, san andreas',
        title='Test San Andres y Providencia Administrative',
        verbosity=1)
    imported = Layer.objects.get(title='Test San Andres y Providencia Administrative')
    self.assertIsNotNone(imported)
    self.assertEqual(imported.name, 'test_san_andres_y_providencia_administrative')
    self.assertEqual(imported.title, 'Test San Andres y Providencia Administrative')
    # Keywords are stored sorted alphabetically.
    self.assertEqual(imported.keyword_list(), [u'import', u'san andreas', u'test'])
    # Clean up.
    imported.delete()
def test_unpublished(self):
    """Test permissions on an unpublished layer.

    Without RESOURCE_PUBLISHING, an uploaded layer is immediately visible
    in the WMS capabilities.  With RESOURCE_PUBLISHING=True it must stay
    hidden until its resource is explicitly published.
    """
    thefile = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    layer = file_upload(thefile, overwrite=True)
    layer.set_default_permissions()
    check_layer(layer)
    # Give GeoServer time to refresh its capabilities document.
    time.sleep(20)
    url = 'http://localhost:8080/geoserver/ows?service=wms&version=1.3.0&request=GetCapabilities'
    str_to_check = '<Name>geonode:san_andres_y_providencia_poi</Name>'
    request = urllib2.Request(url)
    response = urllib2.urlopen(request)
    self.assertTrue(any(((str_to_check in s) for s in response.readlines())))
    # BUG FIX: the original called assertTrue(layer.is_published, True),
    # which passes `True` as the failure *message*, not as an expected
    # value.  Assert the flag directly.
    self.assertTrue(layer.is_published)
    layer.delete()
    with self.settings(RESOURCE_PUBLISHING=True):
        thefile = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_administrative.shp')
        layer = file_upload(thefile, overwrite=True)
        layer.set_default_permissions()
        check_layer(layer)
        time.sleep(20)
        str_to_check = '<Name>san_andres_y_providencia_administrative</Name>'
        self.assertEqual(layer.is_published, False)
        # Unpublished layer must not appear in the capabilities.
        request = urllib2.Request(url)
        response = urllib2.urlopen(request)
        self.assertFalse(any(((str_to_check in s) for s in response.readlines())))
        # Publish the layer's resource...
        resource = layer.get_self_resource()
        resource.is_published = True
        resource.save()
        # ...and it must become visible.
        request = urllib2.Request(url)
        response = urllib2.urlopen(request)
        self.assertTrue(any(((str_to_check in s) for s in response.readlines())))
        layer.delete()
def test_layer_thumbnail(self):
    """Test that saving a layer generates a thumbnail link."""
    self.client.login(username='norman', password='norman')
    norman = get_user_model().objects.get(username='norman')
    saved_layer = file_upload(
        os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp'),
        name='san_andres_y_providencia_poi_by_norman',
        user=norman,
        overwrite=True)
    # A real thumbnail must have replaced the missing-thumbnail placeholder.
    thumbnail_url = saved_layer.get_thumbnail_url()
    self.assertNotEqual(thumbnail_url, staticfiles.static(settings.MISSING_THUMBNAIL))
def test_map_thumbnail(self):
    """Test that saving a map generates a thumbnail link."""
    self.client.login(username='norman', password='norman')
    norman = get_user_model().objects.get(username='norman')
    saved_layer = file_upload(
        os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp'),
        name='san_andres_y_providencia_poi_by_norman',
        user=norman,
        overwrite=True)
    saved_layer.set_default_permissions()
    map_obj = Map(owner=norman, zoom=0, center_x=0, center_y=0)
    map_obj.create_from_layer_list(norman, [saved_layer], 'title', '')
    # A real thumbnail must have replaced the missing-thumbnail placeholder.
    thumbnail_url = map_obj.get_thumbnail_url()
    self.assertNotEqual(thumbnail_url, staticfiles.static(settings.MISSING_THUMBNAIL))
def testPrintProxy(self):
    """Test the PrintProxyMiddleware, if activated.

    It should respect the permissions on private layers.
    """
    # Guard clause: nothing to test when the middleware is not installed.
    if ('geonode.middleware.PrintProxyMiddleware' not in settings.MIDDLEWARE_CLASSES):
        return
    from geonode.maps.models import Map
    self.client.login(username='norman', password='norman')
    norman = get_user_model().objects.get(username='norman')
    saved_layer = file_upload(
        os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp'),
        name='san_andres_y_providencia_poi_by_norman',
        user=norman,
        overwrite=True)
    saved_layer.set_permissions({'users': {'AnonymousUser': ['view_resourcebase']}})
    url = reverse('layer_metadata', args=[saved_layer.service_typename])
    # Owner sees the metadata page; anonymous gets redirected.
    response = self.client.get(url)
    self.assertEquals(response.status_code, 200)
    self.client.logout()
    response = self.client.get(url)
    self.assertEquals(response.status_code, 302)
    map_obj = Map(owner=norman, zoom=0, center_x=0, center_y=0)
    map_obj.create_from_layer_list(norman, [saved_layer], 'title', '')
    # Exercise the print endpoint as an anonymous user...
    print_url = (settings.OGC_SERVER['default']['LOCATION'] + 'pdf/create.json')
    post_payload = {
        'dpi': 75,
        'layers': [{
            'baseURL': (settings.OGC_SERVER['default']['LOCATION'] + 'wms?SERVICE=WMS&'),
            'format': 'image/png',
            'customParams': {'TILED': True, 'TRANSPARENT': True},
            'layers': [saved_layer.service_typename],
            'opacity': 1,
            'singleTile': False,
            'type': 'WMS'}],
        'layout': 'A4 portrait',
        'mapTitle': 'test',
        'outputFilename': 'print',
        'srs': getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:900913'),
        'units': 'm'}
    self.client.post(print_url, post_payload)
    # ...the private metadata page must still be protected afterwards.
    response = self.client.get(url)
    self.assertEquals(response.status_code, 302)
def test_set_attributes_from_geoserver(self):
    """Test attribute synchronization from GeoServer.

    Labels and descriptions set on existing attributes must survive a
    re-sync with GeoServer.
    """
    shp_file = os.path.join(gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp')
    layer = file_upload(shp_file)
    # Tag every attribute with a recognizable label and description.
    for attribute in layer.attribute_set.all():
        attribute.attribute_label = '%s_label' % attribute.attribute
        attribute.description = '%s_description' % attribute.attribute
        attribute.save()
    set_attributes_from_geoserver(layer)
    # The sync must have preserved the custom metadata.
    for attribute in layer.attribute_set.all():
        self.assertEquals(attribute.attribute_label, '%s_label' % attribute.attribute)
        self.assertEquals(attribute.description, '%s_description' % attribute.attribute)
def test_set_attributes_creates_attributes(self):
    """Test that set_attributes() creates Attribute instances attached
    to a Layer instance."""
    self.client.login(username='norman', password='norman')
    # Temporarily disconnect the geoserver post_save signal so creating a
    # dummy Layer does not try to talk to GeoServer.
    disconnected_post_save = signals.post_save.disconnect(geoserver_post_save, sender=Layer)
    dummy = Layer.objects.create(name='dummy_layer')
    if disconnected_post_save:
        signals.post_save.connect(geoserver_post_save, sender=Layer)
    attribute_map = [
        ['id', 'Integer'],
        ['date', 'IntegerList'],
        ['enddate', 'Real'],
        ['date_as_date', 'xsd:dateTime']]
    expected_results = copy.deepcopy(attribute_map)
    set_attributes(dummy, attribute_map)
    # One Attribute per entry, each matching a (name, type) pair.
    self.assertEquals(dummy.attributes.count(), len(expected_results))
    for attr in dummy.attributes:
        self.assertIn([attr.attribute, attr.attribute_type], expected_results)
def test_csw_base(self):
    """Verify that GeoNode works against any CSW backend."""
    csw = get_catalogue(skip_caps=False)
    # Every advertised operation endpoint must point back at the CSW URL.
    for op in csw.catalogue.operations:
        for method in op.methods:
            self.assertEqual(
                csw.catalogue.url, method['url'],
                'Expected GeoNode URL to be equal to all CSW URLs')
    self.assertEqual(
        csw.catalogue.version, '2.0.2',
        'Expected "2.0.2" as a supported version')
    if (csw.catalogue.type != 'pycsw_local'):
        # Non-local backends must support transactions.
        op_names = [o.name for o in csw.catalogue.operations]
        self.assertIn(
            'Transaction', op_names,
            'Expected Transaction to be a supported operation')
    # GetRecords must support the ISO typename and output schema.
    for o in csw.catalogue.operations:
        if (o.name == 'GetRecords'):
            typenames = o.parameters['typeNames']['values']
            self.assertIn(
                'gmd:MD_Metadata', typenames,
                'Expected "gmd:MD_Metadata" to be a supported typeNames value')
    for o in csw.catalogue.operations:
        if (o.name == 'GetRecords'):
            outputschemas = o.parameters['outputSchema']['values']
            self.assertIn(
                'http://www.isotc211.org/2005/gmd', outputschemas,
                'Expected "http://www.isotc211.org/2005/gmd" to be a supported outputSchema value')
def test_csw_search_count(self):
    """Verify that GeoNode CSW can handle search counting."""
    csw = get_catalogue(skip_caps=False)
    # Dublin Core and ISO typenames must report the same record count.
    csw.catalogue.getrecords(typenames='csw:Record')
    self.assertEqual(csw.catalogue.results['matches'], 16, 'Expected 16 records')
    csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
    self.assertEqual(csw.catalogue.results['matches'], 16, 'Expected 16 records against ISO typename')
def test_csw_outputschema_dc(self):
    """Verify that GeoNode CSW can serve ISO metadata in the Dublin Core
    outputSchema."""
    csw = get_catalogue()
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    record = csw.catalogue.records.values()[0]
    self.assertEqual(
        record.title, 'San Andres Y Providencia Location',
        'Expected a specific title in Dublin Core model')
    self.assertEqual(
        record.abstract, 'No abstract provided',
        'Expected a specific abstract in Dublin Core model')
    # The OGC service references must point at the local GeoServer.
    for link in record.references:
        if (link['scheme'] == 'OGC:WMS'):
            self.assertEqual(link['url'], 'http://localhost:8080/geoserver/geonode/wms', 'Expected a specific OGC:WMS URL')
        elif (link['scheme'] == 'OGC:WFS'):
            self.assertEqual(link['url'], 'http://localhost:8080/geoserver/geonode/wfs', 'Expected a specific OGC:WFS URL')
def test_csw_outputschema_iso(self):
    """Verify that GeoNode CSW can serve ISO metadata in the ISO
    outputSchema."""
    csw = get_catalogue()
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.isotc211.org/2005/gmd',
        esn='full')
    record = csw.catalogue.records.values()[0]
    self.assertEqual(
        record.identification.title, 'San Andres Y Providencia Location',
        'Expected a specific title in ISO model')
    self.assertEqual(
        record.identification.abstract, 'No abstract provided',
        'Expected a specific abstract in ISO model')
    # Bounding box coordinates are serialized as strings in owslib.
    self.assertEqual(record.identification.bbox.minx, '-81.8593555', 'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(record.identification.bbox.miny, '12.1665322', 'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(record.identification.bbox.maxx, '-81.356409', 'Expected a specific maxx coordinate value in ISO model')
    self.assertEqual(record.identification.bbox.maxy, '13.396306', 'Expected a specific maxy coordinate value in ISO model')
    # The OGC distribution links must point at the local GeoServer.
    for link in record.distribution.online:
        if (link.protocol == 'OGC:WMS'):
            self.assertEqual(link.url, 'http://localhost:8080/geoserver/geonode/wms', 'Expected a specific OGC:WMS URL')
        elif (link.protocol == 'OGC:WFS'):
            self.assertEqual(link.url, 'http://localhost:8080/geoserver/geonode/wfs', 'Expected a specific OGC:WFS URL')
def test_csw_outputschema_dc_bbox(self):
    """Verify that GeoNode CSW serves the ISO BBOX correctly in the
    Dublin Core outputSchema."""
    csw = get_catalogue()
    # GeoNetwork does not support this combination; skip it there.
    if (csw.catalogue.type == 'geonetwork'):
        return
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['san_andres_y_providencia_location'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    record = csw.catalogue.records.values()[0]
    self.assertEqual(record.bbox.crs.code, 4326, 'Expected a specific CRS code value in Dublin Core model')
    # Coordinates come back as strings from owslib.
    self.assertEqual(record.bbox.minx, '-81.8593555', 'Expected a specific minx coordinate value in Dublin Core model')
    self.assertEqual(record.bbox.miny, '12.1665322', 'Expected a specific minx coordinate value in Dublin Core model')
    self.assertEqual(record.bbox.maxx, '-81.356409', 'Expected a specific maxx coordinate value in Dublin Core model')
    self.assertEqual(record.bbox.maxy, '13.396306', 'Expected a specific maxy coordinate value in Dublin Core model')
def test_csw_outputschema_fgdc(self):
    """Verify that GeoNode CSW can serve ISO metadata in the FGDC
    outputSchema."""
    csw = get_catalogue()
    # Only the pycsw backends support the FGDC output schema.
    if (csw.catalogue.type not in ['pycsw_http', 'pycsw_local']):
        return
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['san_andres_y_providencia_location'],
        outputschema='http://www.opengis.net/cat/csw/csdgm')
    record = csw.catalogue.records.values()[0]
    self.assertEqual(
        record.idinfo.citation.citeinfo['title'],
        'San Andres Y Providencia Location',
        'Expected a specific title in FGDC model')
    self.assertEqual(
        record.idinfo.descript.abstract,
        'No abstract provided',
        'Expected a specific abstract in FGDC model')
def test_csw_query_bbox(self):
    """Verify that GeoNode CSW can handle bbox queries."""
    csw = get_catalogue()
    csw.catalogue.getrecords(bbox=[-140, -70, 80, 70])
    # Exactly 7 of the fixture records fall inside this box.
    self.assertEqual(
        csw.catalogue.results,
        {'matches': 7, 'nextrecord': 0, 'returned': 7},
        'Expected a specific bbox query result set')
def test_csw_upload_fgdc(self):
    """Verify that GeoNode CSW can handle FGDC metadata upload."""
    csw = get_catalogue()
    # Transactions are only exercised against a remote pycsw.
    if (csw.catalogue.type != 'pycsw_http'):
        return
    # FIX: the original leaked the file handle via open(...).read();
    # use a context manager so it is always closed.
    md_path = os.path.join(
        gisdata.GOOD_METADATA, 'sangis.org', 'Census',
        'Census_Blockgroup_Pop_Housing.shp.xml')
    with open(md_path) as md_file:
        md_doc = etree.tostring(etree.fromstring(md_file.read()))
    csw.catalogue.transaction(ttype='insert', typename='fgdc:metadata', record=md_doc)
    self.assertEqual(csw.catalogue.results['inserted'], 1, 'Expected 1 inserted record in FGDC model')
    csw.catalogue.getrecords(typenames='fgdc:metadata')
    self.assertEqual(csw.catalogue.results['matches'], 1, 'Expected 1 record in FGDC model')
    record = csw.catalogue.records.values()[0]
    self.assertEqual(record.title, 'Census_Blockgroup_Pop_Housing', 'Expected a specific title in DC model')
    self.assertEqual(record.type, 'vector digital data', 'Expected a specific type in DC model')
    self.assertEqual(record.bbox.crs.code, 4326, 'Expected a specific CRS code value in Dublin Core model')
    self.assertEqual(record.bbox.minx, '-117.6', 'Expected a specific minx coordinate value in Dublin Core model')
    self.assertEqual(record.bbox.miny, '32.53', 'Expected a specific minx coordinate value in Dublin Core model')
    self.assertEqual(record.bbox.maxx, '-116.08', 'Expected a specific maxx coordinate value in Dublin Core model')
    self.assertEqual(record.bbox.maxy, '33.51', 'Expected a specific maxy coordinate value in Dublin Core model')
    # The same record must also be retrievable through the ISO schema.
    csw.catalogue.getrecords(typenames='fgdc:metadata', esn='brief', outputschema='http://www.isotc211.org/2005/gmd')
    self.assertEqual(csw.catalogue.results['matches'], 1, 'Expected 1 record in ISO model')
    record = csw.catalogue.records.values()[0]
    self.assertEqual(record.identification.title, 'Census_Blockgroup_Pop_Housing', 'Expected a specific title in ISO model')
    # Clean up the inserted record.
    csw.catalogue.transaction(ttype='delete', typename='fgdc:metadata', cql='fgdc:Title like "Census_Blockgroup_Pop_Housing"')
    self.assertEqual(csw.catalogue.results['deleted'], 1, 'Expected 1 deleted record in FGDC model')
def test_csw_bulk_upload(self):
    """Verify that GeoNode CSW can handle bulk upload of ISO and FGDC
    metadata."""
    csw = get_catalogue()
    # Transactions are only exercised against a remote pycsw.
    if (csw.catalogue.type != 'pycsw_http'):
        return
    identifiers = []
    # FIX: the original leaked file handles via open(...).read(); use
    # context managers so every metadata file is closed after reading.
    for (root, dirs, files) in os.walk(os.path.join(gisdata.GOOD_METADATA, 'sangis.org')):
        for mfile in files:
            if mfile.endswith('.xml'):
                with open(os.path.join(root, mfile)) as md_file:
                    md_doc = etree.tostring(etree.fromstring(md_file.read()))
                csw.catalogue.transaction(ttype='insert', typename='fgdc:metadata', record=md_doc)
                identifiers.append(csw.catalogue.results['insertresults'][0])
    for md in glob.glob(os.path.join(gisdata.GOOD_METADATA, 'wustl.edu', '*.xml')):
        with open(md) as md_file:
            md_doc = etree.tostring(etree.fromstring(md_file.read()))
        csw.catalogue.transaction(ttype='insert', typename='gmd:MD_Metadata', record=md_doc)
        identifiers.append(csw.catalogue.results['insertresults'][0])
    csw.catalogue.getrecords(typenames='fgdc:metadata')
    self.assertEqual(csw.catalogue.results['matches'], 72, 'Expected 187 records in FGDC model')
    csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
    self.assertEqual(csw.catalogue.results['matches'], 115, 'Expected 194 records in ISO model')
    csw.catalogue.getrecords(typenames='gmd:MD_Metadata fgdc:metadata')
    self.assertEqual(csw.catalogue.results['matches'], 187, 'Expected 381 records total in FGDC and ISO model')
    # Clean up everything we inserted.
    for i in identifiers:
        csw.catalogue.transaction(ttype='delete', identifier=i)
def test_home_page(self):
    """Test that the homepage renders."""
    response = self.client.get(reverse('home'))
    self.assertEqual(response.status_code, 200)
def test_help_page(self):
    """Test that the help page renders."""
    response = self.client.get(reverse('help'))
    self.assertEqual(response.status_code, 200)
def test_developer_page(self):
    """Test that the help page renders (developer entry point)."""
    response = self.client.get(reverse('help'))
    self.assertEqual(response.status_code, 200)
def test_layer_page(self):
    """Test that the layer browse page renders."""
    response = self.client.get(reverse('layer_browse'))
    self.assertEqual(response.status_code, 200)
def test_layer_acls(self):
    """Test that the data/acls endpoint renders."""
    response = self.client.get(reverse('layer_acls'))
    self.assertEqual(response.status_code, 200)
def test_maps_page(self):
    """Test that the maps browse page renders."""
    response = self.client.get(reverse('maps_browse'))
    self.assertEqual(response.status_code, 200)
def test_new_map_page(self):
    """Test that the new-map page renders."""
    response = self.client.get(reverse('new_map'))
    self.assertEqual(response.status_code, 200)
def test_profile_list(self):
    """Test that the profiles browse page renders."""
    response = self.client.get(reverse('profile_browse'))
    self.assertEqual(response.status_code, 200)
def test_profiles(self):
    """Test that user profile pages render for both fixture users."""
    for username in ('admin', 'norman'):
        response = self.client.get(reverse('profile_detail', args=[username]))
        self.assertEqual(response.status_code, 200)
def test_csw_endpoint(self):
    """Test that the CSW endpoint is correctly configured."""
    response = self.client.get(reverse('csw_global_dispatch'))
    self.assertEqual(response.status_code, 200)
def test_opensearch_description(self):
    """Test that the local OpenSearch endpoint is correctly configured."""
    response = self.client.get(reverse('opensearch_dispatch'))
    self.assertEqual(response.status_code, 200)
@property
def service(self):
    """Return the related Service object for this layer, or None.

    IMPROVEMENT: the original materialized the entire servicelayer_set
    queryset just to test emptiness (len(qs) == 0); QuerySet.first()
    fetches at most one row (LIMIT 1) with identical semantics.
    """
    service_layer = self.servicelayer_set.first()
    return service_layer.service if (service_layer is not None) else None
def get_base_file(self):
    """Get the shp or geotiff file for this layer.

    Returns a ``(base_file, list_col)`` tuple where ``list_col`` is the
    shapefile column listing (vector stores only) or ``None``.
    Returns ``(None, None)`` when there is no upload session or no
    main file was uploaded.
    """
    if (self.upload_session is None):
        return (None, None)
    # Candidate "main file" extensions with the leading dot stripped
    # (cov_exts/vec_exts are defined at module level outside this view).
    base_exts = [x.replace('.', '') for x in (cov_exts + vec_exts)]
    base_files = self.upload_session.layerfile_set.filter(name__in=base_exts)
    base_files_count = base_files.count()
    if (base_files_count == 0):
        return (None, None)
    # An upload session must carry exactly one main file.
    msg = ('There should only be one main file (.shp or .geotiff or .asc), found %s' % base_files_count)
    assert (base_files_count == 1), msg
    list_col = None
    if (self.storeType == 'dataStore'):
        # Vector layer: validate the shapefile's column names before use.
        (valid_shp, wrong_column_name, list_col) = check_shp_columnnames(self)
        if wrong_column_name:
            msg = ('Shapefile has an invalid column name: %s' % wrong_column_name)
        else:
            msg = _('File cannot be opened, maybe check the encoding')
        # NOTE(review): assert statements are stripped under `python -O`,
        # so this validation would be silently skipped there.
        assert valid_shp, msg
    return (base_files.get(), list_col)
def test_data(self):
    """/data/ -> Test accessing the data page."""
    data_url = reverse('layer_browse')
    resp = self.client.get(data_url)
    self.assertEqual(200, resp.status_code)
def test_describe_data_2(self):
    """/data/geonode:CA/metadata -> Test accessing the description of a layer."""
    self.assertEqual(8, get_user_model().objects.all().count())
    metadata_url = reverse('layer_metadata', args=('geonode:CA',))
    # Anonymous access is redirected (login required).
    self.assertEqual(302, self.client.get(metadata_url).status_code)
    # Authenticated admin can view the metadata page.
    self.client.login(username='admin', password='admin')
    self.assertEqual(200, self.client.get(metadata_url).status_code)
def test_describe_data(self):
    """/data/geonode:CA/metadata -> Test accessing the description of a layer."""
    # NOTE: same checks as test_describe_data_2 (duplicated in the
    # original suite).
    self.assertEqual(8, get_user_model().objects.all().count())
    metadata_url = reverse('layer_metadata', args=('geonode:CA',))
    # Anonymous access is redirected (login required).
    self.assertEqual(302, self.client.get(metadata_url).status_code)
    # Authenticated admin can view the metadata page.
    self.client.login(username='admin', password='admin')
    self.assertEqual(200, self.client.get(metadata_url).status_code)
def test_layer_attributes_feature_catalogue(self):
    """Test layer feature catalogue functionality."""
    # Unknown layer name -> 404.
    resp = self.client.get(reverse('layer_feature_catalogue', args=('bad_layer',)))
    self.assertEqual(404, resp.status_code)
    # Layer pk=3 -> 400 with a JSON error body.
    bad_layer = Layer.objects.get(pk=3)
    resp = self.client.get(reverse('layer_feature_catalogue', args=(bad_layer.alternate,)))
    self.assertEqual(400, resp.status_code)
    self.assertEqual('application/json', resp['content-type'])
    # Layer pk=2 -> catalogue is served as XML.
    good_layer = Layer.objects.get(pk=2)
    resp = self.client.get(reverse('layer_feature_catalogue', args=(good_layer.alternate,)))
    self.assertEqual(200, resp.status_code)
    self.assertEqual('application/xml', resp['content-type'])
def test_rating_layer_remove(self):
    """Test layer rating is removed on layer remove."""
    layer = Layer.objects.get(pk=3)
    layer.default_style = Style.objects.get(pk=layer.pk)
    layer.save()
    remove_url = reverse('layer_remove', args=(layer.alternate,))
    ctype = ContentType.objects.get(model='layer')
    # Attach a rating, then delete the layer as admin.
    OverallRating.objects.create(
        category=2, object_id=layer.id, content_type=ctype, rating=3)
    self.client.login(username='admin', password='admin')
    self.client.post(remove_url)
    # The rating must be gone along with the layer.
    remaining = OverallRating.objects.filter(category=2, object_id=layer.id)
    self.assertEqual(0, remaining.count())
def test_layer_remove(self):
    """Test layer remove functionality."""
    layer = Layer.objects.get(pk=1)
    remove_url = reverse('layer_remove', args=(layer.alternate,))
    layer.default_style = Style.objects.get(pk=layer.pk)
    layer.save()

    # Anonymous users are redirected to login.
    self.assertEqual(302, self.client.get(remove_url).status_code)

    # A non-owner regular user is also redirected.
    self.client.login(username='norman', password='norman')
    self.assertEqual(302, self.client.post(remove_url).status_code)
    self.client.logout()

    # Admin: PUT is not allowed, GET shows the confirmation page,
    # POST performs the deletion and redirects to the layers list.
    self.client.login(username='admin', password='admin')
    self.assertEqual(403, self.client.put(remove_url).status_code)
    self.assertEqual(200, self.client.get(remove_url).status_code)
    resp = self.client.post(remove_url)
    self.assertEqual(302, resp.status_code)
    self.assertEqual('http://testserver/layers/', resp['Location'])

    # The layer and its style are both gone.
    self.assertEqual(0, Layer.objects.filter(pk=layer.pk).count())
    self.assertEqual(0, Style.objects.count())
def test_non_cascading(self):
    """Tests that deleting a layer with a shared default style will not
    cascade and delete multiple layers."""
    first = Layer.objects.get(pk=1)
    second = Layer.objects.get(pk=2)
    remove_url = reverse('layer_remove', args=(first.alternate,))
    # Point both layers at the same default style.
    shared_style = Style.objects.get(pk=first.pk)
    first.default_style = shared_style
    first.save()
    second.default_style = shared_style
    second.save()
    self.assertEqual(first.default_style, second.default_style)

    # Delete the first layer as admin.
    self.client.login(username='admin', password='admin')
    resp = self.client.post(remove_url)
    self.assertEqual(302, resp.status_code)
    self.assertEqual('http://testserver/layers/', resp['Location'])

    # Only the deleted layer is gone; the other layer and the shared
    # style survive.
    self.assertEqual(0, Layer.objects.filter(pk=first.pk).count())
    self.assertEqual(1, Layer.objects.filter(pk=2).count())
    self.assertEqual(1, Style.objects.count())
def test_assign_change_layer_data_perm(self):
    """Ensure set_permissions supports the change_layer_data permission."""
    layer = Layer.objects.first()
    anon = get_anonymous_user()
    perm_spec = {'users': {anon.username: ['change_layer_data']}}
    layer.set_permissions(perm_spec)
    all_perms = layer.get_all_level_info()
    self.assertIn('change_layer_data', all_perms['users'][anon])
def test_unpublished_layer(self):
    'Test unpublished layer behaviour'
    user = get_user_model().objects.get(username='bobby')
    self.client.login(username='bobby', password='bob')
    # Published layer: detail page is reachable.
    response = self.client.get(reverse('layer_detail', args=('geonode:CA',)))
    self.failUnlessEqual(response.status_code, 200)
    # Mark the layer unpublished.
    layer = Layer.objects.filter(title='CA')[0]
    layer.is_published = False
    layer.save()
    # With RESOURCE_PUBLISHING disabled (the default here), an
    # unpublished layer is still reachable.
    response = self.client.get(reverse('layer_detail', args=('geonode:CA',)))
    self.failUnlessEqual(response.status_code, 200)
    with self.settings(RESOURCE_PUBLISHING=True):
        # Publishing workflow enabled: unpublished layer is hidden...
        response = self.client.get(reverse('layer_detail', args=('geonode:CA',)))
        self.failUnlessEqual(response.status_code, 404)
        # ...unless the user holds publish_resourcebase on the resource.
        assign_perm('publish_resourcebase', user, layer.get_self_resource())
        response = self.client.get(reverse('layer_detail', args=('geonode:CA',)))
        self.failUnlessEqual(response.status_code, 200)
        # Once re-published, the layer is visible again even after the
        # permission is removed.
        layer.is_published = True
        layer.save()
        remove_perm('publish_resourcebase', user, layer.get_self_resource())
        response = self.client.get(reverse('layer_detail', args=('geonode:CA',)))
        self.failUnlessEqual(response.status_code, 200)
    # Restore the published state for other tests.
    layer.is_published = True
    layer.save()
def _upload_test_layer(self):
    """Upload the test shapefile through the layer-upload view and
    return the resulting ``Layer``.

    Helper extracted from test_moderated_upload, where this sequence
    was duplicated verbatim for each ADMIN_MODERATE_UPLOADS setting.
    """
    layer_upload_url = reverse('layer_upload')
    self.client.login(username=self.user, password=self.passwd)
    (input_paths, suffixes) = self._get_input_paths()
    input_files = [open(fp, 'rb') for fp in input_paths]
    files = dict(zip(['{}_file'.format(s) for s in suffixes], input_files))
    # The .shp part is posted under the generic 'base_file' key.
    files['base_file'] = files.pop('shp_file')
    # contextlib.nested is Python-2 only; it closes all file handles on
    # exit of the with-block.
    with contextlib.nested(*input_files):
        files['permissions'] = '{}'
        files['charset'] = 'utf-8'
        files['layer_title'] = 'test layer'
        resp = self.client.post(layer_upload_url, data=files)
    self.assertEqual(resp.status_code, 200)
    data = json.loads(resp.content)
    # The returned URL ends with 'workspace:layername'.
    lname = data['url'].split(':')[(-1)]
    return Layer.objects.get(name=lname)

def test_moderated_upload(self):
    """Test if moderation flag works.

    With ADMIN_MODERATE_UPLOADS disabled, a new layer is published
    immediately; with it enabled, the layer awaits moderation.
    """
    with self.settings(ADMIN_MODERATE_UPLOADS=False):
        self.assertTrue(self._upload_test_layer().is_published)
    with self.settings(ADMIN_MODERATE_UPLOADS=True):
        self.assertFalse(self._upload_test_layer().is_published)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.