desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def __init__(self, address):
    """Instantiate a new IPv4 address object.

    Args:
        address: A string, integer, or packed Bytes value representing
            the IP, so that IPv4Address('192.168.1.1') ==
            IPv4Address(3232235777).

    Raises:
        AddressValueError: If address isn't a valid IPv4 address.
    """
    _BaseV4.__init__(self, address)

    # Integer input: accept any value within the 32-bit range.
    if isinstance(address, (int, long)):
        self._ip = address
        if address < 0 or address > self._ALL_ONES:
            raise AddressValueError(address)
        return

    # Packed binary input: exactly four network-order bytes.
    if isinstance(address, Bytes):
        try:
            self._ip, = struct.unpack('!I', address)
        except struct.error:
            raise AddressValueError(address)
        return

    # Otherwise treat the input as a dotted-quad string.
    self._ip = self._ip_int_from_string(str(address))
def __init__(self, address, strict=False):
    """Instantiate a new IPv4 network object.

    Args:
        address: A string or integer representing the IP [& network].
            '192.168.1.1/24', '192.168.1.1/255.255.255.0' and
            '192.168.1.1/0.0.0.255' are all functionally the same.  A
            missing mask defaults to /32.  A dotted-quad mask is a
            netmask when its first field is non-zero (255.0.0.0 == /8)
            and a hostmask when its first field is zero
            (0.255.255.255 == /8); an all-zero mask is a netmask /0.
            An integer is also accepted, so
            IPv4Network('192.168.1.1') == IPv4Network(3232235777).
        strict: A boolean.  If true, require a true network address
            (192.168.1.0/24) rather than an IP on a network
            (192.168.1.1/24).

    Raises:
        AddressValueError: If ipaddr isn't a valid IPv4 address.
        NetmaskValueError: If the netmask isn't valid for an IPv4
            address.
        ValueError: If strict was True and a network address was not
            supplied.
    """
    _BaseNet.__init__(self, address)
    _BaseV4.__init__(self, address)

    # Integer or packed-bytes input: a bare address, implying /32.
    if isinstance(address, (int, long, Bytes)):
        self.ip = IPv4Address(address)
        self._ip = self.ip._ip
        self._prefixlen = self._max_prefixlen
        self.netmask = IPv4Address(self._ALL_ONES)
        return

    # Otherwise parse "address" or "address/mask".
    addr = str(address).split('/')

    if (len(addr) > 2):
        raise AddressValueError(address)

    self._ip = self._ip_int_from_string(addr[0])
    self.ip = IPv4Address(self._ip)

    if (len(addr) == 2):
        mask = addr[1].split('.')
        if (len(mask) == 4):
            # Dotted-quad mask: try netmask first, then hostmask.
            if self._is_valid_netmask(addr[1]):
                self.netmask = IPv4Address(self._ip_int_from_string(addr[1]))
            elif self._is_hostmask(addr[1]):
                # Hostmask (e.g. 0.0.0.255): invert it into a netmask.
                self.netmask = IPv4Address((self._ip_int_from_string(addr[1]) ^ self._ALL_ONES))
            else:
                raise NetmaskValueError(('%s is not a valid netmask' % addr[1]))
            self._prefixlen = self._prefix_from_ip_int(int(self.netmask))
        else:
            # Prefix-length form, e.g. "/24".
            if (not self._is_valid_netmask(addr[1])):
                raise NetmaskValueError(addr[1])
            self._prefixlen = int(addr[1])
            self.netmask = IPv4Address(self._ip_int_from_prefix(self._prefixlen))
    else:
        # No mask given: default to a /32 host network.
        self._prefixlen = self._max_prefixlen
        self.netmask = IPv4Address(self._ip_int_from_prefix(self._prefixlen))

    if strict:
        if (self.ip != self.network):
            raise ValueError(('%s has host bits set' % self.ip))
    # A /31 has no distinct network/broadcast addresses, so iterating
    # hosts is the same as iterating all addresses.
    if (self._prefixlen == (self._max_prefixlen - 1)):
        self.iterhosts = self.__iter__
def _is_hostmask(self, ip_str):
    """Test if the IP string is a hostmask (rather than a netmask).

    Args:
        ip_str: A string, the potential hostmask.

    Returns:
        A boolean, True if the IP string is a hostmask.
    """
    octets = ip_str.split('.')
    try:
        kept = [int(piece) for piece in octets
                if int(piece) in self._valid_mask_octets]
    except ValueError:
        # A non-integer octet can be neither kind of mask.
        return False
    if len(kept) != len(octets):
        # At least one octet was not a legal mask byte.
        return False
    # Hostmasks ascend (0.0.0.255); netmasks descend (255.0.0.0).
    return kept[0] < kept[-1]
def _is_valid_netmask(self, netmask):
    """Verify that the netmask is valid.

    Args:
        netmask: A string, either a prefix length ('24') or a dotted
            decimal netmask ('255.255.255.0').

    Returns:
        A boolean, True if the prefix represents a valid IPv4 netmask.
    """
    mask = netmask.split('.')
    if len(mask) == 4:
        try:
            octets = [int(x) for x in mask]
        except ValueError:
            # Non-numeric octet (e.g. '255.foo.0.0') used to escape as
            # an uncaught ValueError; it is simply not a valid netmask.
            return False
        # Every octet must be one of the legal mask bytes.
        if [x for x in octets if x not in self._valid_mask_octets]:
            return False
        # Octets must be non-increasing (contiguous mask bits),
        # compared numerically -- the previous string comparison only
        # worked by accident of the valid octets' spellings.
        if [y for idx, y in enumerate(octets)
                if idx > 0 and y > octets[idx - 1]]:
            return False
        return True
    try:
        netmask = int(netmask)
    except ValueError:
        return False
    return 0 <= netmask <= self._max_prefixlen
def _ip_int_from_string(self, ip_str):
    """Turn an IPv6 ip_str into an integer.

    Args:
        ip_str: A string, the IPv6 address.

    Returns:
        A long, the IPv6 address as an integer.

    Raises:
        AddressValueError: if ip_str isn't a valid IPv6 Address.
    """
    parts = ip_str.split(':')

    # An IPv6 address needs at least 2 colons (3 parts).
    if (len(parts) < 3):
        raise AddressValueError(ip_str)

    # If the address has an IPv4-style trailer (::ffff:1.2.3.4),
    # convert it into two 16-bit hextets.
    if ('.' in parts[(-1)]):
        ipv4_int = IPv4Address(parts.pop())._ip
        parts.append(('%x' % ((ipv4_int >> 16) & 65535)))
        parts.append(('%x' % (ipv4_int & 65535)))

    # An IPv6 address can't have more than 8 colons (9 parts).
    if (len(parts) > (self._HEXTET_COUNT + 1)):
        raise AddressValueError(ip_str)

    # Disregarding the endpoints, find where '::' (an empty part)
    # appears; that marks a skipped run of zero hextets.  The tuple
    # unpack raises ValueError when more than one '::' is present.
    try:
        (skip_index,) = ([i for i in xrange(1, (len(parts) - 1)) if (not parts[i])] or [None])
    except ValueError:
        raise AddressValueError(ip_str)

    # parts_hi: hextets before the '::'; parts_lo: hextets after it.
    if (skip_index is not None):
        parts_hi = skip_index
        parts_lo = ((len(parts) - skip_index) - 1)
        if (not parts[0]):
            # Leading ':' is only valid as part of a leading '::'.
            parts_hi -= 1
            if parts_hi:
                raise AddressValueError(ip_str)
        if (not parts[(-1)]):
            # Trailing ':' is only valid as part of a trailing '::'.
            parts_lo -= 1
            if parts_lo:
                raise AddressValueError(ip_str)
        parts_skipped = (self._HEXTET_COUNT - (parts_hi + parts_lo))
        if (parts_skipped < 1):
            # '::' must stand for at least one zero hextet.
            raise AddressValueError(ip_str)
    else:
        # No '::': the address must spell out all 8 hextets.
        if (len(parts) != self._HEXTET_COUNT):
            raise AddressValueError(ip_str)
        parts_hi = len(parts)
        parts_lo = 0
        parts_skipped = 0

    try:
        # Assemble the 128-bit integer: high hextets, skipped zeros,
        # then low hextets.
        ip_int = 0L
        for i in xrange(parts_hi):
            ip_int <<= 16
            ip_int |= self._parse_hextet(parts[i])
        ip_int <<= (16 * parts_skipped)
        for i in xrange((- parts_lo), 0):
            ip_int <<= 16
            ip_int |= self._parse_hextet(parts[i])
        return ip_int
    except ValueError:
        raise AddressValueError(ip_str)
def _parse_hextet(self, hextet_str):
    """Convert an IPv6 hextet string into an integer.

    Args:
        hextet_str: A string, the number to parse.

    Returns:
        The hextet as an integer.

    Raises:
        ValueError: if the input isn't strictly a hex number in
            [0..FFFF].
    """
    # Reject anything int(x, 16) would accept that isn't bare hex
    # digits (whitespace, '0x' prefix, sign characters, ...).
    if not self._HEX_DIGITS.issuperset(hextet_str):
        raise ValueError
    value = int(hextet_str, 16)
    if value > 65535:
        raise ValueError
    return value
def _compress_hextets(self, hextets):
    """Compress a list of hextets for '::' shorthand.

    Replaces the longest continuous run of '0' entries with a single
    '' entry, adding '' at the beginning/end as needed so that a
    subsequent ':'.join(hextets) yields the compressed IPv6 form.

    Args:
        hextets: A list of strings, the hextets to compress.

    Returns:
        A list of strings.
    """
    best_start, best_len = -1, 0
    run_start, run_len = -1, 0
    for i, hextet in enumerate(hextets):
        if hextet == '0':
            run_len += 1
            if run_start == -1:
                run_start = i
            if run_len > best_len:
                best_len = run_len
                best_start = run_start
        else:
            run_start, run_len = -1, 0

    # Only runs of two or more zeros are worth collapsing.
    if best_len > 1:
        best_end = best_start + best_len
        if best_end == len(hextets):
            hextets += ['']
        hextets[best_start:best_end] = ['']
        if best_start == 0:
            hextets = [''] + hextets

    return hextets
def _string_from_ip_int(self, ip_int=None):
    """Turn a 128-bit integer into compressed hexadecimal notation.

    Args:
        ip_int: An integer, the IP address; None means this object's
            own address.

    Returns:
        A string, the hexadecimal representation of the address.

    Raises:
        ValueError: The address is bigger than 128 bits of all ones.
    """
    if not ip_int and ip_int != 0:
        # None (or other falsy non-zero value) means "use my address".
        ip_int = int(self._ip)

    if ip_int > self._ALL_ONES:
        raise ValueError('IPv6 address is too large')

    hex_str = '%032x' % ip_int
    # Split into eight 4-digit groups, dropping leading zeros per group.
    hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]

    return ':'.join(self._compress_hextets(hextets))
def _explode_shorthand_ip_string(self):
    """Expand a shortened IPv6 address into its full form.

    Returns:
        A string, the expanded IPv6 address (with '/prefixlen' for
        network objects).
    """
    if isinstance(self, _BaseNet):
        ip_str = str(self.ip)
    else:
        ip_str = str(self)

    ip_int = self._ip_int_from_string(ip_str)
    # Peel off hextets from least- to most-significant, then flip.
    parts = []
    for _ in xrange(self._HEXTET_COUNT):
        parts.append('%04x' % (ip_int & 65535))
        ip_int >>= 16
    parts.reverse()
    exploded = ':'.join(parts)

    if isinstance(self, _BaseNet):
        return '%s/%d' % (exploded, self.prefixlen)
    return exploded
@property
def packed(self):
    """The binary representation of this address (16 packed bytes)."""
    return v6_int_to_packed(self._ip)
@property
def is_multicast(self):
    """True if the address is a multicast address (RFC 2373 2.7)."""
    return self in IPv6Network('ff00::/8')
@property
def is_reserved(self):
    """True if the address is within one of the IETF reserved ranges."""
    reserved_networks = [
        IPv6Network('::/8'), IPv6Network('100::/8'),
        IPv6Network('200::/7'), IPv6Network('400::/6'),
        IPv6Network('800::/5'), IPv6Network('1000::/4'),
        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
        IPv6Network('FE00::/9'),
    ]
    return any(self in network for network in reserved_networks)
@property
def is_unspecified(self):
    """True for the unspecified address '::' (RFC 2373 2.5.2).

    For network objects this additionally requires a /128 prefix.
    """
    return self._ip == 0 and getattr(self, '_prefixlen', 128) == 128
@property
def is_loopback(self):
    """True for the loopback address '::1' (RFC 2373 2.5.3).

    For network objects this additionally requires a /128 prefix.
    """
    return self._ip == 1 and getattr(self, '_prefixlen', 128) == 128
@property
def is_link_local(self):
    """True if the address is link-local, fe80::/10 (RFC 4291)."""
    return self in IPv6Network('fe80::/10')
@property
def is_site_local(self):
    """True if the address is site-local, fec0::/10 (RFC 3513 2.5.6).

    Site-local addresses were deprecated by RFC 3879; use is_private
    to test for RFC 4193 unique local addresses instead.
    """
    return self in IPv6Network('fec0::/10')
@property
def is_private(self):
    """True if the address is a unique local address, fc00::/7 (RFC 4193)."""
    return self in IPv6Network('fc00::/7')
@property
def ipv4_mapped(self):
    """The embedded IPv4 address for a ::ffff:a.b.c.d address, else None."""
    # A v4-mapped address has 0xffff in bits 32..47 and zeros above.
    if (self._ip >> 32) == 0xFFFF:
        return IPv4Address(self._ip & 0xFFFFFFFF)
    return None
@property
def teredo(self):
    """Tuple of embedded teredo (server, client) IPs.

    Returns None unless the address starts with the teredo prefix
    2001::/32.
    """
    if (self._ip >> 96) != 0x20010000:
        return None
    server = IPv4Address((self._ip >> 64) & 0xFFFFFFFF)
    # The client address is stored bit-inverted in the low 32 bits.
    client = IPv4Address(~self._ip & 0xFFFFFFFF)
    return (server, client)
@property
def sixtofour(self):
    """The IPv4 address embedded in a 6to4 (2002::/16) address, else None."""
    if (self._ip >> 112) != 0x2002:
        return None
    return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
def __init__(self, address):
    """Instantiate a new IPv6 address object.

    Args:
        address: A string, integer, or packed Bytes value representing
            the IP, so that IPv6Address('2001:4860::') ==
            IPv6Address(42541956101370907050197289607612071936L).

    Raises:
        AddressValueError: If address isn't a valid IPv6 address.
    """
    _BaseV6.__init__(self, address)

    # Integer input: accept any value within the 128-bit range.
    if isinstance(address, (int, long)):
        self._ip = address
        if address < 0 or address > self._ALL_ONES:
            raise AddressValueError(address)
        return

    # Packed binary input: sixteen network-order bytes, unpacked as
    # two 64-bit halves.
    if isinstance(address, Bytes):
        try:
            hi, lo = struct.unpack('!QQ', address)
        except struct.error:
            raise AddressValueError(address)
        self._ip = (hi << 64) | lo
        return

    # Otherwise treat the input as an IPv6 string; empty is invalid.
    addr_str = str(address)
    if not addr_str:
        raise AddressValueError('')
    self._ip = self._ip_int_from_string(addr_str)
def __init__(self, address, strict=False):
    """Instantiate a new IPv6 Network object.

    Args:
        address: A string or integer representing the IPv6 network or
            the IP and prefix/netmask, e.g. '2001:4860::/128'.  A
            missing mask defaults to /128.  An integer is also
            accepted, so IPv6Network('2001:4860::') ==
            IPv6Network(42541956101370907050197289607612071936L).
        strict: A boolean.  If true, require a true network address
            and not an IP on a network.

    Raises:
        AddressValueError: If address isn't a valid IPv6 address.
        NetmaskValueError: If the netmask isn't valid for an IPv6
            address.
        ValueError: If strict was True and a network address was not
            supplied.
    """
    _BaseNet.__init__(self, address)
    _BaseV6.__init__(self, address)

    # Integer or packed-bytes input: a bare address, implying /128.
    if isinstance(address, (int, long, Bytes)):
        self.ip = IPv6Address(address)
        self._ip = self.ip._ip
        self._prefixlen = self._max_prefixlen
        self.netmask = IPv6Address(self._ALL_ONES)
        return

    # Otherwise parse "address" or "address/prefixlen".
    pieces = str(address).split('/')
    if len(pieces) > 2:
        raise AddressValueError(address)

    self._ip = self._ip_int_from_string(pieces[0])
    self.ip = IPv6Address(self._ip)

    if len(pieces) == 2:
        if self._is_valid_netmask(pieces[1]):
            self._prefixlen = int(pieces[1])
        else:
            raise NetmaskValueError(pieces[1])
    else:
        self._prefixlen = self._max_prefixlen

    self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen))

    if strict:
        if self.ip != self.network:
            raise ValueError('%s has host bits set' % self.ip)
    # A /127 has no distinct network/broadcast addresses, so iterating
    # hosts is the same as iterating all addresses.
    if self._prefixlen == (self._max_prefixlen - 1):
        self.iterhosts = self.__iter__
def _is_valid_netmask(self, prefixlen):
    """Verify that the netmask/prefixlen is valid.

    Args:
        prefixlen: A string, the netmask in prefix length format.

    Returns:
        A boolean, True if the prefix represents a valid IPv6 netmask.
    """
    try:
        length = int(prefixlen)
    except ValueError:
        return False
    return 0 <= length <= self._max_prefixlen
def rfind(self, needle, start):
    """Reverse-find needle between start and the end of the file.

    Returns the absolute offset of the rightmost match, or -1 when
    the needle is not found.
    """
    haystack = self._read(self._size - start - 1, start)
    pos = haystack.rfind(needle)
    # rfind's offset is relative to the read window; rebase it.
    return pos if pos == -1 else start + pos
def size(self):
    """Return the size of the file in bytes."""
    return self._size
def close(self):
    """Close the underlying file handle."""
    self._handle.close()
def __init__(self, database_buffer, pointer_base=0, pointer_test=False):
    """Create a Decoder for a MaxMind DB.

    Arguments:
        database_buffer -- an mmap'd MaxMind DB file.
        pointer_base -- the base number to use when decoding a pointer.
        pointer_test -- used for internal unit testing of pointer code.
    """
    self._buffer = database_buffer
    self._pointer_base = pointer_base
    self._pointer_test = pointer_test
def decode(self, offset):
    """Decode the data structure at offset in the data section.

    Arguments:
        offset -- the location of the data structure to decode.

    Raises:
        InvalidDatabaseError: when the control byte names an unknown
            type.
    """
    new_offset = offset + 1
    (ctrl_byte,) = struct.unpack('!B', self._buffer[offset:new_offset])
    type_num = ctrl_byte >> 5

    # Type 0 is the "extended" marker; the real type follows the
    # control byte.
    if not type_num:
        type_num, new_offset = self._read_extended(new_offset)

    if type_num not in self._type_decoder:
        raise InvalidDatabaseError(u'Unexpected type number ({type}) encountered'.format(type=type_num))

    size, new_offset = self._size_from_ctrl_byte(ctrl_byte, new_offset, type_num)
    return self._type_decoder[type_num](self, size, new_offset)
def __init__(self, database, mode=MODE_AUTO):
    """Reader for the MaxMind DB file format.

    Arguments:
        database -- A path to a valid MaxMind DB file such as a GeoIP2
            database file.
        mode -- mode to open the database with. Valid modes are:
            * MODE_MMAP - read from memory map.
            * MODE_FILE - read database as standard file.
            * MODE_MEMORY - load database into memory.
            * MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default.

    Raises:
        ValueError: for an unsupported mode.
        InvalidDatabaseError: when the metadata marker can't be found.
    """
    if (((mode == MODE_AUTO) and mmap) or (mode == MODE_MMAP)):
        # Memory-map the whole file read-only.
        with open(database, u'rb') as db_file:
            self._buffer = mmap.mmap(db_file.fileno(), 0, access=mmap.ACCESS_READ)
            self._buffer_size = self._buffer.size()
    elif (mode in (MODE_AUTO, MODE_FILE)):
        # MODE_AUTO falls through to here when mmap is unavailable.
        self._buffer = FileBuffer(database)
        self._buffer_size = self._buffer.size()
    elif (mode == MODE_MEMORY):
        # Slurp the whole database into a byte string.
        with open(database, u'rb') as db_file:
            self._buffer = db_file.read()
            self._buffer_size = len(self._buffer)
    else:
        raise ValueError(u'Unsupported open mode ({0}). Only MODE_AUTO, MODE_FILE, and MODE_MEMORY are support by the pure Python Reader'.format(mode))

    # The metadata marker is searched for only within the last 128 KiB
    # of the file.
    metadata_start = self._buffer.rfind(self._METADATA_START_MARKER, max(0, (self._buffer_size - (128 * 1024))))

    if (metadata_start == (-1)):
        self.close()
        raise InvalidDatabaseError(u'Error opening database file ({0}). Is this a valid MaxMind DB file?'.format(database))

    # Decode the metadata section that follows the marker.
    metadata_start += len(self._METADATA_START_MARKER)
    metadata_decoder = Decoder(self._buffer, metadata_start)
    (metadata, _) = metadata_decoder.decode(metadata_start)
    self._metadata = Metadata(**metadata)

    # The data section begins after the search tree plus separator.
    self._decoder = Decoder(self._buffer, (self._metadata.search_tree_size + self._DATA_SECTION_SEPARATOR_SIZE))
def metadata(self):
    """Return the metadata associated with the MaxMind DB file."""
    return self._metadata
def get(self, ip_address):
    """Return the record for ip_address in the MaxMind DB, or None.

    Arguments:
        ip_address -- an IP address in the standard string notation.

    Raises:
        ValueError: when an IPv6 address is looked up in an IPv4-only
            database.
    """
    address = ipaddress.ip_address(ip_address)

    if address.version == 6 and self._metadata.ip_version == 4:
        raise ValueError(u'Error looking up {0}. You attempted to look up an IPv6 address in an IPv4-only database.'.format(ip_address))

    pointer = self._find_address_in_tree(address)
    if pointer:
        return self._resolve_data_pointer(pointer)
    return None
def close(self):
    """Close the MaxMind DB file and return its resources to the system.

    In MODE_MEMORY the buffer is a plain (byte)string and there is
    nothing to close; otherwise it is an mmap or FileBuffer exposing a
    close() method.
    """
    # isinstance is the idiomatic type check and (unlike the old exact
    # type() comparison) also covers str/bytes subclasses.
    if not isinstance(self._buffer, (str, bytes)):
        self._buffer.close()
def __init__(self, **kwargs):
    """Create a new Metadata object from spec key/value pairs.

    kwargs must contain every required metadata field; a missing
    field raises KeyError.
    """
    for field in (u'node_count', u'record_size', u'ip_version',
                  u'database_type', u'languages',
                  u'binary_format_major_version',
                  u'binary_format_minor_version', u'build_epoch',
                  u'description'):
        setattr(self, field, kwargs[field])
@property
def node_byte_size(self):
    """The size of a search-tree node in bytes.

    A node holds two records of record_size bits each, so its byte
    size is 2 * record_size / 8 == record_size / 4.
    """
    return self.record_size // 4
@property
def search_tree_size(self):
    """The total size of the search tree in bytes."""
    return self.node_count * self.node_byte_size
def __init__(self, layer, tileset):
    """Initialize the MBTiles provider from a layer and tileset href.

    The tileset path is resolved relative to the layer configuration
    directory and must be a local file.
    """
    sethref = urljoin(layer.config.dirpath, tileset)
    scheme, h, path, q, p, f = urlparse(sethref)

    if scheme not in ('file', ''):
        raise Exception('Bad scheme in MBTiles provider, must be local file: "%s"' % scheme)

    self.tileset = path
    self.layer = layer
@staticmethod
def prepareKeywordArgs(config_dict):
    """Convert configured parameters to keyword args for __init__()."""
    return dict(tileset=config_dict['tileset'])
def renderTile(self, width, height, srs, coord):
    """Retrieve a single tile, return a TileResponse instance."""
    mime_type, content = get_tile(self.tileset, coord)
    # Map stored mime-types onto TileResponse format names.
    formats = {'image/png': 'PNG', 'image/jpeg': 'JPEG',
               'application/json': 'JSON', None: None}
    return TileResponse(formats[mime_type], content)
def getTypeByExtension(self, extension):
    """Get mime-type and format by file extension.

    Only "png", "jpg" or "json" (case-insensitive) are accepted.

    Raises:
        KnownUnknown: for any other extension.
    """
    # Lower-case once instead of on every comparison.
    ext = extension.lower()

    if ext == 'json':
        return 'application/json', 'JSON'

    if ext == 'png':
        return 'image/png', 'PNG'

    if ext == 'jpg':
        # 'image/jpeg' is the registered JPEG mime-type; the former
        # 'image/jpg' spelling also disagreed with the formats dict
        # used by renderTile().
        return 'image/jpeg', 'JPEG'

    raise KnownUnknown('MBTiles only makes .png and .jpg and .json tiles, not "%s"' % extension)
def __init__(self, filename, format, name):
    """Attach an MBTiles tileset cache, creating the file if absent."""
    self.filename = filename

    if not tileset_exists(filename):
        # Seed a brand-new tileset with minimal required metadata.
        create_tileset(filename, name, 'baselayer', '0', '', format.lower())
def remove(self, layer, coord, format):
    """Remove a cached tile from the tileset."""
    delete_tile(self.filename, coord)
def read(self, layer, coord, format):
    """Return raw tile content from the tileset."""
    # get_tile() yields (mime-type, content); only content is needed.
    result = get_tile(self.filename, coord)
    return result[1]
def save(self, body, layer, coord, format):
    """Write raw tile content to the tileset."""
    put_tile(self.filename, coord, body)
def lock(self, layer, coord, format):
    """Acquire a cache lock for this tile, blocking until acquired.

    Polls with SETNX; after layer.stale_lock_timeout seconds the lock
    is forcibly taken over.
    """
    key = tile_key(layer, coord, format, self.key_prefix) + '-lock'
    deadline = _time() + layer.stale_lock_timeout

    while _time() < deadline:
        # SETNX succeeds only for the caller that created the key.
        if self.conn.setnx(key, 'locked.'):
            return
        _sleep(0.2)

    # Timed out: assume the previous holder died and steal the lock.
    self.conn.set(key, 'locked.')
    return
def unlock(self, layer, coord, format):
    """Release a cache lock for this tile."""
    base_key = tile_key(layer, coord, format, self.key_prefix)
    self.conn.delete(base_key + '-lock')
def remove(self, layer, coord, format):
    """Remove a cached tile."""
    self.conn.delete(tile_key(layer, coord, format, self.key_prefix))
def read(self, layer, coord, format):
    """Read a cached tile."""
    return self.conn.get(tile_key(layer, coord, format, self.key_prefix))
def save(self, body, layer, coord, format):
    """Save a cached tile, honoring the layer's cache lifespan."""
    key = tile_key(layer, coord, format, self.key_prefix)

    lifespan = layer.cache_lifespan
    if lifespan == 0:
        # A zero lifespan means "never expire", which is ex=None.
        lifespan = None

    self.conn.set(key, body, ex=lifespan)
def __call__(self, environ, start_response):
    """Handle a WSGI request using PATH_INFO and QUERY_STRING from environ.

    There are six required query string parameters: width, height,
    xmin, ymin, xmax and ymax.  The layer name must be supplied in
    PATH_INFO.  Responds with a rendered PNG, or a plain-text 400 for
    any KnownUnknown failure.
    """
    try:
        for var in 'QUERY_STRING PATH_INFO'.split():
            if (var not in environ):
                raise KnownUnknown(('Missing "%s" environment variable' % var))

        query = dict(parse_qsl(environ['QUERY_STRING']))

        for param in 'width height xmin ymin xmax ymax'.split():
            if (param not in query):
                raise KnownUnknown(('Missing "%s" parameter' % param))

        # PATH_INFO names the layer to render.
        layer = environ['PATH_INFO'].strip('/')
        layer = self.config.layers[layer]
        provider = layer.provider

        if (not hasattr(provider, 'renderArea')):
            raise KnownUnknown(('Layer "%s" provider %s has no renderArea() method' % (layer.name(), provider.__class__)))

        (width, height) = [int(query[p]) for p in 'width height'.split()]
        (xmin, ymin, xmax, ymax) = [float(query[p]) for p in 'xmin ymin xmax ymax'.split()]

        # Render the requested bounding box (no SRS, no zoom given).
        output = StringIO()
        image = provider.renderArea(width, height, None, xmin, ymin, xmax, ymax, None)
        image.save(output, format='PNG')

        headers = [('Content-Type', 'image/png')]

        if layer.allowed_origin:
            headers.append(('Access-Control-Allow-Origin', layer.allowed_origin))

        if (layer.max_cache_age is not None):
            expires = (datetime.utcnow() + timedelta(seconds=layer.max_cache_age))
            headers.append(('Expires', expires.strftime('%a %d %b %Y %H:%M:%S GMT')))
            headers.append(('Cache-Control', ('public, max-age=%d' % layer.max_cache_age)))

        start_response('200 OK', headers)
        # NOTE(review): returning a bare string makes the WSGI server
        # iterate it character-by-character; wrapping the body in a
        # one-element list would be cheaper.
        return output.getvalue()

    except KnownUnknown as e:
        start_response('400 Bad Request', [('Content-Type', 'text/plain')])
        return str(e)
def _filepath(self, layer, coord, format):
    """Build the on-disk relative path for a tile.

    Layout: <layer>/<zoom>/<xxx>/<xxx>/<yyy>/<yyy>.<ext>, with the
    column and row zero-padded to six digits and each split into two
    three-digit directory components.
    """
    zoom = '%d' % coord.zoom
    column = '%06d' % coord.column
    row = '%06d' % coord.row

    pieces = (layer.name(), zoom,
              column[:3], column[3:],
              row[:3], row[3:] + '.' + format.lower())
    return os.sep.join(pieces)
def lock(self, layer, coord, format):
    """Acquire a cache lock for this tile, blocking until acquired.

    The lock is a row in the "locks" table: the INSERT below fails
    with IntegrityError while another process holds the row.  After
    layer.stale_lock_timeout seconds the lock is forcibly released
    and taken over.
    """
    sys.stderr.write(('lock %d/%d/%d, %s' % (coord.zoom, coord.column, coord.row, format)))
    due = (time.time() + layer.stale_lock_timeout)

    while True:
        if (time.time() > due):
            # Stale lock: break it, then retry the INSERT below.
            sys.stderr.write(('...force %d/%d/%d, %s' % (coord.zoom, coord.column, coord.row, format)))
            self.unlock(layer, coord, format)

        db = connect(self.dbpath, isolation_level='EXCLUSIVE').cursor()

        try:
            db.execute('INSERT INTO locks\n (row, column, zoom, format)\n VALUES (?, ?, ?, ?)', (coord.row, coord.column, coord.zoom, format))
        except IntegrityError:
            # Another process holds the lock; wait briefly and retry.
            db.connection.close()
            time.sleep(0.2)
            continue
        else:
            db.connection.commit()
            db.connection.close()
            break
def unlock(self, layer, coord, format):
    """Release a cache lock by deleting this tile's row in "locks"."""
    sys.stderr.write('unlock %d/%d/%d, %s' % (coord.zoom, coord.column, coord.row, format))

    cursor = connect(self.dbpath, isolation_level='EXCLUSIVE').cursor()
    cursor.execute('DELETE FROM locks\n WHERE row=? AND column=? AND zoom=? AND format=?',
                   (coord.row, coord.column, coord.zoom, format))
    cursor.connection.commit()
    cursor.connection.close()
def remove(self, layer, coord, format):
    """Remove a cached tile -- not yet implemented for LimitedDisk."""
    raise NotImplementedError('LimitedDisk Cache does not yet implement the .remove() method.')
def read(self, layer, coord, format):
    """Read a cached tile; returns None on a cache miss.

    On a hit, also refreshes the tile's "used" column with the
    current time so eviction in save() sees it as recently accessed.
    """
    sys.stderr.write('read %d/%d/%d, %s' % (coord.zoom, coord.column, coord.row, format))
    path = self._filepath(layer, coord, format)
    fullpath = pathjoin(self.cachepath, path)

    if exists(fullpath):
        # Binary mode for tile data, and a context manager so the
        # handle is closed promptly instead of leaking.
        with open(fullpath, 'rb') as tile_file:
            body = tile_file.read()

        sys.stderr.write('...hit %s, set used=%d' % (path, time.time()))

        db = connect(self.dbpath).cursor()
        db.execute('UPDATE tiles\n SET used=?\n WHERE path=?', (int(time.time()), path))
        db.connection.commit()
        db.connection.close()
    else:
        sys.stderr.write('...miss')
        body = None

    return body
def _write(self, body, path, format):
    """Write the tile body into the cache directory, returning its size.

    If the filesystem block size is known, the returned size is the
    actual disk space used (rounded up to whole blocks).
    """
    fullpath = pathjoin(self.cachepath, path)

    try:
        umask_old = os.umask(self.umask)
        # 511 == 0777, masked by the configured umask bits.
        os.makedirs(dirname(fullpath), (511 & (~ self.umask)))
    except OSError as e:
        # errno 17 == EEXIST: the directory already exists, fine.
        if (e.errno != 17):
            raise
    finally:
        os.umask(umask_old)

    # Write to a temp file first, then rename into place so readers
    # never observe a partially-written tile.
    (fh, tmp_path) = mkstemp(dir=self.cachepath, suffix=('.' + format.lower()))
    os.write(fh, body)
    os.close(fh)

    try:
        os.rename(tmp_path, fullpath)
    except OSError:
        # Rename over an existing file can fail (e.g. on Windows);
        # remove the old file and retry.
        os.unlink(fullpath)
        os.rename(tmp_path, fullpath)

    # 438 == 0666, masked by the configured umask bits.
    os.chmod(fullpath, (438 & (~ self.umask)))

    stat = os.stat(fullpath)
    size = stat.st_size

    if hasattr(stat, 'st_blksize'):
        # Round up to whole filesystem blocks for true disk usage.
        blocks = _ceil((size / float(stat.st_blksize)))
        size = int((blocks * stat.st_blksize))

    return size
def _remove(self, path):
    """Delete a cached tile file from the cache directory."""
    os.unlink(pathjoin(self.cachepath, path))
def save(self, body, layer, coord, format):
    """Save a tile to disk and record it in the "tiles" table.

    After saving, evicts least-recently-used tiles until the total
    cached size is back under self.limit.
    """
    sys.stderr.write(('save %d/%d/%d, %s' % (coord.zoom, coord.column, coord.row, format)))
    path = self._filepath(layer, coord, format)
    size = self._write(body, path, format)

    db = connect(self.dbpath).cursor()

    try:
        db.execute('INSERT INTO tiles\n (size, used, path)\n VALUES (?, ?, ?)', (size, int(time.time()), path))
    except IntegrityError:
        # Tile already recorded: refresh its size and used time.
        db.execute('UPDATE tiles\n SET size=?, used=?\n WHERE path=?', (size, int(time.time()), path))

    row = db.execute('SELECT SUM(size) FROM tiles').fetchone()

    if (row and (row[0] > self.limit)):
        over = (row[0] - self.limit)

        # Evict in ascending "used" order (least recently used first)
        # until we are back under the size limit.
        while (over > 0):
            row = db.execute('SELECT path, size FROM tiles ORDER BY used ASC LIMIT 1').fetchone()

            if (row is None):
                break

            (path, size) = row
            db.execute('DELETE FROM tiles WHERE path=?', (path,))
            self._remove(path)
            over -= size
            sys.stderr.write(('delete ' + path))

    db.connection.commit()
    db.connection.close()
def lock(self, layer, coord, format):
    """Acquire a cache lock for this tile, blocking until acquired.

    Waits for an existing lock key to disappear until the layer's
    stale-lock timeout passes, then claims the lock regardless.
    """
    lock_name = tile_key(layer, coord, format) + '-lock'
    deadline = time() + layer.stale_lock_timeout

    while time() < deadline:
        if not self.bucket.get_key(lock_name):
            break
        _sleep(0.2)

    key = self.bucket.new_key(lock_name)
    key.set_contents_from_string('locked.', {'Content-Type': 'text/plain'})
def unlock(self, layer, coord, format):
    """Release a cache lock for this tile (best-effort)."""
    key_name = tile_key(layer, coord, format)
    try:
        self.bucket.delete_key(key_name + '-lock')
    except Exception:
        # Best-effort unlock: a missing key or transient S3 error is
        # not fatal.  The former bare "except:" also swallowed
        # KeyboardInterrupt and SystemExit.
        pass
def remove(self, layer, coord, format):
    """Remove a cached tile from the S3 bucket."""
    self.bucket.delete_key(tile_key(layer, coord, format))
def read(self, layer, coord, format):
    """Read a cached tile, or None when it is absent or expired."""
    key = self.bucket.get_key(tile_key(layer, coord, format))

    if key is None:
        return None

    if layer.cache_lifespan:
        # Treat tiles older than the configured lifespan as misses.
        modified = timegm(strptime(key.last_modified, '%a, %d %b %Y %H:%M:%S %Z'))
        if (time() - modified) > layer.cache_lifespan:
            return None

    return key.get_contents_as_string()
def save(self, body, layer, coord, format):
    """Save a cached tile publicly with a guessed content type."""
    key = self.bucket.new_key(tile_key(layer, coord, format))

    content_type, encoding = guess_type('example.' + format)
    headers = {'Content-Type': content_type} if content_type else {}

    key.set_contents_from_string(body, headers, policy='public-read')
def __init__(self, srs, resolutions, tile_size=256, transformation=Transformation(1, 0, 0, 0, 1, 0)):
    """Create a projection from a Proj4 srs string.

    Args:
        srs: projection description in Proj4 format.
        resolutions: list of per-zoom resolutions; one of them must
            be exactly 1.0, which defines the base zoom.
        tile_size: pixel size of a tile edge.
        transformation: linear Transformation applied by the parent.

    Raises:
        TileStache.Core.KnownUnknown: when no resolution equals 1.0.
    """
    self.resolutions = resolutions
    self.tile_size = tile_size
    self.proj = Proj(srs)
    self.srs = srs
    # Per-zoom size, in projected units, of one tile edge.
    self.tile_dimensions = [self.tile_size * r for r in self.resolutions]

    try:
        self.base_zoom = self.resolutions.index(1.0)
    except ValueError:
        raise TileStache.Core.KnownUnknown('No zoom level with resolution 1.0')

    LinearProjection.__init__(self, self.base_zoom, transformation)
def coordinateLocation(self, coord):
    """Unimplemented; see the linked TileStache pull request."""
    raise NotImplementedError('Missing Proj4Projection.coordinateLocation(), see https://github.com/migurski/TileStache/pull/127')
def coordinateProj(self, coord):
    """Convert a Coordinate object to a Point in the defined projection.

    Raises:
        TileStache.Core.KnownUnknown: for zooms beyond the defined
            resolutions.
    """
    if coord.zoom >= len(self.tile_dimensions):
        raise TileStache.Core.KnownUnknown('Requested zoom level %d outside defined resolutions.' % coord.zoom)
    return self.unproject(Point(coord.column, coord.row), 1.0 / self.tile_dimensions[coord.zoom])
def locationProj(self, location):
    """Convert a Location object to a Point in the defined projection."""
    x, y = self.proj(location.lon, location.lat)
    return Point(x, y)
def projCoordinate(self, point, zoom=None):
    """Convert a Point in the defined projection to a Coordinate object.

    Args:
        point: Point in projected units.
        zoom: target zoom level; defaults to the zoom whose
            resolution is 1.0 (self.base_zoom).

    Raises:
        TileStache.Core.KnownUnknown: when the zoom is outside the
            defined resolutions, or when the point does not align
            with the tile grid within _grid_threshold.
    """
    if (zoom == None):
        zoom = self.base_zoom
    if (zoom >= len(self.tile_dimensions)):
        raise TileStache.Core.KnownUnknown(('Requested zoom level %d outside defined resolutions.' % zoom))

    # Scale the point down to tile-grid units for this zoom.
    td = self.tile_dimensions[zoom]
    p = self.project(point, (1.0 / td))

    row = round(p.y)
    col = round(p.x)

    # Refuse points that don't sit (nearly) exactly on the grid.
    if ((abs((p.y - row)) > _grid_threshold) or (abs((p.x - col)) > _grid_threshold)):
        raise TileStache.Core.KnownUnknown(((('Point(%f, %f) does not align with grid ' + 'for zoom level %d ') + '(resolution=%f, difference: %f, %f).') % (point.x, point.y, zoom, self.resolutions[zoom], (p.y - row), (p.x - col))))

    c = Coordinate(int(row), int(col), zoom)
    return c
def projLocation(self, point):
    """Convert a projected Point back to a Location (lat, lon)."""
    lon, lat = self.proj(point.x, point.y, inverse=True)
    return Location(lat, lon)
def __init__(self, layer, dbinfo, queries, clip=True, srid=900913, simplify=1.0, simplify_until=16, padding=0):
    """Initialize the VecTiles provider.

    Args:
        layer: parent TileStache layer.
        dbinfo: Postgres connection parameters; only a known subset
            of keys is kept.
        queries: a list of per-zoom queries, or a dict mapping zoom
            level -> query.  Each query may be inline SQL or a
            file/http URL from which SQL is loaded.  None disables a
            zoom level.
        clip: whether geometries are clipped to the tile bounds.
        srid: spatial reference ID of the stored geometries.
        simplify: simplification tolerance multiplier.
        simplify_until: zoom level at which simplification stops.
        padding: extra padding around the tile bounds.
    """
    self.layer = layer

    # Keep only recognized connection parameters.
    keys = ('host', 'user', 'password', 'database', 'port', 'dbname')
    self.dbinfo = dict([(k, v) for (k, v) in dbinfo.items() if (k in keys)])

    self.clip = bool(clip)
    self.srid = int(srid)
    self.simplify = float(simplify)
    self.simplify_until = int(simplify_until)
    self.padding = int(padding)
    self.columns = {}

    # Normalize queries into a zoom-indexed list; dict form may be
    # sparse, leaving missing zooms as None.
    if isinstance(queries, dict):
        n_zooms = (max((int(z) for z in queries)) + 1)
        queryiter = ((int(z), q) for (z, q) in queries.iteritems())
    else:
        n_zooms = len(queries)
        queryiter = enumerate(queries)

    self.queries = ([None] * n_zooms)

    for (z, query) in queryiter:
        if (query is None):
            continue

        # Queries may be given inline, or as local-file / http URLs
        # resolved against the configuration directory.
        url = urljoin(layer.config.dirpath, query)
        (scheme, h, path, p, q, f) = urlparse(url)

        if ((scheme in ('file', '')) and exists(path)):
            query = open(path).read()
        elif ((scheme == 'http') and (' ' not in url)):
            query = urlopen(url).read()

        self.queries[z] = query
def renderTile(self, width, height, srs, coord):
    """Render a single tile, return a Response instance."""
    # Zooms past the last configured query reuse the deepest one.
    try:
        query = self.queries[coord.zoom]
    except IndexError:
        query = self.queries[-1]

    ll = self.layer.projection.coordinateProj(coord.down())
    ur = self.layer.projection.coordinateProj(coord.right())
    bounds = (ll.x, ll.y, ur.x, ur.y)

    if not query:
        return EmptyResponse(bounds)

    # Column lists are cached per query; an empty list means there is
    # nothing to render for this query.
    if query not in self.columns:
        self.columns[query] = query_columns(self.dbinfo, self.srid, query, bounds)

    if not self.columns[query]:
        return EmptyResponse(bounds)

    if coord.zoom < self.simplify_until:
        tolerance = self.simplify * tolerances[coord.zoom]
    else:
        tolerance = None

    return Response(self.dbinfo, self.srid, query, self.columns[query], bounds, tolerance, coord.zoom, self.clip, coord, self.layer.name(), self.padding)
def getTypeByExtension(self, extension):
    '''Get mime-type and format by file extension, one of "mvt", "json", "topojson" or "pbf".'''
    known = {
        'mvt': ('application/octet-stream+mvt', 'MVT'),
        'json': ('application/json', 'JSON'),
        'topojson': ('application/json', 'TopoJSON'),
        'pbf': ('application/x-protobuf', 'PBF'),
        }

    lowered = extension.lower()

    if lowered not in known:
        raise ValueError(extension)

    return known[lowered]
def renderTile(self, width, height, srs, coord):
    '''Render a single tile, return a Response instance.'''
    # Width, height and srs are unused here; the named layers render themselves.
    return MultiResponse(self.layer.config, self.names, coord)
def getTypeByExtension(self, extension):
    '''Get mime-type and format by file extension, "json", "topojson" or "pbf" only.'''
    types = {
        'json': ('application/json', 'JSON'),
        'topojson': ('application/json', 'TopoJSON'),
        'pbf': ('application/x-protobuf', 'PBF'),
        }

    try:
        return types[extension.lower()]
    except KeyError:
        raise ValueError(extension)
def __init__(self, dbinfo, srid, subquery, columns, bounds, tolerance, zoom, clip, coord, layer_name='', padding=0):
    '''Create a new response object with Postgres connection info and a query.

       bounds argument is a 4-tuple with (xmin, ymin, xmax, ymax).
    '''
    self.dbinfo = dbinfo
    self.bounds = bounds
    self.zoom = zoom
    self.clip = clip
    self.coord = coord
    self.layer_name = layer_name
    self.padding = padding
    # Clamp the zoom into the tolerances table; out-of-range zooms fall back
    # to the last (finest) entry.
    tol_idx = (coord.zoom if (0 <= coord.zoom < len(tolerances)) else (-1))
    tol_val = tolerances[tol_idx]
    # Padding is rescaled here by the zoom tolerance — presumably converting
    # from pixel-ish units into projected units; TODO confirm against build_query.
    self.padding = (self.padding * tol_val)
    # Pre-build one query per output format: geographic coordinates for the
    # JSON family, mercator for MVT, and mercator quantized to pbf.extents for PBF.
    geo_query = build_query(srid, subquery, columns, bounds, tolerance, True, clip, self.padding)
    merc_query = build_query(srid, subquery, columns, bounds, tolerance, False, clip, self.padding)
    pbf_query = build_query(srid, subquery, columns, bounds, tolerance, False, clip, self.padding, pbf.extents)
    self.query = dict(TopoJSON=geo_query, JSON=geo_query, MVT=merc_query, PBF=pbf_query)
def save(self, out, format):
    '''Write tile features to the output stream in the given format.

       format is one of "MVT", "JSON", "TopoJSON" or "PBF"; anything else
       raises ValueError.
    '''
    with Connection(self.dbinfo) as db:
        db.execute(self.query[format])
        features = []
        for row in db.fetchall():
            # Rows without a geometry are skipped entirely.
            if (row['__geometry__'] is None):
                continue
            wkb = bytes(row['__geometry__'])
            # Everything except the geometry and id columns becomes properties.
            prop = dict([(k, v) for (k, v) in row.items() if (k not in ('__geometry__', '__id__'))])
            if ('__id__' in row):
                features.append((wkb, prop, row['__id__']))
            else:
                features.append((wkb, prop))
    # NOTE(review): original line breaks were lost; the format dispatch below is
    # reconstructed as sitting outside the connection block — encoding needs no DB.
    if (format == 'MVT'):
        mvt.encode(out, features)
    elif (format == 'JSON'):
        geojson.encode(out, features, self.zoom, self.clip)
    elif (format == 'TopoJSON'):
        # TopoJSON wants geographic bounds, so unproject the mercator corners.
        ll = SphericalMercator().projLocation(Point(*self.bounds[0:2]))
        ur = SphericalMercator().projLocation(Point(*self.bounds[2:4]))
        topojson.encode(out, features, (ll.lon, ll.lat, ur.lon, ur.lat), self.clip)
    elif (format == 'PBF'):
        pbf.encode(out, features, self.coord, layer_name=self.layer_name)
    else:
        raise ValueError(format)
def save(self, out, format):
    '''Write an empty tile to the output stream in the given format.'''
    if format == 'MVT':
        mvt.encode(out, [])

    elif format == 'JSON':
        geojson.encode(out, [], 0, False)

    elif format == 'TopoJSON':
        # TopoJSON still needs geographic bounds, even with no features.
        sw = SphericalMercator().projLocation(Point(*self.bounds[0:2]))
        ne = SphericalMercator().projLocation(Point(*self.bounds[2:4]))
        topojson.encode(out, [], (sw.lon, sw.lat, ne.lon, ne.lat), False)

    elif format == 'PBF':
        pbf.encode(out, [], None, self.bounds)

    else:
        raise ValueError(format)
def __init__(self, config, names, coord):
    '''Create a new response object with TileStache config and layer names.'''
    self.coord = coord
    self.names = names
    self.config = config
def save(self, out, format):
    '''Write a merged tile for all named layers to the output stream.'''
    if format == 'TopoJSON':
        topojson.merge(out, self.names, self.config, self.coord)

    elif format == 'JSON':
        geojson.merge(out, self.names, self.config, self.coord)

    elif format == 'PBF':
        feature_layers = []

        for name in self.names:
            layer = self.config.layers[name]

            # Render each named layer and skip the ones that came back empty.
            tile = layer.provider.renderTile(layer.dim, layer.dim, layer.projection.srs, self.coord)

            if isinstance(tile, EmptyResponse):
                continue

            feature_layers.append({'name': layer.name(),
                                   'features': get_features(tile.dbinfo, tile.query['PBF'])})

        pbf.merge(out, feature_layers, self.coord)

    else:
        raise ValueError(format)
def __init__(self, template, sort_key=None, clipped='true', zoom_data='single'):
    '''Make a new Datasource.

       Parameters:

         template:
           Required URL template with placeholders for tile zoom, x and y,
           e.g. "http://example.com/layer/{z}/{x}/{y}.json".

         sort_key:
           Optional field name to use when sorting features for rendering.
           E.g. "name" or "name ascending" to sort ascending by name,
           "name descending" to sort descending by name.

         clipped:
           Optional boolean flag to determine correct behavior for duplicate
           geometries. When tile data is not clipped, features() will check
           geometry uniqueness and throw out duplicates. Setting clipped to
           false for actually-clipped geometries has no effect but wastes
           time. Setting clipped to false for unclipped geometries will
           result in possibly wrong-looking output. Default is "true".

         zoom_data:
           Optional keyword specifying single or double zoom data tiles.
           Works especially well with relatively sparse label layers. When
           set to "double", tiles will be requested at one zoom level out
           from the map view, e.g. double-sized z13 tiles will be used to
           render a normal z14 map. Default is "single".
    '''
    (scheme, host, path, p, query, f) = urlparse(template)
    self.host = host
    # Default port follows the scheme; an explicit host:port overrides it below.
    self.port = (443 if (scheme == 'https') else 80)
    if (':' in host):
        self.host = host.split(':', 1)[0]
        self.port = int(host.split(':', 1)[1])
    self.path = ((path + ('?' if query else '')) + query)
    # Escape literal percents *before* introducing the %(z)d-style
    # placeholders, so the placeholders themselves aren't doubled.
    self.path = self.path.replace('%', '%%')
    self.path = self.path.replace('{Z}', '{z}').replace('{z}', '%(z)d')
    self.path = self.path.replace('{X}', '{x}').replace('{x}', '%(x)d')
    self.path = self.path.replace('{Y}', '{y}').replace('{y}', '%(y)d')
    if (sort_key is None):
        (self.sort, self.reverse) = (None, None)
    elif sort_key.lower().endswith(' descending'):
        logging.debug(('Will sort by %s descending' % sort_key))
        (self.sort, self.reverse) = (sort_key.split()[0], True)
    else:
        logging.debug(('Will sort by %s ascending' % sort_key))
        (self.sort, self.reverse) = (sort_key.split()[0], False)
    # Any value other than false/no/0 counts as clipped.
    self.clipped = (clipped.lower() not in ('false', 'no', '0'))
    # "double" means requesting tiles one zoom level out from the map view.
    self.zoom_adjust = {'double': 1}.get(zoom_data.lower(), 0)
    bbox = Box2d(((- diameter) / 2), ((- diameter) / 2), (diameter / 2), (diameter / 2))
    PythonDatasource.__init__(self, envelope=bbox)
def features(self, query):
    '''Return WKB features for the query bbox, loaded from remote data tiles.'''
    logging.debug(('Rendering %s' % str(query.bbox)))
    tiles = list_tiles(query, self.zoom_adjust)
    features = []
    seen = set()
    for (wkb, props) in load_features(8, self.host, self.port, self.path, tiles):
        if (not self.clipped):
            # Unclipped tiles can repeat the same geometry across tile
            # borders; drop exact (geometry, properties) duplicates.
            key = (wkb, tuple(sorted(props.items())))
            if (key in seen):
                continue
            seen.add(key)
        features.append((wkb, utf8_keys(props)))
    if self.sort:
        logging.debug(('Sorting by %s %s' % (self.sort, ('descending' if self.reverse else 'ascending'))))
        # Missing sort fields compare as None rather than raising.
        key_func = (lambda wkb_props: wkb_props[1].get(self.sort, None))
        features.sort(reverse=self.reverse, key=key_func)
    if (len(features) == 0):
        return PythonDatasource.wkb_features(keys=[], features=[])
    # Python 2 zip() returns a list, so [1] picks out the properties column.
    props = zip(*features)[1]
    # Advertise only keys that are present in *every* feature.
    keys = [set(prop.keys()) for prop in props]
    keys = reduce((lambda a, b: (a & b)), keys)
    return PythonDatasource.wkb_features(keys=keys, features=features)
def getTypeByExtension(self, extension):
    '''Get mime-type and format by file extension. This only accepts "json".'''
    if extension.lower() == 'json':
        return ('application/json', 'JSON')

    raise KnownUnknown('PostGeoJSON only makes .json tiles, not "%s"' % extension)
def renderTile(self, width, height, srs, coord):
    '''Render a single tile, return a SaveableResponse instance.'''
    (minx, miny, maxx, maxy) = self.layer.envelope(coord)
    # Tile center, used for the optional radius query below.
    y = (miny + ((maxy - miny) / 2))
    x = (minx + ((maxx - minx) / 2))
    (sw_lat, sw_lon) = self.unproject(minx, miny)
    (ne_lat, ne_lon) = self.unproject(maxx, maxy)
    (center_lat, center_lon) = self.unproject(x, y)
    # Lucene range query over the configured lat/lon fields.
    bbox = ('%s:[%s TO %s] AND %s:[%s TO %s]' % (self.lon_field, sw_lon, ne_lon, self.lat_field, sw_lat, ne_lat))
    query = bbox
    if self.solr_radius:
        # Wrap the bbox in a spatial query-parser clause (arc distance in km).
        query = ('{!%s lat=%s long=%s radius=%s calc=arc unit=km}%s' % (self.query_parser, center_lat, center_lon, self.solr_radius, bbox))
    kwargs = {}
    if (self.query != '*:*'):
        kwargs['fq'] = self.query
    kwargs['omitHeader'] = 'true'
    rsp_fields = []
    if self.solr_fields:
        rsp_fields = self.solr_fields.split(',')
        # The lat/lon fields are always needed to build the geometries.
        if (not (self.lat_field in rsp_fields)):
            rsp_fields.append(self.lat_field)
        if (not (self.lon_field in rsp_fields)):
            rsp_fields.append(self.lon_field)
        # NOTE(review): original indentation was lost; 'fl' is reconstructed as
        # set only when solr_fields is configured — confirm against history.
        kwargs['fl'] = ','.join(rsp_fields)
    response = {'type': 'FeatureCollection', 'features': []}
    total = None
    start = 0
    rows = 1000
    # Page through the result set 1000 rows at a time.
    while ((not total) or (start < total)):
        kwargs['start'] = start
        kwargs['rows'] = rows
        rsp = self.solr.search(query, **kwargs)
        if (not total):
            total = rsp.hits
        # Without this guard a zero-hit response would loop forever.
        if (total == 0):
            break
        for row in rsp:
            if len(rsp_fields):
                # Drop columns that weren't requested. Iterating .items() while
                # deleting is safe here because Python 2 .items() returns a list.
                for (key, ignore) in row.items():
                    if (not (key in rsp_fields)):
                        del row[key]
            row['geometry'] = {'type': 'Point', 'coordinates': (row[self.lon_field], row[self.lat_field])}
            del row[self.lat_field]
            del row[self.lon_field]
            if (self.id_field != ''):
                row['id'] = row[self.id_field]
            response['features'].append(row)
        start += rows
    return SaveableResponse(response)
def __init__(self, layer, mapfile, fields, layer_index=0, wrapper=None, scale=4, buffer=0):
    '''Create a UTFGrid provider for one mapnik layer; map loading is deferred to first render.'''
    self.mapnik = None
    self.layer = layer

    # Resolve the mapfile relative to the config directory; non-file URLs kept as-is.
    href = urljoin(layer.config.dirpath, mapfile)
    scheme, _netloc, local_path, _q, _p, _f = urlparse(href)

    if scheme in ('file', ''):
        self.mapfile = local_path
    else:
        self.mapfile = href

    self.layer_index = layer_index
    self.wrapper = wrapper
    self.scale = scale
    self.buffer = buffer
    self.fields = [str(field) for field in fields]
    self.mercator = getProjectionByName('spherical mercator')
def renderTile(self, width, height, srs, coord):
    '''Render a single tile, return a SaveableResponse with UTFGrid JSON.'''
    # Lazily build the mapnik map on first use.
    # NOTE(review): this caching looks non-thread-safe — confirm callers serialize renders.
    if (self.mapnik is None):
        self.mapnik = get_mapnikMap(self.mapfile)
    # Pad the render by self.buffer pixels on every side; the coordinate math
    # expresses that padding as a fraction of a 256px tile.
    buffer = (float(self.buffer) / 256)
    nw = self.layer.projection.coordinateLocation(coord.left(buffer).up(buffer))
    se = self.layer.projection.coordinateLocation(coord.right((1 + buffer)).down((1 + buffer)))
    ul = self.mercator.locationProj(nw)
    lr = self.mercator.locationProj(se)
    self.mapnik.width = (width + (2 * self.buffer))
    self.mapnik.height = (height + (2 * self.buffer))
    self.mapnik.zoom_to_box(mapnik.Box2d(ul.x, ul.y, lr.x, lr.y))
    grid = mapnik.Grid((width + (2 * self.buffer)), (height + (2 * self.buffer)))
    mapnik.render_layer(self.mapnik, grid, layer=self.layer_index, fields=self.fields)
    # Crop the buffered grid back down to the requested tile size.
    grid_view = grid.view(self.buffer, self.buffer, width, height)
    grid_utf = grid_view.encode('utf', resolution=self.scale, add_features=True)
    if (self.wrapper is None):
        return SaveableResponse(json.dumps(grid_utf))
    else:
        # JSONP-style wrapping when a callback wrapper was configured.
        return SaveableResponse((((self.wrapper + '(') + json.dumps(grid_utf)) + ')'))
def getTypeByExtension(self, extension):
    '''Get mime-type and format by file extension. This only accepts "json".'''
    if extension.lower() != 'json':
        raise KnownUnknown('MapnikGrid only makes .json tiles, not "%s"' % extension)

    mime, fmt = 'application/json; charset=utf-8', 'JSON'
    return (mime, fmt)
def __init__(self, layer, stack=None, stackfile=None):
    '''Make a new Composite.Provider.

       Arguments:
         layer: The current TileStache.Core.Layer.
         stack: A list or dictionary with configuration for the image stack,
                parsed by build_stack(). Also acceptable is a URL to a JSON file.
         stackfile: *Deprecated* filename for an XML representation of the
                    image stack.
    '''
    self.layer = layer

    # A string stack argument is a URL to a JSON stack description.
    if type(stack) in (str, unicode):
        stack = jsonload(urlopen(urljoin(layer.config.dirpath, stack)).read())

    if type(stack) in (list, dict):
        self.stack = build_stack(stack)

    elif (stack is None) and stackfile:
        # Deprecated XML path: parse the file and build from its root element.
        stackfile = pathjoin(self.layer.config.dirpath, stackfile)
        stack = parseXML(stackfile).firstChild

        assert (stack.tagName == 'stack'), ('Expecting root element "stack" but got "%s"' % stack.tagName)

        self.stack = makeStack(stack)

    else:
        # Fixed typo in the error message ("Note sure" -> "Not sure").
        raise Exception(('Not sure what to do with this stack argument: %s' % repr(stack)))
def __init__(self, layername=None, colorname=None, maskname=None, opacity=1.0, blendmode=None, adjustments=None, zoom=''):
    '''A new image layer.

       Arguments:
         layername: Name of the primary source image layer.
         colorname: Fill color, passed to make_color().
         maskname: Name of the mask image layer.
    '''
    self.layername = layername
    self.colorname = colorname
    self.maskname = maskname
    self.opacity = opacity
    self.blendmode = blendmode
    self.adjustments = adjustments

    # Accept "a-b" for a zoom range or "a" for a single zoom; anything else
    # (including the empty default) means every zoom level.
    match = re.search(r'^(\d+)-(\d+)$|^(\d+)$', zoom) if zoom else None

    if not match:
        self.min_zoom, self.max_zoom = 0, float('inf')
    else:
        lo, hi, single = match.groups()

        if lo is not None and hi is not None:
            self.min_zoom, self.max_zoom = int(lo), int(hi)
        elif single is not None:
            self.min_zoom, self.max_zoom = int(single), int(single)
def in_zoom(self, zoom):
    '''Return true if the requested zoom level is valid for this layer.'''
    return self.min_zoom <= zoom <= self.max_zoom
def render(self, config, input_rgba, coord):
    '''Render this image layer.

       Given a configuration object, starting image, and coordinate, return
       an output image with the contents of this image layer.
    '''
    (has_layer, has_color, has_mask) = (False, False, False)
    # Work on copies so the caller's channels are never mutated.
    output_rgba = [chan.copy() for chan in input_rgba]
    if self.layername:
        layer = config.layers[self.layername]
        (mime, body) = TileStache.getTile(layer, coord, 'png')
        layer_img = Image.open(StringIO(body)).convert('RGBA')
        layer_rgba = _img2rgba(layer_img)
        has_layer = True
    if self.maskname:
        layer = config.layers[self.maskname]
        (mime, body) = TileStache.getTile(layer, coord, 'png')
        # Masks are grayscale, scaled to 0..1 floats.
        mask_img = Image.open(StringIO(body)).convert('L')
        mask_chan = (_img2arr(mask_img).astype(numpy.float32) / 255.0)
        has_mask = True
    if self.colorname:
        color = make_color(self.colorname)
        # Flood-fill channels with the color, scaled to 0..1 floats.
        color_rgba = [(numpy.zeros(output_rgba[0].shape, numpy.float32) + (band / 255.0)) for band in color]
        has_color = True
    if has_layer:
        layer_rgba = apply_adjustments(layer_rgba, self.adjustments)
    # Dispatch on which combination of src / color / mask was configured.
    if (has_layer and has_color and has_mask):
        raise KnownUnknown(("You can't specify src, color and mask together in a Composite Layer: %s, %s, %s" % (repr(self.layername), repr(self.colorname), repr(self.maskname))))
    elif (has_layer and has_color):
        # Color fill first, then the layer on top of it.
        output_rgba = blend_images(output_rgba, color_rgba[:3], color_rgba[3], self.opacity, self.blendmode)
        output_rgba = blend_images(output_rgba, layer_rgba[:3], layer_rgba[3], self.opacity, self.blendmode)
    elif (has_layer and has_mask):
        # The layer's own alpha is multiplied by the mask.
        layermask_chan = (layer_rgba[3] * mask_chan)
        output_rgba = blend_images(output_rgba, layer_rgba[:3], layermask_chan, self.opacity, self.blendmode)
    elif (has_color and has_mask):
        output_rgba = blend_images(output_rgba, color_rgba[:3], mask_chan, self.opacity, self.blendmode)
    elif has_layer:
        output_rgba = blend_images(output_rgba, layer_rgba[:3], layer_rgba[3], self.opacity, self.blendmode)
    elif has_color:
        output_rgba = blend_images(output_rgba, color_rgba[:3], color_rgba[3], self.opacity, self.blendmode)
    elif has_mask:
        raise KnownUnknown(('You have to provide more than just a mask to Composite Layer: %s' % repr(self.maskname)))
    else:
        # NOTE(review): this message string was split across lines in the
        # extracted source; reconstructed here as a single-space string.
        raise KnownUnknown('You have to provide at least some combination of src, color and mask to Composite Layer')
    return output_rgba
def __init__(self, layers):
    '''A new image stack.

       Argument:
         layers: List of Layer instances.
    '''
    self.layers = layers
def in_zoom(self, level):
    '''Return true unconditionally; a stack imposes no zoom restrictions of its own.'''
    return True
def render(self, config, input_rgba, coord):
    '''Render this image stack.

       Given a configuration object, starting image, and coordinate, return
       an output image with the results of all the layers in this stack
       pasted on in turn.
    '''
    # Start from fully-transparent channels shaped like the input.
    stack_rgba = [numpy.zeros(chan.shape, chan.dtype) for chan in input_rgba]

    for member in self.layers:
        try:
            if member.in_zoom(coord.zoom):
                stack_rgba = member.render(config, stack_rgba, coord)
        except IOError:
            # Best-effort behavior: a layer that fails to load is skipped.
            pass

    return blend_images(input_rgba, stack_rgba[:3], stack_rgba[3], 1, None)
def getTypeByExtension(self, extension):
    '''Get mime-type and format by file extension. This only accepts "json".'''
    lowered = extension.lower()

    if lowered != 'json':
        raise KnownUnknown('PostGeoJSON only makes .json tiles, not "%s"' % extension)

    return ('application/json', 'JSON')
def renderTile(self, width, height, srs, coord):
    '''Render a single tile, return a SaveableResponse instance.

       The database cursor is now closed even when the query raises
       (previously it leaked on execute()/fetchall() failure).
    '''
    # Tile corners, in both geographic and spherical-mercator terms.
    nw = self.layer.projection.coordinateLocation(coord)
    se = self.layer.projection.coordinateLocation(coord.right().down())
    ul = self.mercator.locationProj(nw)
    lr = self.mercator.locationProj(se)

    bbox = ('ST_SetSRID(ST_MakeBox2D(ST_MakePoint(%.6f, %.6f), ST_MakePoint(%.6f, %.6f)), 900913)' % (ul.x, ul.y, lr.x, lr.y))

    # Optional clipping polygon covering the tile bounds.
    clip = ((self.clipping and Polygon([(ul.x, ul.y), (lr.x, ul.y), (lr.x, lr.y), (ul.x, lr.y)])) or None)

    db = _connect(self.dbdsn).cursor(cursor_factory=RealDictCursor)

    try:
        # The configured query uses !bbox! as a placeholder for the tile box.
        db.execute(self.query.replace('!bbox!', bbox))
        rows = db.fetchall()
    finally:
        # Bug fix: close the cursor on the error path too, not just on success.
        db.close()

    response = {'type': 'FeatureCollection', 'features': []}

    for row in rows:
        feature = row2feature(row, self.id_field, self.geometry_field)

        try:
            geom = shape2geometry(feature['geometry'], self.mercator, clip)
        except _InvisibleBike:
            # Features clipped entirely away are silently dropped.
            pass
        else:
            feature['geometry'] = geom
            response['features'].append(feature)

    return SaveableResponse(response, self.indent, self.precision)
def do_I_have_to_draw_you_a_picture(self):
    '''Return a little thumbs-up / thumbs-down image with text in it.'''
    if self.success:
        (bytes, color) = (_thumbs_up_bytes, _thumbs_up_color)
    else:
        (bytes, color) = (_thumbs_down_bytes, _thumbs_down_color)
    thumb = Image.open(StringIO(bytes))
    image = Image.new('RGB', (256, 256), color)
    image.paste(thumb.resize((128, 128)), (64, 80))
    # Fetch the matching OpenStreetMap tile over HTTP and ghost it in for context.
    mapnik_url = ('http://tile.openstreetmap.org/%(zoom)d/%(column)d/%(row)d.png' % self.coord.__dict__)
    mapnik_img = Image.open(StringIO(urlopen(mapnik_url).read()))
    mapnik_img = mapnik_img.convert('L').convert('RGB')
    image = Image.blend(image, mapnik_img, 0.15)
    draw = ImageDraw(image)
    (margin, leading) = (8, 12)
    (x, y) = (margin, margin)
    # Naive word-wrap: start a new line before a word would cross x=250.
    for word in self.content.split():
        (w, h) = draw.textsize(word)
        if ((x > margin) and ((x + w) > 250)):
            (x, y) = (margin, (y + leading))
        draw.text((x, y), word, fill=(51, 51, 51))
        # Advance by the width of the word plus a trailing space.
        x += draw.textsize((word + ' '))[0]
    return image
def __init__(self, layer, database, username, password=None, hostname=None, table_prefix='mirrorosm', api_base='http://open.mapquestapi.com/xapi/', osm2pgsql='osm2pgsql --utf8-sanitize'):
    '''Create a MirrorOSM provider with database and OSM API settings.'''
    self.layer = layer
    self.api_base = api_base
    self.prefix = table_prefix
    self.osm2pgsql = osm2pgsql

    # Only include connection settings that were actually provided.
    self.dbkwargs = {'database': database}

    if hostname:
        self.dbkwargs['host'] = hostname

    if username:
        self.dbkwargs['user'] = username

    if password:
        self.dbkwargs['password'] = password
def getTypeByExtension(self, extension):
    '''Get mime-type and format by file extension. This only accepts "txt".'''
    known = {
        'txt': ('text/plain', 'TXT'),
        'png': ('image/png', 'PNG'),
        }

    lowered = extension.lower()

    if lowered not in known:
        raise KnownUnknown('MirrorOSM only makes .txt and .png tiles, not "%s"' % extension)

    return known[lowered]
def renderTile(self, width, height, srs, coord):
    '''Render a single tile, return a ConfirmationResponse instance.'''
    if (coord.zoom < 12):
        raise KnownUnknown(('MirrorOSM provider only handles data at zoom 12 or higher, not %d.' % coord.zoom))
    start = time()
    garbage = []
    # First temp file exists only so its unique name can seed a temp table prefix.
    (handle, filename) = mkstemp(prefix='mirrorosm-', suffix='.tablename')
    tmp_prefix = ('mirrorosm_' + b16encode(basename(filename)[10:(-10)]).lower())
    garbage.append(filename)
    close(handle)
    # Second temp file receives the downloaded OSM extract.
    (handle, filename) = mkstemp(prefix='mirrorosm-', suffix='.osm.gz')
    garbage.append(filename)
    close(handle)
    try:
        length = download_api_data(filename, coord, self.api_base, self.layer.projection)
        prepare_data(filename, tmp_prefix, self.dbkwargs, self.osm2pgsql, self.layer.projection)
        db = _connect(**self.dbkwargs).cursor()
        ul = self.layer.projection.coordinateProj(coord)
        lr = self.layer.projection.coordinateProj(coord.down().right())
        # Copy the prepared temp tables into the permanent mirror tables, then drop them.
        create_tables(db, self.prefix, tmp_prefix)
        populate_tables(db, self.prefix, tmp_prefix, (ul.x, ul.y, lr.x, lr.y))
        clean_up_tables(db, tmp_prefix)
        db.close()
        message = ('Retrieved %dK of OpenStreetMap data for tile %d/%d/%d in %.2fsec from %s (%s).\n' % (length, coord.zoom, coord.column, coord.row, (time() - start), self.api_base, datetime.now()))
        return ConfirmationResponse(coord, message, True)
    except Exception as e:
        message = ('Error in tile %d/%d/%d: %s' % (coord.zoom, coord.column, coord.row, e))
        # Wrap the failure so the caching machinery doesn't store this tile.
        raise NoTileLeftBehind(ConfirmationResponse(coord, message, False))
    finally:
        # Temp files are removed on every path, success or failure.
        for filename in garbage:
            unlink(filename)
def getTypeByExtension(self, extension):
    '''Get mime-type and format by file extension. This only accepts "json".'''
    if extension.lower() == 'json':
        return ('text/json', 'JSON')

    raise KnownUnknown('UtfGridComposite only makes .json tiles, not "%s"' % extension)
def __init__(self, layer, database=None, username=None, password=None, hostname=None):
    '''Store the layer and whichever database connection settings were given.'''
    self.layer = layer
    self.dbkwargs = {}

    for key, value in (('host', hostname), ('user', username), ('database', database), ('password', password)):
        if value:
            self.dbkwargs[key] = value