'read a WebSocket frame header'
@classmethod
def from_file(cls, fp):
    first_byte, second_byte = fp.safe_read(2)
    fin = bits.getbit(first_byte, 7)
    rsv1 = bits.getbit(first_byte, 6)
    rsv2 = bits.getbit(first_byte, 5)
    rsv3 = bits.getbit(first_byte, 4)
    opcode = first_byte & 15
    mask_bit = bits.getbit(second_byte, 7)
    length_code = second_byte & 127
    if length_code <= 125:
        payload_length = length_code
    elif length_code == 126:
        payload_length, = struct.unpack('!H', fp.safe_read(2))
    else:
        payload_length, = struct.unpack('!Q', fp.safe_read(8))
    if mask_bit == 1:
        masking_key = fp.safe_read(4)
    else:
        masking_key = None
    return cls(
        fin=fin,
        rsv1=rsv1,
        rsv2=rsv2,
        rsv3=rsv3,
        opcode=opcode,
        mask=mask_bit,
        length_code=length_code,
        payload_length=payload_length,
        masking_key=masking_key,
    )
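A minimal standalone sketch of the same bit layout, assuming a plain bytes buffer instead of mitmproxy's safe_read()-style reader; the parse_frame_header helper is hypothetical and only illustrates the header fields decoded above:

import struct

def parse_frame_header(buf):
    # First byte: FIN flag, three reserved bits, 4-bit opcode.
    first, second = buf[0], buf[1]
    fin = (first >> 7) & 1
    opcode = first & 15
    # Second byte: mask flag plus 7-bit length code (<=125 literal,
    # 126 = 16-bit extended length, 127 = 64-bit extended length).
    mask_bit = (second >> 7) & 1
    length_code = second & 127
    offset = 2
    if length_code <= 125:
        payload_length = length_code
    elif length_code == 126:
        payload_length, = struct.unpack('!H', buf[2:4])
        offset = 4
    else:
        payload_length, = struct.unpack('!Q', buf[2:10])
        offset = 10
    masking_key = buf[offset:offset + 4] if mask_bit else None
    return fin, opcode, payload_length, masking_key

# A 2-byte unmasked text frame header: FIN=1, opcode=1, length=5.
assert parse_frame_header(b'\x81\x05hello') == (1, 1, 5, None)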
'Construct a WebSocket frame from an in-memory bytestring. To construct a frame from a stream of bytes, use from_file() directly.'
@classmethod
def from_bytes(cls, bytestring):
    return cls.from_file(tcp.Reader(io.BytesIO(bytestring)))
'Serialize the frame to wire format. Returns bytes.'
def __bytes__(self):
    b = bytes(self.header)
    if self.header.masking_key:
        b += Masker(self.header.masking_key)(self.payload)
    else:
        b += self.payload
    return b
'Read a WebSocket frame sent by a server or client. fp is a file-like object that could be backed by a network stream, a disk, or an in-memory stream reader.'
@classmethod
def from_file(cls, fp):
    header = FrameHeader.from_file(fp)
    payload = fp.safe_read(header.payload_length)
    if header.mask == 1 and header.masking_key:
        payload = Masker(header.masking_key)(payload)
    frame = cls(payload)
    frame.header = header
    return frame
'Retrieve object state.'
def get_state(self):
    state = {}
    for attr, cls in self._stateobject_attributes.items():
        val = getattr(self, attr)
        if val is None:
            state[attr] = None
        elif hasattr(val, 'get_state'):
            state[attr] = val.get_state()
        elif _is_list(cls):
            state[attr] = [x.get_state() for x in val]
        elif isinstance(val, dict):
            s = {}
            for k, v in val.items():
                if hasattr(v, 'get_state'):
                    s[k] = v.get_state()
                else:
                    s[k] = v
            state[attr] = s
        else:
            state[attr] = val
    return state
'Load object state from data returned by a get_state call.'
def set_state(self, state):
    state = state.copy()
    for attr, cls in self._stateobject_attributes.items():
        val = state.pop(attr)
        if val is None:
            setattr(self, attr, val)
        else:
            curr = getattr(self, attr)
            if hasattr(curr, 'set_state'):
                curr.set_state(val)
            elif hasattr(cls, 'from_state'):
                obj = cls.from_state(val)
                setattr(self, attr, obj)
            elif _is_list(cls):
                cls = cls.__parameters__[0] if cls.__parameters__ else cls.__args__[0]
                setattr(self, attr, [cls.from_state(x) for x in val])
            else:
                setattr(self, attr, cls(val))
    if state:
        raise RuntimeWarning('Unexpected State in __setstate__: {}'.format(state))
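A reduced sketch of the get_state/set_state round-trip contract for plain attributes; TinyState and its attribute map are hypothetical, and nested objects, lists, and dicts are ignored here:

class TinyState:
    # Maps attribute name -> expected type, mirroring _stateobject_attributes.
    _attributes = {'host': str, 'port': int}

    def __init__(self, host, port):
        self.host, self.port = host, port

    def get_state(self):
        return {attr: getattr(self, attr) for attr in self._attributes}

    def set_state(self, state):
        for attr, cls in self._attributes.items():
            setattr(self, attr, cls(state[attr]))

a = TinyState('example.com', 8080)
b = TinyState('', 0)
b.set_state(a.get_state())
assert (b.host, b.port) == ('example.com', 8080)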
'This function determines the next layer in the protocol stack. Arguments: top_layer: the current innermost layer. Returns: The next layer'
def next_layer(self, top_layer):
    layer = self._next_layer(top_layer)
    return self.channel.ask('next_layer', layer)
'Send a log message to the master.'
def log(self, msg, level, subs=()):
    full_msg = ['{}:{}: {}'.format(self.client_conn.address[0], self.client_conn.address[1], msg)]
    for i in subs:
        full_msg.append(' -> ' + i)
    full_msg = '\n'.join(full_msg)
    self.channel.tell('log', log.LogEntry(full_msg, level))
'Each layer usually passes itself to its child layers as a context. Properties of the context are transparently mapped to the layer, so that the following works: .. code-block:: python root_layer = Layer(None) root_layer.client_conn = 42 sub_layer = Layer(root_layer) print(sub_layer.client_conn) # 42 The root layer is passed a :py:class:`mitmproxy.proxy.RootContext` object, which provides access to :py:attr:`.client_conn <mitmproxy.proxy.RootContext.client_conn>`, :py:attr:`.next_layer <mitmproxy.proxy.RootContext.next_layer>` and other basic attributes. Args: ctx: The (read-only) parent layer / context.'
def __init__(self, ctx, **mixin_args):
    self.ctx = ctx
    """
    The parent layer.

    :type: :py:class:`Layer`
    """
    super().__init__(**mixin_args)
'Logic of the layer. Returns: Once the protocol has finished without exceptions. Raises: ~mitmproxy.exceptions.ProtocolException: if an exception occurs. No other exceptions must be raised.'
def __call__(self):
raise NotImplementedError()
'Attributes not present on the current layer are looked up on the context.'
def __getattr__(self, name):
return getattr(self.ctx, name)
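A tiny standalone illustration of the delegation this __getattr__ enables; MiniLayer and Root are stripped-down stand-ins, not the mitmproxy classes:

class MiniLayer:
    def __init__(self, ctx):
        self.ctx = ctx

    def __getattr__(self, name):
        # Only called when normal lookup fails, so the request
        # walks up the chain of parent contexts.
        return getattr(self.ctx, name)

class Root:
    client_conn = 42

leaf = MiniLayer(MiniLayer(Root()))
assert leaf.client_conn == 42  # resolved two levels up the chain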
'We try to protect the proxy from _accidentally_ connecting to itself, e.g. because of a failed transparent lookup or an invalid configuration.'
def __check_self_connect(self):
    address = self.server_conn.address
    if address:
        self_connect = (
            address[1] == self.config.options.listen_port
            and address[0] in ('localhost', '127.0.0.1', '::1')
        )
        if self_connect:
            raise exceptions.ProtocolException(
                'Invalid server address: {}\r\nThe proxy shall not connect to itself.'.format(repr(address))
            )
'Sets a new server address. If there is an existing connection, it will be closed.'
def set_server(self, address):
    if self.server_conn.connected():
        self.disconnect()
    self.log('Set new server address: {}:{}'.format(address[0], address[1]), 'debug')
    self.server_conn.address = address
    self.__check_self_connect()
'Deletes (and closes) an existing server connection. Must not be called if there is no existing connection.'
def disconnect(self):
    self.log('serverdisconnect', 'debug', [repr(self.server_conn.address)])
    address = self.server_conn.address
    self.server_conn.finish()
    self.server_conn.close()
    self.channel.tell('serverdisconnect', self.server_conn)
    self.server_conn = connections.ServerConnection(
        address,
        (self.server_conn.source_address[0], 0),
        self.config.options.spoof_source_address,
    )
'Establishes a server connection. Must not be called if there is an existing connection. Raises: ~mitmproxy.exceptions.ProtocolException: if the connection could not be established.'
def connect(self):
    if not self.server_conn.address:
        raise exceptions.ProtocolException('Cannot connect to server, no server address given.')
    self.log('serverconnect', 'debug', [repr(self.server_conn.address)])
    self.channel.ask('serverconnect', self.server_conn)
    try:
        self.server_conn.connect()
    except exceptions.TcpException as e:
        raise exceptions.ProtocolException(
            'Server connection to {} failed: {}'.format(repr(self.server_conn.address), str(e))
        )
'Adds a cert to the certstore. We register the CN in the cert plus any SANs, and also the list of names provided as an argument.'
def add_cert(self, entry, *names):
    if entry.cert.cn:
        self.certs[entry.cert.cn] = entry
    for i in entry.cert.altnames:
        self.certs[i] = entry
    for i in names:
        self.certs[i] = entry
'Returns a (cert, privkey, cert_chain) tuple. commonname: Common name for the generated certificate. Must be a valid, plain-ASCII, IDNA-encoded domain name. sans: A list of Subject Alternate Names.'
def get_cert(self, commonname, sans):
    potential_keys = self.asterisk_forms(commonname)
    for s in sans:
        potential_keys.extend(self.asterisk_forms(s))
    potential_keys.append((commonname, tuple(sans)))
    name = next(filter(lambda key: key in self.certs, potential_keys), None)
    if name:
        entry = self.certs[name]
    else:
        entry = CertStoreEntry(
            cert=dummy_cert(self.default_privatekey, self.default_ca, commonname, sans),
            privatekey=self.default_privatekey,
            chain_file=self.default_chain_file,
        )
        self.certs[(commonname, tuple(sans))] = entry
        self.expire(entry)
    return (entry.cert, entry.privatekey, entry.chain_file)
'Returns a (common name, [subject alternative names]) tuple.'
def __init__(self, cert):
self.x509 = cert
'Returns: All DNS altnames.'
@property
def altnames(self):
    altnames = []
    for i in range(self.x509.get_extension_count()):
        ext = self.x509.get_extension(i)
        if ext.get_short_name() == 'subjectAltName':
            try:
                dec = decode(ext.get_data(), asn1Spec=_GeneralNames())
            except PyAsn1Error:
                continue
            for i in dec[0]:
                if i[0].hasValue():
                    e = i[0].asOctets()
                    altnames.append(e)
    return altnames
'.replacements is a list of tuples (fpatt, rex, s): fpatt: a string specifying a filter pattern. rex: a regular expression, as a string. s: the replacement string.'
def configure(self, updated):
    if 'replacements' in updated:
        lst = []
        for rep in ctx.options.replacements:
            fpatt, rex, s = parse_hook(rep)
            flt = flowfilter.parse(fpatt)
            if not flt:
                raise exceptions.OptionsError('Invalid filter pattern: %s' % fpatt)
            try:
                re.compile(rex)
            except re.error as e:
                raise exceptions.OptionsError('Invalid regular expression: %s - %s' % (rex, str(e)))
            if s.startswith('@') and not os.path.isfile(s[1:]):
                raise exceptions.OptionsError('Invalid file path: {}'.format(s[1:]))
            lst.append((rex, s, flt))
        self.lst = lst
'Serves app on flow, and prevents further handling of the flow.'
def serve(self, app, flow):
    app = wsgi.WSGIAdaptor(app, flow.request.pretty_host, flow.request.port, version.MITMPROXY)
    err = app.serve(flow, flow.client_conn.wfile, **{'mitmproxy.master': ctx.master})
    if err:
        ctx.log.error('Error in wsgi app. %s' % err)
        raise exceptions.AddonHalt()
    flow.reply.kill()
'Raw image data. Use `is_png` to determine whether this is an embedded PNG file (true) or a DIB bitmap (false) and call a relevant parser, if needed to parse image data further.'
@property
def img(self):
    if hasattr(self, '_m_img'):
        return self._m_img if hasattr(self, '_m_img') else None
    _pos = self._io.pos()
    self._io.seek(self.ofs_img)
    self._m_img = self._io.read_bytes(self.len_img)
    self._io.seek(_pos)
    return self._m_img if hasattr(self, '_m_img') else None
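The hasattr-based caching above is the idiom Kaitai Struct's Python code generator uses for lazily parsed instances; a reduced sketch of just that pattern (LazyBlob is illustrative only):

class LazyBlob:
    def __init__(self, raw, offset, length):
        self._raw, self._offset, self._length = raw, offset, length

    @property
    def img(self):
        # Parse on first access, then serve the cached value.
        if not hasattr(self, '_m_img'):
            self._m_img = self._raw[self._offset:self._offset + self._length]
        return self._m_img

blob = LazyBlob(b'....PNGDATA', 4, 7)
assert blob.img == b'PNGDATA'
assert blob.img is blob.img  # second access hits the cache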
'Pre-reads first 8 bytes of the image to determine if it\'s an embedded PNG file.'
@property
def png_header(self):
    if hasattr(self, '_m_png_header'):
        return self._m_png_header if hasattr(self, '_m_png_header') else None
    _pos = self._io.pos()
    self._io.seek(self.ofs_img)
    self._m_png_header = self._io.read_bytes(8)
    self._io.seek(_pos)
    return self._m_png_header if hasattr(self, '_m_png_header') else None
'True if this image is in PNG format.'
@property
def is_png(self):
    if hasattr(self, '_m_is_png'):
        return self._m_is_png if hasattr(self, '_m_is_png') else None
    self._m_is_png = self.png_header == struct.pack('8b', -119, 80, 78, 71, 13, 10, 26, 10)
    return self._m_is_png if hasattr(self, '_m_is_png') else None
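For reference, that struct.pack() call just spells out the standard 8-byte PNG signature; a quick check of the equivalence:

import struct

PNG_SIGNATURE = b'\x89PNG\r\n\x1a\n'
assert struct.pack('8b', -119, 80, 78, 71, 13, 10, 26, 10) == PNG_SIGNATURE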
'If true, then we have more bytes to read.'
@property
def has_next(self):
    if hasattr(self, '_m_has_next'):
        return self._m_has_next if hasattr(self, '_m_has_next') else None
    self._m_has_next = (self.b & 128) != 0
    return self._m_has_next if hasattr(self, '_m_has_next') else None
'The 7-bit (base128) numeric value of this group.'
@property
def value(self):
    if hasattr(self, '_m_value'):
        return self._m_value if hasattr(self, '_m_value') else None
    self._m_value = self.b & 127
    return self._m_value if hasattr(self, '_m_value') else None
'Resulting value as normal integer.'
@property
def value(self):
    if hasattr(self, '_m_value'):
        return self._m_value if hasattr(self, '_m_value') else None
    self._m_value = (
        self.groups[0].value
        + ((self.groups[1].value << 7) if self.len >= 2 else 0)
        + ((self.groups[2].value << 14) if self.len >= 3 else 0)
        + ((self.groups[3].value << 21) if self.len >= 4 else 0)
        + ((self.groups[4].value << 28) if self.len >= 5 else 0)
        + ((self.groups[5].value << 35) if self.len >= 6 else 0)
        + ((self.groups[6].value << 42) if self.len >= 7 else 0)
        + ((self.groups[7].value << 49) if self.len >= 8 else 0)
    )
    return self._m_value if hasattr(self, '_m_value') else None
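The same base-128 reassembly written as a loop over raw bytes, independent of the generated groups structure; decode_varint here is a hypothetical helper for illustration:

def decode_varint(buf):
    # Returns (value, bytes consumed) for a protobuf-style unsigned varint.
    value = 0
    for i, b in enumerate(buf):
        value |= (b & 0x7F) << (7 * i)   # each group contributes 7 bits
        if not b & 0x80:                 # high bit clear: last group
            return value, i + 1
    raise ValueError('truncated varint')

# 300 = 0b100101100 -> groups 0b0101100 and 0b0000010 -> bytes 0xAC 0x02
assert decode_varint(b'\xac\x02') == (300, 2)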
'"Wire type" is a part of the "key" that carries enough information to parse value from the wire, i.e. read correct amount of bytes, but there\'s not enough informaton to interprete in unambiguously. For example, one can\'t clearly distinguish 64-bit fixed-sized integers from 64-bit floats, signed zigzag-encoded varints from regular unsigned varints, arbitrary bytes from UTF-8 encoded strings, etc.'
@property
def wire_type(self):
    if hasattr(self, '_m_wire_type'):
        return self._m_wire_type if hasattr(self, '_m_wire_type') else None
    self._m_wire_type = self._root.Pair.WireTypes(self.key.value & 7)
    return self._m_wire_type if hasattr(self, '_m_wire_type') else None
'Identifies a field of protocol. One can look up symbolic field name in a `.proto` file by this field tag.'
@property
def field_tag(self):
    if hasattr(self, '_m_field_tag'):
        return self._m_field_tag if hasattr(self, '_m_field_tag') else None
    self._m_field_tag = self.key.value >> 3
    return self._m_field_tag if hasattr(self, '_m_field_tag') else None
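Putting the two properties together: a protobuf key varint packs the field tag in the upper bits and the wire type in the low three bits. A quick hand check, assuming nothing beyond that layout:

key = 0x08            # first byte of many protobuf messages: field 1, varint
wire_type = key & 7   # low 3 bits -> 0 (varint)
field_tag = key >> 3  # remaining bits -> 1
assert (wire_type, field_tag) == (0, 1)

# Field 2 with a length-delimited payload (wire type 2) encodes as 0x12.
assert ((2 << 3) | 2) == 0x12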
'Decorate a message with a reply attribute, and send it to the master. Then wait for a response. Raises: exceptions.Kill: All connections should be closed immediately.'
def ask(self, mtype, m):
    m.reply = Reply(m)
    self.q.put((mtype, m))
    while not self.should_exit.is_set():
        try:
            g = m.reply.q.get(timeout=0.5)
        except queue.Empty:
            continue
        if g == exceptions.Kill:
            raise exceptions.Kill()
        return g
    m.reply._state = 'committed'
    raise exceptions.Kill()
'Decorate a message with a dummy reply attribute, send it to the master, then return immediately.'
def tell(self, mtype, m):
    m.reply = DummyReply()
    self.q.put((mtype, m))
'The state the reply is currently in. A normal reply object goes sequentially through the following lifecycle: 1. start: Initial state. 2. taken: The reply object has been taken to be committed. 3. committed: The reply has been sent back to the requesting party. This attribute is read-only and can only be modified by calling one of the state transition functions.'
@property
def state(self):
    return self._state
'Scripts or other parties may "take" a reply out of a normal flow. For example, intercepted flows are taken out so that the connection thread does not proceed.'
def take(self):
    if self.state != 'start':
        raise exceptions.ControlException('Reply is {}, but expected it to be start.'.format(self.state))
    self._state = 'taken'
'Ultimately, messages are committed. This is done either automatically if the message is not taken, or manually by the entity which called .take().'
def commit(self):
    if self.state != 'taken':
        raise exceptions.ControlException('Reply is {}, but expected it to be taken.'.format(self.state))
    if not self.has_message:
        raise exceptions.ControlException('There is no reply message.')
    self._state = 'committed'
    self.q.put(self.value)
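A reduced sketch of the start -> taken -> committed lifecycle that take() and commit() enforce; MiniReply is illustrative only and drops the queue and message plumbing:

class MiniReply:
    def __init__(self):
        self._state = 'start'

    def take(self):
        assert self._state == 'start', self._state
        self._state = 'taken'

    def commit(self):
        assert self._state == 'taken', self._state
        self._state = 'committed'

r = MiniReply()
r.take()     # e.g. an addon intercepts the flow
r.commit()   # later, the same party releases it
assert r._state == 'committed'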
'Change the data object to a path relative to the module.'
def push(self, subpath):
    dirname = os.path.normpath(os.path.join(self.dirname, subpath))
    ret = Data(self.name)
    ret.dirname = dirname
    return ret
'Returns a path to the package data housed at \'path\' under this module. Path can be a path to a file, or to a directory. This function will raise ValueError if the path does not exist.'
def path(self, path):
    fullpath = os.path.normpath(os.path.join(self.dirname, path))
    if not os.path.exists(fullpath):
        raise ValueError('dataPath: %s does not exist.' % fullpath)
    return fullpath
'Returns: True, if we should attempt to intercept the connection. False, if we want to employ pass-through instead.'
def should_intercept(self, server_address):
raise NotImplementedError()
'If a class defines mutable objects and implements an __eq__() method, it should not implement __hash__(), since the implementation of hashable collections requires that a key\'s hash value is immutable.'
def test_hash(self):
    with pytest.raises(TypeError):
        assert hash(TMultiDict())
'regression test for https://github.com/mitmproxy/mitmproxy/issues/1605'
def test_intercept(self):
    m = self.mkmaster(intercept='~b bar')
    f = tflow.tflow(req=tutils.treq(content='foo'))
    m.addons.handle_lifecycle('request', f)
    assert not m.view[0].intercepted
    f = tflow.tflow(req=tutils.treq(content='bar'))
    m.addons.handle_lifecycle('request', f)
    assert m.view[1].intercepted
    f = tflow.tflow(resp=tutils.tresp(content='bar'))
    m.addons.handle_lifecycle('request', f)
    assert m.view[2].intercepted
'Can be overridden to add a standard set of addons to tests.'
def addons(self):
return []
'Returns a connected Pathoc instance.'
def pathoc(self, sni=None):
    if self.ssl:
        conn = ('127.0.0.1', self.server.port)
    else:
        conn = None
    return LazyPathoc(conn, ('localhost', self.proxy.port), ssl=self.ssl, sni=sni, fp=None)
'Constructs a pathod GET request, with the appropriate base and proxy.'
def pathod(self, spec, sni=None):
    p = self.pathoc(sni=sni)
    if self.ssl:
        q = "get:'/p/%s'" % spec
    else:
        q = "get:'%s/p/%s'" % (self.server.urlbase, spec)
    with p.connect():
        return p.request(q)
'Constructs a pathod GET request, with the appropriate base and proxy.'
def pathod(self, spec, sni=None):
    if self.ssl:
        p = self.pathoc(sni=sni)
        q = "get:'/p/%s'" % spec
    else:
        p = self.pathoc()
        q = "get:'/p/%s'" % spec
    with p.connect():
        return p.request(q)
'Returns a connected Pathoc instance.'
def pathoc(self, sni=None):
    p = pathod.pathoc.Pathoc(('localhost', self.proxy.port), ssl=self.ssl, sni=sni, fp=None)
    return p
'Returns a connected Pathoc instance.'
def pathoc(self, sni=None):
    p = pathod.pathoc.Pathoc(('localhost', self.proxy.port), ssl=self.ssl, sni=sni, fp=None)
    return p
'Constructs a pathod GET request, with the appropriate base and proxy.'
def pathod(self, spec, sni=None):
    if self.ssl:
        p = self.pathoc(sni=sni)
        q = "get:'/p/%s'" % spec
    else:
        p = self.pathoc()
        q = "get:'/p/%s'" % spec
    with p.connect():
        return p.request(q)
'ssl: A dictionary of SSL parameters: cert, key, request_client_cert, cipher_list, dhparams, v3_only'
def __init__(self, ssl, q, handler_klass, addr, **kwargs):
    tcp.TCPServer.__init__(self, addr)
    if ssl is True:
        self.ssl = dict()
    elif isinstance(ssl, dict):
        self.ssl = ssl
    else:
        self.ssl = None
    self.q = q
    self.handler_klass = handler_klass
    if self.handler_klass is not None:
        self.handler_klass.kwargs = kwargs
    self.last_handler = None
'Test to bind to a given random port. Try again if the random port turned out to be blocked.'
def test_bind(self):
    for i in range(20):
        random_port = random.randrange(1024, 65535)
        try:
            c = tcp.TCPClient(('127.0.0.1', self.port), source_address=('127.0.0.1', random_port))
            with c.connect():
                assert c.rfile.readline() == str(('127.0.0.1', random_port)).encode()
                return
        except exceptions.TcpException:
            pass
'Race condition: We don\'t want to replay the flow while it is still live.'
@staticmethod
def wait_until_not_live(flow):
    s = time.time()
    while flow.live:
        time.sleep(0.001)
        if time.time() - s > 5:
            raise RuntimeError('Flow is live for too long.')
'If mitmproxy is a regular HTTP proxy, it must rewrite an absolute-form request like GET http://example.com/foo HTTP/1.0 to GET /foo HTTP/1.0 when sending the request upstream. While any server should technically accept the absolute form, this is not the case in practice.'
def test_first_line_rewrite(self):
req = ("get:'%s/p/200'" % self.server.urlbase) p = self.pathoc() with p.connect(): assert (p.request(req).status_code == 200) assert (self.server.last_log()['request']['first_line_format'] == 'relative')
'Certificate verification should be turned on by default.'
def test_secure_by_default(self):
    default_opts = options.Options()
    assert not default_opts.ssl_insecure
'mitmproxy doesn\'t support UDP or BIND SOCKS CMDs'
def test_no_connect(self):
    p = self.pathoc()
    with p.connect():
        socks.ClientGreeting(
            socks.VERSION.SOCKS5,
            [socks.METHOD.NO_AUTHENTICATION_REQUIRED],
        ).to_file(p.wfile)
        socks.Message(
            socks.VERSION.SOCKS5,
            socks.CMD.BIND,
            socks.ATYP.DOMAINNAME,
            ('example.com', 8080),
        ).to_file(p.wfile)
        p.wfile.flush()
        p.rfile.read(2)
        f = p.request('get:/p/200')
        assert f.status_code == 502
        assert 'SOCKS5 mode failure' in f.content
        assert 'mitmproxy only supports SOCKS5 CONNECT' in f.content
'Returns a connected Pathoc instance.'
def pathoc(self, ssl, sni=None):
    p = pathoc.Pathoc(('localhost', self.proxy.port), ssl=True, sni=sni, fp=None)
    return p
'Imagine a single HTTPS connection with three requests: 1. First request should pass through unmodified 2. Second request will be redirected to a different host by an inline script 3. Third request should pass through unmodified This test verifies that the original destination is restored for the third request.'
def test_redirect(self):
    self.proxy.tmaster.addons.add(ARedirectRequest(self.server2.port))
    p = self.pathoc()
    with p.connect():
        self.server.clear_log()
        self.server2.clear_log()
        r1 = p.request("get:'/p/200'")
        assert r1.status_code == 200
        assert self.server.last_log()
        assert not self.server2.expect_log(1, 0.5)

        self.server.clear_log()
        self.server2.clear_log()
        r2 = p.request("get:'/p/201'")
        assert r2.status_code == 201
        assert not self.server.expect_log(1, 0.5)
        assert self.server2.last_log()

        self.server.clear_log()
        self.server2.clear_log()
        r3 = p.request("get:'/p/202'")
        assert r3.status_code == 202
        assert self.server.last_log()
        assert not self.server2.expect_log(1, 0.5)

        assert r1.content == r2.content == r3.content
'A replayed/fake response with no upstream_cert should not connect to an upstream server'
def test_unnecessary_serverconnect(self):
    self.proxy.tmaster.addons.add(AFakeResponse())
    assert self.pathod('200').status_code == 200
    assert not self.proxy.tmaster.has_log('serverconnect')
'Updates config.check_tcp or check_ignore, depending on attr.'
def _host_pattern_on(self, attr):
    assert not hasattr(self, '_ignore_%s_backup' % attr)
    backup = []
    for proxy in self.chain:
        old_matcher = getattr(proxy.tmaster.server.config, 'check_%s' % attr)
        backup.append(old_matcher)
        setattr(
            proxy.tmaster.server.config,
            'check_%s' % attr,
            HostMatcher(['.+:%s' % self.server.port] + old_matcher.patterns),
        )
    setattr(self, '_ignore_%s_backup' % attr, backup)
'https://github.com/mitmproxy/mitmproxy/issues/2329 Client <- HTTPS -> Proxy <- HTTP -> Proxy <- HTTPS -> Server'
def test_connect_https_to_http(self):
    self.proxy.tmaster.addons.add(RewriteToHttp())
    self.chain[1].tmaster.addons.add(RewriteToHttps())
    p = self.pathoc()
    with p.connect():
        resp = p.request("get:'/p/418'")
        assert self.proxy.tmaster.state.flows[0].client_conn.tls_established
        assert not self.proxy.tmaster.state.flows[0].server_conn.tls_established
        assert not self.chain[1].tmaster.state.flows[0].client_conn.tls_established
        assert self.chain[1].tmaster.state.flows[0].server_conn.tls_established
        assert resp.status_code == 418
'Tests proper functionality of ConnectionHandler.server_reconnect mock. If we have a disconnect on a secure connection that\'s transparently proxified to an upstream http proxy, we need to send the CONNECT request again.'
def test_reconnect(self):
    class MockOnce:
        call = 0

        def mock_once(self, http1obj, req):
            self.call += 1
            if self.call == 2:
                headers = http1.assemble_request_head(req)
                http1obj.server_conn.wfile.write(headers)
                http1obj.server_conn.wfile.flush()
                raise exceptions.TcpDisconnect
            else:
                headers = http1.assemble_request_head(req)
                http1obj.server_conn.wfile.write(headers)
                http1obj.server_conn.wfile.flush()

    self.chain[0].tmaster.addons.add(RequestKiller([1, 2]))
    self.chain[1].tmaster.addons.add(RequestKiller([1]))
    p = self.pathoc()
    with p.connect():
        req = p.request('get:\'/p/418:b"content"\'')
        assert req.content == 'content'
        assert req.status_code == 418
        assert len(self.proxy.tmaster.state.flows) == 1
        assert len(self.chain[0].tmaster.state.flows) == 1
        assert len(self.chain[1].tmaster.state.flows) == 1
        with mock.patch(
            'mitmproxy.proxy.protocol.http1.Http1Layer.send_request_headers',
            side_effect=MockOnce().mock_once,
            autospec=True,
        ):
            req = p.request('get:\'/p/418:b"content2"\'')
            assert req.status_code == 502
            assert len(self.proxy.tmaster.state.flows) == 2
            assert len(self.chain[0].tmaster.state.flows) == 2
            assert len(self.chain[1].tmaster.state.flows) == 3
            assert not self.chain[1].tmaster.state.flows[-1].response
            assert not self.chain[1].tmaster.state.flows[-2].response
            with pytest.raises(exceptions.HttpException):
                p.request('get:\'/p/418:b"content3"\'')
'Returns a (messages, text log) tuple.'
def pathoc(self, specs, timeout=None, connect_to=None, ssl=None, ws_read_limit=None, use_http2=False):
    if ssl is None:
        ssl = self.ssl
    logfp = io.StringIO()
    c = pathoc.Pathoc(
        ('localhost', self.d.port),
        ssl=ssl,
        ws_read_limit=ws_read_limit,
        timeout=timeout,
        fp=logfp,
        use_http2=use_http2,
    )
    with c.connect(connect_to):
        ret = []
        for i in specs:
            resp = c.request(i)
            if resp:
                ret.append(resp)
        for frm in c.wait():
            ret.append(frm)
        c.stop()
        return (ret, logfp.getvalue())
'Fetch all tokens that are instances of klass'
def toks(self, klass):
return [i for i in self.tokens if isinstance(i, klass)]
'Fetch first token that is an instance of klass'
def tok(self, klass):
    l = self.toks(klass)
    if l:
        return l[0]
'Calculate the length of the base message without any applied actions.'
def length(self, settings):
return sum((len(x) for x in self.values(settings)))
'Return a copy of this message that is safe for previews.'
def preview_safe(self):
    tokens = [i for i in self.tokens if not isinstance(i, actions.PauseAt)]
    return self.__class__(tokens)
'Calculate the maximum length of the base message with all applied actions.'
def maximum_length(self, settings):
    l = self.length(settings)
    for i in self.actions:
        if isinstance(i, actions.InjectAt):
            l += len(i.value.get_generator(settings))
    return l
'A dictionary that should be logged if this message is served.'
def log(self, settings):
    ret = {}
    for i in self.logattrs:
        v = getattr(self, i)
        if hasattr(v, 'values'):
            v = [x[:LOG_TRUNCATE] for x in v.values(settings)]
            v = strutils.bytes_to_escaped_str(''.join(v))
        elif hasattr(v, '__len__'):
            v = v[:LOG_TRUNCATE]
            v = strutils.bytes_to_escaped_str(v)
        ret[i] = v
    ret['spec'] = self.spec()
    return ret
'Resolves offset specifications to a numeric offset. Returns a copy of the action object.'
def resolve(self, settings, msg):
    c = copy.copy(self)
    l = msg.length(settings)
    if c.offset == 'r':
        c.offset = random.randrange(l)
    elif c.offset == 'a':
        c.offset = l + 1
    return c
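In other words, the symbolic offsets 'r' and 'a' resolve against the base message length: 'r' picks a random position within it and 'a' means just past the end. A small illustration of that mapping; resolve_offset and the numbers are made up for the example:

import random

def resolve_offset(offset, message_length):
    # Mirrors the branch above for the two symbolic forms.
    if offset == 'r':
        return random.randrange(message_length)
    if offset == 'a':
        return message_length + 1
    return offset  # already numeric

assert resolve_offset('a', 10) == 11
assert 0 <= resolve_offset('r', 10) < 10
assert resolve_offset(3, 10) == 3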
'Handle a CONNECT request.'
def handle_http_connect(self, connect, lg):
    self.pathod_handler.wfile.write(
        'HTTP/1.1 200 Connection established\r\n' +
        ('Proxy-agent: %s\r\n' % version.PATHOD.encode()) +
        '\r\n'
    )
    self.pathod_handler.wfile.flush()
    if not self.pathod_handler.server.ssloptions.not_after_connect:
        try:
            cert, key, chain_file_ = self.pathod_handler.server.ssloptions.get_cert(connect[0].encode())
            self.pathod_handler.convert_to_ssl(
                cert,
                key,
                handle_sni=self.pathod_handler.handle_sni,
                request_client_cert=self.pathod_handler.server.ssloptions.request_client_cert,
                cipher_list=self.pathod_handler.server.ssloptions.ciphers,
                method=self.pathod_handler.server.ssloptions.ssl_version,
                options=self.pathod_handler.server.ssloptions.ssl_options,
                alpn_select=self.pathod_handler.server.ssloptions.alpn_select,
            )
        except exceptions.TlsException as v:
            s = str(v)
            lg(s)
            return (None, dict(type='error', msg=s))
    return (self.pathod_handler.handle_http_request, None)
'Test that the correct number of layers are returned when the client is not logged in and all are public'
def test_layer_get_list_unauth_all_public(self):
    resp = self.api_client.get(self.list_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 8)
'Test that if a layer is not public then not all are returned when the client is not logged in'
def test_layers_get_list_unauth_some_public(self):
    layer = Layer.objects.all()[0]
    layer.set_permissions(self.perm_spec)
    resp = self.api_client.get(self.list_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 7)
'Test that if a layer is not public then all are returned if the client is logged in'
def test_layers_get_list_auth_some_public(self):
    self.api_client.client.login(username=self.user, password=self.passwd)
    layer = Layer.objects.all()[0]
    layer.set_permissions(self.perm_spec)
    resp = self.api_client.get(self.list_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 8)
'Test that if a layer is only visible by admin, then it does not appear in the unauthenticated list nor in the list when logged in as bobby'
def test_layer_get_list_layer_private_to_one_user(self):
    perm_spec = {'users': {'admin': ['view_resourcebase']}, 'groups': {}}
    layer = Layer.objects.all()[0]
    layer.set_permissions(perm_spec)
    resp = self.api_client.get(self.list_url)
    self.assertEquals(len(self.deserialize(resp)['objects']), 7)
    self.api_client.client.login(username='bobby', password='bob')
    resp = self.api_client.get(self.list_url)
    self.assertEquals(len(self.deserialize(resp)['objects']), 7)
    self.api_client.client.login(username=self.user, password=self.passwd)
    resp = self.api_client.get(self.list_url)
    self.assertEquals(len(self.deserialize(resp)['objects']), 8)
'Test that layer detail gives 401 when not public and not logged in'
def test_layer_get_detail_unauth_layer_not_public(self):
    layer = Layer.objects.all()[0]
    layer.set_permissions(self.perm_spec)
    self.assertHttpUnauthorized(self.api_client.get(self.list_url + str(layer.id) + '/'))
    self.api_client.client.login(username=self.user, password=self.passwd)
    resp = self.api_client.get(self.list_url + str(layer.id) + '/')
    self.assertValidJSONResponse(resp)
'Test that a new user can access the public available layers'
def test_new_user_has_access_to_old_layers(self):
    from geonode.people.models import Profile
    Profile.objects.create(
        username='imnew',
        password='pbkdf2_sha256$12000$UE4gAxckVj4Z$N 6NbOXIQWWblfInIoq/Ta34FdRiPhawCIZ+sOO3YQs='
    )
    self.api_client.client.login(username='imnew', password='thepwd')
    resp = self.api_client.get(self.list_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 8)
'Test category filtering'
def test_category_filters(self):
    filter_url = self.list_url + '?category__identifier=location'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 3)
    filter_url = self.list_url + '?category__identifier__in=location&category__identifier__in=biota'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 5)
'Test keywords filtering'
def test_tag_filters(self):
    filter_url = self.list_url + '?keywords__slug=layertagunique'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 1)
    filter_url = self.list_url + '?keywords__slug__in=layertagunique&keywords__slug__in=populartag'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 8)
'Test owner filtering'
def test_owner_filters(self):
    filter_url = self.list_url + '?owner__username=user1'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 2)
    filter_url = self.list_url + '?owner__username__in=user1&owner__username__in=foo'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 3)
'Test title filtering'
def test_title_filter(self):
    filter_url = self.list_url + '?title=layer2'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 1)
'Test date filtering'
def test_date_filter(self):
    filter_url = self.list_url + '?date__exact=1985-01-01'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 1)
    filter_url = self.list_url + '?date__gte=1985-01-01'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 3)
    filter_url = self.list_url + '?date__range=1950-01-01,1985-01-01'
    resp = self.api_client.get(filter_url)
    self.assertValidJSONResponse(resp)
    self.assertEquals(len(self.deserialize(resp)['objects']), 4)
'adds filtering by current language'
def build_filters(self, filters={}):
    id = filters.pop('id', None)
    orm_filters = super(ThesaurusKeywordResource, self).build_filters(filters)
    if id is not None:
        orm_filters['keyword__id'] = id
    orm_filters['lang'] = filters['lang'] if 'lang' in filters else get_language()
    if 'thesaurus' in filters:
        orm_filters['keyword__thesaurus__identifier'] = filters['thesaurus']
    return orm_filters
'adds filtering by group functionality'
def build_filters(self, filters=None):
    if filters is None:
        filters = {}
    orm_filters = super(ProfileResource, self).build_filters(filters)
    if 'group' in filters:
        orm_filters['group'] = filters['group']
    return orm_filters
'filter by group if applicable by group functionality'
def apply_filters(self, request, applicable_filters):
    group = applicable_filters.pop('group', None)
    semi_filtered = super(ProfileResource, self).apply_filters(request, applicable_filters)
    if group is not None:
        semi_filtered = semi_filtered.filter(groupmember__group__slug=group)
    return semi_filtered
'Modify the queryset q to limit to data that intersects with the provided bbox. bbox - 4-tuple of floats representing \'southwest_lng,southwest_lat,northeast_lng,northeast_lat\'. Returns the modified query.'
def filter_bbox(self, queryset, bbox):
    bbox = bbox.split(',')
    bbox = map(str, bbox)
    intersects = ~(
        Q(bbox_x0__gt=bbox[2]) |
        Q(bbox_x1__lt=bbox[0]) |
        Q(bbox_y0__gt=bbox[3]) |
        Q(bbox_y1__lt=bbox[1])
    )
    return queryset.filter(intersects)
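The negated Q expression encodes the usual "not disjoint = intersects" rule for axis-aligned boxes; a plain-Python restatement of the same test, with hypothetical sample boxes:

def boxes_intersect(a, b):
    # Each box is (x0, y0, x1, y1). Two boxes intersect unless one lies
    # entirely to the left/right/above/below the other.
    ax0, ay0, ax1, ay1 = a
    bx0, by0, bx1, by1 = b
    return not (ax0 > bx1 or ax1 < bx0 or ay0 > by1 or ay1 < by0)

assert boxes_intersect((0, 0, 10, 10), (5, 5, 20, 20))
assert not boxes_intersect((0, 0, 10, 10), (11, 0, 20, 10))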
'Returns a serialized list of resources. Calls ``obj_get_list`` to provide the data, then handles that result set and serializes it. Should return a HttpResponse (200 OK).'
def get_list(self, request, **kwargs):
    base_bundle = self.build_bundle(request=request)
    objects = self.obj_get_list(bundle=base_bundle, **self.remove_api_resource_names(kwargs))
    sorted_objects = self.apply_sorting(objects, options=request.GET)
    paginator = self._meta.paginator_class(
        request.GET,
        sorted_objects,
        resource_uri=self.get_resource_uri(),
        limit=self._meta.limit,
        max_limit=self._meta.max_limit,
        collection_name=self._meta.collection_name,
    )
    to_be_serialized = paginator.page()
    to_be_serialized = self.alter_list_data_to_serialize(request, to_be_serialized)
    return self.create_response(request, to_be_serialized, response_objects=objects)
'Format the objects for output in a response.'
def format_objects(self, objects):
return objects.values(*self.VALUES)
'Extracts the common "which-format/serialize/return-response" cycle. Mostly a useful shortcut/hook.'
def create_response(self, request, data, response_class=HttpResponse, response_objects=None, **response_kwargs):
    filtered_objects_ids = None
    if response_objects:
        filtered_objects_ids = [
            item.id for item in response_objects
            if request.user.has_perm('view_resourcebase', item.get_self_resource())
        ]
    if isinstance(data, dict) and 'objects' in data and not isinstance(data['objects'], list):
        if filtered_objects_ids:
            data['objects'] = [
                x for x in list(self.format_objects(data['objects']))
                if x['id'] in filtered_objects_ids
            ]
        else:
            data['objects'] = list(self.format_objects(data['objects']))
    desired_format = self.determine_format(request)
    serialized = self.serialize(request, data, desired_format)
    return response_class(
        content=serialized,
        content_type=build_content_type(desired_format),
        **response_kwargs
    )
'do what we should at the given state of the upload'
def cleanup(self):
pass
'Method to login the GeoNode site'
def login(self):
    self.csrf_token = self.get_csrf_token()
    params = {
        'csrfmiddlewaretoken': self.csrf_token,
        'username': self.user,
        'next': '/',
        'password': self.passwd,
    }
    self.make_request(reverse('account_login'), data=urllib.urlencode(params))
    self.csrf_token = self.get_csrf_token()
'function that uploads a file, or a collection of files, to the GeoNode'
def upload_file(self, _file):
    if not self.csrf_token:
        self.login()
    spatial_files = ('dbf_file', 'shx_file', 'prj_file')
    base, ext = os.path.splitext(_file)
    params = {
        'permissions': '{ "users": {"AnonymousUser": ["view_resourcebase"]} , "groups":{}}',
        'csrfmiddlewaretoken': self.csrf_token,
    }
    if ext.lower() == '.shp':
        for spatial_file in spatial_files:
            ext, _ = spatial_file.split('_')
            file_path = base + '.' + ext
            if os.path.exists(file_path):
                params[spatial_file] = open(file_path, 'rb')
    params['base_file'] = open(_file, 'rb')
    resp = self.make_request(upload_step(), data=params, ajax=True)
    data = resp.read()
    try:
        return (resp, json.loads(data))
    except ValueError:
        raise ValueError('probably not json, status %s' % resp.getcode(), data)
'Method that make a get request and passes the results to bs4 Takes a path and returns a tuple'
def get_html(self, path, debug=True):
    resp = self.get(path, debug)
    return (resp, BeautifulSoup(resp.read()))
'Get a csrf_token from the home page or read from the cookie jar based on the last response'
def get_csrf_token(self, last=False):
    if not last:
        self.get('/')
    csrf = [c for c in self.cookies.cookiejar if c.name == 'csrftoken']
    return csrf[0].value if csrf else None
'Check that the final layer page renders correctly after a layer is uploaded'
def check_layer_geonode_page(self, path):
    resp, _ = self.client.get_html(path)
    self.assertTrue('content-type' in resp.headers)
    self.assertTrue(resp.headers['content-type'].startswith('text/html'))
'Check that a layer shows up in GeoServer\'s get capabilities document'
def check_layer_geoserver_caps(self, type_name):
    wms = get_wms(type_name=type_name)
    ws, layer_name = type_name.split(':')
    self.assertTrue(layer_name in wms.contents, '%s is not in %s' % (layer_name, wms.contents))
'Check that a layer shows up in GeoServer rest api after the uploader is done'
def check_layer_geoserver_rest(self, layer_name):
    layer = self.catalog.get_layer(layer_name)
    self.assertIsNotNone(layer is not None)
'Verify the initial save step'
def check_save_step(self, resp, data):
    self.assertEquals(resp.code, 200)
    self.assertTrue(isinstance(data, dict))
    self.assertTrue(data['success'], 'expected success but got %s' % data)
    self.assertTrue('redirect_to' in data)
'Method to check if a layer was correctly uploaded to the GeoNode. arguments: file path, the django http response Checks to see if a layer is configured in Django Checks to see if a layer is configured in GeoServer checks the Rest API checks the get cap document'
def complete_upload(self, file_path, resp, data, is_raster=False):
    layer_name, ext = os.path.splitext(os.path.basename(file_path))
    self.check_save_step(resp, data)
    layer_page = self.finish_upload(data['redirect_to'], layer_name, is_raster)
    self.check_layer_complete(layer_page, layer_name)
'check everything to verify the layer is complete'
def check_layer_complete(self, layer_page, original_name):
    self.check_layer_geonode_page(layer_page)
    type_name = os.path.basename(layer_page)
    layer_name = type_name.split(':')[1]
    caps_found = False
    for i in range(10):
        time.sleep(0.5)
        try:
            self.check_layer_geoserver_caps(type_name)
            caps_found = True
        except:
            pass
    self.assertTrue(caps_found)
    self.check_layer_geoserver_rest(layer_name)
    self.check_upload_model(layer_name)
'Makes sure that we got the correct response from a layer that can\'t be uploaded'
def check_invalid_projection(self, layer_name, resp, data):
    self.assertTrue(resp.code, 200)
    self.assertTrue(data['success'])
    self.assertEquals(upload_step('srs'), data['redirect_to'])
    resp, soup = self.client.get_html(data['redirect_to'])
    h2 = soup.find_all(['h2'])[0]
    self.assertTrue(str(h2).find(layer_name))
'Tests if a vector layer can be uploaded to a running GeoNode GeoServer'
def test_shp_upload(self):
    fname = os.path.join(GOOD_DATA, 'vector', 'san_andres_y_providencia_water.shp')
    self.upload_file(fname, self.complete_upload)
'Tests if a raster layer can be uploaded to a running GeoNode GeoServer'
def test_raster_upload(self):
    fname = os.path.join(GOOD_DATA, 'raster', 'relief_san_andres.tif')
    self.upload_file(fname, self.complete_raster_upload)
'Test uploading a zipped shapefile'
def test_zipped_upload(self):
    fd, abspath = self.temp_file('.zip')
    fp = os.fdopen(fd, 'wb')
    zf = ZipFile(fp, 'w')
    fpath = os.path.join(GOOD_DATA, 'vector', 'san_andres_y_providencia_poi.*')
    for f in glob.glob(fpath):
        zf.write(f, os.path.basename(f))
    zf.close()
    self.upload_file(abspath, self.complete_upload, check_name='san_andres_y_providencia_poi')