'Return kwargs for an HTTP request. :rtype: dict'
@property
def request_kwargs(self):
kwargs = {'url': self.stats_url}
if self.config.get('scheme') == 'https':
    kwargs['verify'] = self.config.get('verify_ssl_cert', False)
if 'username' in self.config and 'password' in self.config:
    kwargs['auth'] = (self.config['username'], self.config['password'])
LOGGER.debug('Request kwargs: %r', kwargs)
return kwargs
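A rough usage sketch (the plugin instance, config values, and stats_url below are hypothetical; the keyword arguments are standard requests.get parameters):

# With config = {'scheme': 'https', 'username': 'nr', 'password': 'secret'}
# and stats_url = 'https://localhost:8443/stats', the property yields:
#   {'url': 'https://localhost:8443/stats',
#    'verify': False,              # verify_ssl_cert defaults to False here
#    'auth': ('nr', 'secret')}
response = requests.get(**plugin.request_kwargs)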
'Fetch the data from the stats URL :rtype: list'
def fetch_data(self):
data = super(CSVStatsPlugin, self).fetch_data()
if not data:
    return list()
temp = tempfile.TemporaryFile()
temp.write(data)
temp.seek(0)
reader = csv.DictReader(temp)
data = list()
for row in reader:
    data.append(row)
temp.close()
return data
'Poll the HTTP CSV endpoint for stats data'
def poll(self):
self.initialize()
data = self.fetch_data()
if data:
    self.add_datapoints(data)
self.finish()
'Fetch the data from the stats URL :rtype: dict'
def fetch_data(self):
data = self.http_get()
try:
    return data.json() if data else {}
except Exception as error:
    LOGGER.error('JSON decoding error: %r', error)
    return {}
'Poll HTTP JSON endpoint for stats data'
def poll(self):
self.initialize()
data = self.fetch_data()
if data:
    self.add_datapoints(data)
self.finish()
'Create the DSN used to connect :return str: The DSN'
@property
def dsn(self):
dsn = ("host='%(host)s' port=%(port)i dbname='pgbouncer' user='%(user)s'" % self.config) if self.config.get('password'): dsn += (" password='%s'" % self.config['password']) return dsn
'Return the summed data as a dict :rtype: dict'
def sum_data(self, stats):
data = {'Queue': {'Current': 0, 'Max': 0},
        'Sessions': {'Current': 0, 'Max': 0, 'Total': 0},
        'Bytes': {'In': 0, 'Out': 0},
        'Denied': {'Request': 0, 'Response': 0},
        'Errors': {'Request': 0, 'Response': 0, 'Connections': 0},
        'Warnings': {'Retry': 0, 'Redispatch': 0},
        'Server': {'Downtime': 0}}
# Map each HAProxy CSV column to its (section, key) slot in the output
columns = {'qcur': ('Queue', 'Current'), 'qmax': ('Queue', 'Max'),
           'scur': ('Sessions', 'Current'), 'smax': ('Sessions', 'Max'),
           'stot': ('Sessions', 'Total'), 'bin': ('Bytes', 'In'),
           'bout': ('Bytes', 'Out'), 'dreq': ('Denied', 'Request'),
           'dresp': ('Denied', 'Response'), 'ereq': ('Errors', 'Request'),
           'eresp': ('Errors', 'Response'), 'econ': ('Errors', 'Connections'),
           'wretr': ('Warnings', 'Retry'),
           'wredis': ('Warnings', 'Redispatch'),
           'downtime': ('Server', 'Downtime')}
for row in stats:
    for column, (section, key) in columns.items():
        data[section][key] += int(row.get(column) or 0)
return data
'Add all of the data points for a node :param list stats: The parsed csv content'
def add_datapoints(self, stats):
if not stats:
    return
stats = self.sum_data(stats)
# sum_data's output uses the capitalized key 'Server'
for section in [key for key in stats if key != 'Server']:
    for key in stats[section]:
        self.add_derive_value('%s/%s' % (section, key),
                              self.UNIT.get(section, dict()).get(key, ''),
                              stats[section][key])
self.add_gauge_value('Server/Downtime', 'ms', stats['Server']['Downtime'])
'Connect to PostgreSQL, returning the connection object. :rtype: psycopg2.connection'
def connect(self):
conn = psycopg2.connect(**self.connection_arguments)
conn.set_isolation_level(extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return conn
'Create connection parameter dictionary for psycopg2.connect :return dict: The dictionary to be passed to psycopg2.connect via double-splat'
@property
def connection_arguments(self):
filtered_args = ['name', 'superuser', 'relation_stats']
args = {}
for key in set(self.config) - set(filtered_args):
    if key == 'dbname':
        args['database'] = self.config[key]
    else:
        args[key] = self.config[key]
return args
'Return the connected server version as a (major, minor, patch) tuple :returns: tuple'
@property
def server_version(self):
# use integer division so the tuple stays integral on Python 3 as well
return ((self.connection.server_version % 1000000) // 10000,
        (self.connection.server_version % 10000) // 100,
        self.connection.server_version % 100)
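A worked example of the decomposition, using psycopg2's integer version encoding (e.g. 90204 for PostgreSQL 9.2.4):

# (90204 % 1000000) // 10000 -> 9
# (90204 % 10000) // 100     -> 2
#  90204 % 100               -> 4
# so server_version == (9, 2, 4)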
'Add all of the data points for a node :param str stats: The stub stats content'
def add_datapoints(self, stats):
if not stats:
    return
matches = PATTERN.match(stats)
if matches:
    for key in self.KEYS.keys():
        try:
            value = int(matches.group(key) or 0)
        except (IndexError, ValueError):
            value = 0
        if key in self.GAUGES:
            self.add_gauge_value(self.KEYS[key], self.TYPES[key], value)
        else:
            self.add_derive_value(self.KEYS[key], self.TYPES[key], value)
else:
    LOGGER.debug('Stats output: %r', stats)
'Add all of the data points for a node :param list node_data: all of the nodes :param list queue_data: all of the queues :param list channel_data: all of the channels'
def add_node_datapoints(self, node_data, queue_data, channel_data):
channels = 0
for node in node_data:
    name = node['name'].split('@')[-1]
    self.add_node_channel_datapoints(name, channel_data)
    self.add_node_message_datapoints(name, queue_data, channel_data)
    self.add_node_queue_datapoints(name, queue_data)
    count = 0
    for channel in channel_data:
        if channel['node'].split('@')[-1] == name:
            count += 1
    channels += count
    base_name = 'Node/%s' % name
    self.add_gauge_value('%s/Channels/Open' % base_name, 'channels', count)
    self.add_gauge_value('%s/Erlang Processes' % base_name, 'processes',
                         node.get('proc_used', 0))
    self.add_gauge_value('%s/File Descriptors' % base_name, 'fds',
                         node.get('fd_used', 0))
    self.add_gauge_value('%s/Memory' % base_name, 'bytes',
                         node.get('mem_used', 0))
    self.add_gauge_value('%s/Sockets' % base_name, 'sockets',
                         node.get('sockets_used', 0))
self.add_gauge_value('Summary/Channels', 'channels', channels)
self.add_gauge_value('Summary/Consumers', 'consumers', self.consumers)
'Add datapoints for a node, creating summary values for top-level queue consumer counts and message counts. :param str node: The node name :param list channel_data: The full stack of queue metrics'
def add_node_channel_datapoints(self, node, channel_data):
channel_flow_blocked = 0
for channel in channel_data:
    if channel['node'].split('@')[-1] == node:
        if channel.get('client_flow_blocked'):
            channel_flow_blocked += 1
self.add_gauge_value('Node/%s/Channels/Blocked' % node, 'channels',
                     channel_flow_blocked)
'Add message stats for the node :param str node: The node name :param list queue_data: all of the queues :param list channel_data: all of the channels'
def add_node_message_datapoints(self, node, queue_data, channel_data):
base_name = 'Node/%s/Messages' % node
keys = self.DUMMY_STATS.keys()
count, total, min_val, max_val, values = self.initialize_counters(keys)
message_stats = list()
for channel in channel_data:
    if channel['node'].split('@')[-1] == node:
        stats = channel.get('message_stats')
        if stats:
            message_stats.append(stats)
for stat_block in message_stats:
    for key in keys:
        total[key] += stat_block.get(key, 0)
# Friendly metric names for the raw RabbitMQ stat keys
names = {'ack': 'Acknowledged',
         'deliver': 'Delivered',
         'deliver_get': 'Delivered (Total)',
         'deliver_no_ack': 'Delivered No-Ack',
         'get': 'Got',
         'get_no_ack': 'Got No-Ack',
         'publish': 'Published',
         'redeliver': 'Redelivered'}
for key in keys:
    self.add_derive_value('%s/%s' % (base_name, names.get(key, key)),
                          'messages', total[key])
keys = ['messages_ready', 'messages_unacknowledged']
count, total, min_val, max_val, values = self.initialize_counters(keys)
for queue in queue_data:
    if queue['node'].split('@')[-1] == node:
        for key in keys:
            total[key] += queue.get(key, 0)
self.add_gauge_value('%s Available' % base_name, 'messages',
                     total['messages_ready'])
self.add_gauge_value('%s Unacknowledged' % base_name, 'messages',
                     total['messages_unacknowledged'])
'Add datapoints for a node, creating summary values for top-level queue consumer counts and message counts. :param str node: The node name :param list queue_data: The full stack of queue metrics'
def add_node_queue_datapoints(self, node, queue_data):
keys = ['consumers', 'active_consumers', 'idle_consumers']
count, total, min_val, max_val, values = self.initialize_counters(keys)
# idle_consumers is derived below, so drop it from the per-queue loop
del keys[2]
for queue in queue_data:
    if queue['node'].split('@')[-1] == node:
        for key in keys:
            count[key] += 1
            value = queue.get(key, 0)
            total[key] += value
            values[key].append(value)
key = 'idle_consumers'
count[key] += count['consumers']
idle_count = total['consumers'] - total['active_consumers']
total[key] += idle_count
values[key].append(idle_count)
base_name = 'Node/%s/Consumers' % node
self.add_gauge_value('%s/Count' % base_name, 'consumers',
                     total['consumers'], None, None, count['consumers'])
self.consumers += total['consumers']
self.add_gauge_value('%s/Active' % base_name, 'consumers',
                     total['active_consumers'], None, None,
                     count['active_consumers'])
self.add_gauge_value('%s/Idle' % base_name, 'consumers',
                     total['idle_consumers'], None, None,
                     count['idle_consumers'])
'Check whether the data for a vhost queue should be tracked or not. The check is based on the user configuration; no configuration means track everything. :param str vhost_name: the virtual host name :param str queue_name: the queue name'
def track_vhost_queue(self, vhost_name, queue_name):
TRACK_EVERYTHING = dict()
tracked_vhosts = self.config.get('vhosts', TRACK_EVERYTHING)
if tracked_vhosts is TRACK_EVERYTHING:
    return True
vhost_settings = tracked_vhosts.get(vhost_name) or {}
vhost_queues = vhost_settings.get('queues', [])
if vhost_name in tracked_vhosts and vhost_queues == []:
    return True
return queue_name in vhost_queues
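A hypothetical vhosts configuration (names invented) showing the three outcomes:

# vhosts:
#   shop:                    # listed with no queue list: track all its queues
#   billing:
#     queues: [invoices]     # explicit list: track only these queues
#
# track_vhost_queue('shop', 'anything')    -> True
# track_vhost_queue('billing', 'invoices') -> True
# track_vhost_queue('billing', 'other')    -> False
# and with no 'vhosts' key in the config at all, everything is tracked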
'Add per-queue datapoints to the processing stack. :param list queue_data: The raw queue data list'
def add_queue_datapoints(self, queue_data):
count = 0
available, consumers, deliver, publish, redeliver, unacked = 0, 0, 0, 0, 0, 0
for count, queue in enumerate(queue_data):
    if queue['name'][0:7] == 'amq.gen':
        LOGGER.debug('Skipping auto-named queue: %s', queue['name'])
        continue
    message_stats = queue.get('message_stats', dict())
    if not message_stats:
        message_stats = self.DUMMY_STATS
    vhost = 'Default' if queue['vhost'] == '/' else queue['vhost']
    base_name = 'Queue/%s/%s' % (vhost, queue['name'])
    if not self.track_vhost_queue(vhost, queue['name']):
        continue
    self.add_gauge_value('%s/Consumers' % base_name, 'consumers',
                         queue.get('consumers', 0))
    base_name = 'Queue/%s/%s/Messages' % (vhost, queue['name'])
    self.add_derive_value('%s/Acknowledged' % base_name, 'messages',
                          message_stats.get('ack', 0))
    self.add_derive_value('%s/Delivered (All)' % base_name, 'messages',
                          message_stats.get('deliver_get', 0))
    self.add_derive_value('%s/Delivered' % base_name, 'messages',
                          message_stats.get('deliver', 0))
    self.add_derive_value('%s/Delivered No-Ack' % base_name, 'messages',
                          message_stats.get('deliver_no_ack', 0))
    self.add_derive_value('%s/Get' % base_name, 'messages',
                          message_stats.get('get', 0))
    self.add_derive_value('%s/Get No-Ack' % base_name, 'messages',
                          message_stats.get('get_no_ack', 0))
    self.add_derive_value('%s/Published' % base_name, 'messages',
                          message_stats.get('publish', 0))
    self.add_derive_value('%s/Redelivered' % base_name, 'messages',
                          message_stats.get('redeliver', 0))
    self.add_gauge_value('%s Available' % base_name, 'messages',
                         queue.get('messages_ready', 0))
    self.add_gauge_value('%s Unacknowledged' % base_name, 'messages',
                         queue.get('messages_unacknowledged', 0))
    available += queue.get('messages_ready', 0)
    deliver += message_stats.get('deliver_get', 0)
    publish += message_stats.get('publish', 0)
    redeliver += message_stats.get('redeliver', 0)
    unacked += queue.get('messages_unacknowledged', 0)
self.add_derive_value('Summary/Messages/Delivered', 'messages', deliver,
                      count=count)
self.add_derive_value('Summary/Messages/Published', 'messages', publish,
                      count=count)
self.add_derive_value('Summary/Messages/Redelivered', 'messages', redeliver,
                      count=count)
self.add_gauge_value('Summary/Messages Available', 'messages', available,
                     count=count)
self.add_gauge_value('Summary/Messages Unacknowledged', 'messages', unacked,
                     count=count)
'Make a HTTP request for the URL. :param str url: The URL to request :param dict params: Get query string parameters'
def http_get(self, url, params=None):
kwargs = {'url': url,
          'auth': (self.config.get('username', self.DEFAULT_USER),
                   self.config.get('password', self.DEFAULT_PASSWORD)),
          'verify': self.config.get('verify_ssl_cert', True)}
if params:
    kwargs['params'] = params
try:
    return self.requests_session.get(**kwargs)
except requests.ConnectionError as error:
    LOGGER.error('Error fetching data from %s: %s', url, error)
    return None
'Fetch the data from the RabbitMQ server for the specified data type :param str data_type: The type of data to query :param list columns: Ask for specific columns :rtype: list'
def fetch_data(self, data_type, columns=None):
url = '%s/%s' % (self.rabbitmq_base_url, data_type)
params = {'columns': ','.join(columns)} if columns else {}
response = self.http_get(url, params)
if not response or response.status_code != 200:
    if response:
        LOGGER.error('Error response from %s (%s): %s', url,
                     response.status_code, response.content)
    return list()
try:
    return response.json()
except Exception as error:
    LOGGER.error('JSON decoding error: %r', error)
    return list()
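A hedged usage sketch (the column names are illustrative of the RabbitMQ management API, not an exhaustive list):

# Issues GET <rabbitmq_base_url>/queues?columns=name,node,messages_ready
queues = self.fetch_data('queues',
                         columns=['name', 'node', 'messages_ready'])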
'Return the channel data from the RabbitMQ server :rtype: list'
def fetch_channel_data(self):
return self.fetch_data('channels')
'Return the node data from the RabbitMQ server :rtype: list'
def fetch_node_data(self):
return self.fetch_data('nodes')
'Return the queue data from the RabbitMQ server :rtype: list'
def fetch_queue_data(self):
return self.fetch_data('queues')
'Poll the RabbitMQ server'
def poll(self):
LOGGER.info('Polling RabbitMQ via %s', self.rabbitmq_base_url)
start_time = time.time()
self.requests_session = requests.Session()
self.derive = dict()
self.gauge = dict()
self.rate = dict()
self.consumers = 0
channel_data = self.fetch_channel_data()
node_data = self.fetch_node_data()
queue_data = self.fetch_queue_data()
self.add_queue_datapoints(queue_data)
self.add_node_datapoints(node_data, queue_data, channel_data)
LOGGER.info('Polling complete in %.2f seconds', time.time() - start_time)
'Return the fully composed RabbitMQ base URL :rtype: str'
@property
def rabbitmq_base_url(self):
port = self.config.get('port', self.DEFAULT_PORT)
secure = self.config.get('secure', False)
host = self.config.get('host', self.DEFAULT_HOST)
api_path = self.config.get('api_path', self.DEFAULT_API_PATH)
scheme = 'https' if secure else 'http'
return '{scheme}://{host}:{port}{api_path}'.format(
    scheme=scheme, host=host, port=port, api_path=api_path)
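For illustration only, assuming the class defaults match the management plugin's usual values (an assumption, not confirmed by this snippet):

# host='localhost', port=15672, secure=False, api_path='/api'
# -> 'http://localhost:15672/api'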
'Add all of the data points for a node :param dict stats: The stats content from APC'
def add_datapoints(self, stats):
shared_memory = stats.get('shared_memory', dict())
self.add_gauge_value('Shared Memory/Available', 'bytes',
                     shared_memory.get('avail_mem', 0))
self.add_gauge_value('Shared Memory/Segment Size', 'bytes',
                     shared_memory.get('seg_size', 0))
self.add_gauge_value('Shared Memory/Segment Count', 'segments',
                     shared_memory.get('nseg',
                                       shared_memory.get('num_seg', 0)))
system_stats = stats.get('system_stats', dict())
self.add_gauge_value('System Cache/Slots', 'slots',
                     system_stats.get('nslots',
                                      system_stats.get('num_slots', 0)))
self.add_gauge_value('System Cache/Entries', 'files',
                     system_stats.get('nentries',
                                      system_stats.get('num_entries', 0)))
self.add_gauge_value('System Cache/Size', 'bytes',
                     system_stats.get('mem_size', 0))
self.add_gauge_value('System Cache/Expunges', 'files',
                     system_stats.get('nexpunges',
                                      system_stats.get('num_expunges', 0)))
hits = system_stats.get('nhits', system_stats.get('num_hits', 0))
misses = system_stats.get('nmisses', system_stats.get('num_misses', 0))
total = hits + misses
effectiveness = (float(hits) / float(total)) * 100 if total > 0 else 0
self.add_gauge_value('System Cache/Effectiveness', 'percent', effectiveness)
self.add_derive_value('System Cache/Hits', 'files', hits)
self.add_derive_value('System Cache/Misses', 'files', misses)
self.add_derive_value('System Cache/Inserts', 'files',
                      system_stats.get('ninserts',
                                       system_stats.get('num_inserts', 0)))
user_stats = stats.get('user_stats', dict())
self.add_gauge_value('User Cache/Slots', 'slots',
                     user_stats.get('nslots',
                                    user_stats.get('num_slots', 0)))
self.add_gauge_value('User Cache/Entries', 'keys',
                     user_stats.get('nentries',
                                    user_stats.get('num_entries', 0)))
self.add_gauge_value('User Cache/Size', 'bytes',
                     user_stats.get('mem_size', 0))
self.add_gauge_value('User Cache/Expunges', 'keys',
                     user_stats.get('nexpunges',
                                    user_stats.get('num_expunges', 0)))
hits = user_stats.get('nhits', user_stats.get('num_hits', 0))
misses = user_stats.get('nmisses', user_stats.get('num_misses', 0))
total = hits + misses
effectiveness = (float(hits) / float(total)) * 100 if total > 0 else 0
self.add_gauge_value('User Cache/Effectiveness', 'percent', effectiveness)
self.add_derive_value('User Cache/Hits', 'keys', hits)
self.add_derive_value('User Cache/Misses', 'keys', misses)
self.add_derive_value('User Cache/Inserts', 'keys',
                      user_stats.get('ninserts',
                                     user_stats.get('num_inserts', 0)))
'Add all of the data points for a node :param dict stats: all of the nodes'
def add_datapoints(self, stats):
LOGGER.debug('Stats: %r', stats)
self.add_database_stats(stats['couchdb'])
self.add_request_methods(stats['httpd_request_methods'])
self.add_request_stats(stats['couchdb'], stats['httpd'])
self.add_response_code_stats(stats['httpd_status_codes'])
'Add all of the data points for a node :param dict stats: all of the nodes'
def add_datapoints(self, stats):
self.add_gauge_value('Delays/Convergence', 'us',
                     stats.get('converge_delay_total', 0),
                     min_val=stats.get('converge_delay_min', 0),
                     max_val=stats.get('converge_delay_max', 0))
self.add_gauge_value('Delays/Rebalance', 'us',
                     stats.get('rebalance_delay_total', 0),
                     min_val=stats.get('rebalance_delay_min', 0),
                     max_val=stats.get('rebalance_delay_max', 0))
self.add_gauge_value('FSM/Object Size/Mean', 'bytes',
                     stats.get('node_get_fsm_objsize_mean', 0))
self.add_gauge_value('FSM/Object Size/Median', 'bytes',
                     stats.get('node_get_fsm_objsize_median', 0))
self.add_gauge_value('FSM/Object Size/90th Percentile', 'bytes',
                     stats.get('node_get_fsm_objsize_90', 0))
self.add_gauge_value('FSM/Object Size/95th Percentile', 'bytes',
                     stats.get('node_get_fsm_objsize_95', 0))
self.add_gauge_value('FSM/Object Size/100th Percentile', 'bytes',
                     stats.get('node_get_fsm_objsize_100', 0))
self.add_gauge_value('FSM/Siblings/Mean', 'siblings',
                     stats.get('node_get_fsm_siblings_mean', 0))
self.add_gauge_value('FSM/Siblings/Median', 'siblings',
                     stats.get('node_get_fsm_siblings_median', 0))
self.add_gauge_value('FSM/Siblings/90th Percentile', 'siblings',
                     stats.get('node_get_fsm_siblings_90', 0))
self.add_gauge_value('FSM/Siblings/95th Percentile', 'siblings',
                     stats.get('node_get_fsm_siblings_95', 0))
self.add_gauge_value('FSM/Siblings/100th Percentile', 'siblings',
                     stats.get('node_get_fsm_siblings_100', 0))
self.add_gauge_value('FSM/Time/Get/Mean', 'us',
                     stats.get('node_get_fsm_time_mean', 0))
self.add_gauge_value('FSM/Time/Get/Median', 'us',
                     stats.get('node_get_fsm_time_median', 0))
self.add_gauge_value('FSM/Time/Get/90th Percentile', 'us',
                     stats.get('node_get_fsm_time_90', 0))
self.add_gauge_value('FSM/Time/Get/95th Percentile', 'us',
                     stats.get('node_get_fsm_time_95', 0))
self.add_gauge_value('FSM/Time/Get/100th Percentile', 'us',
                     stats.get('node_get_fsm_time_100', 0))
self.add_gauge_value('FSM/Time/Put/Mean', 'us',
                     stats.get('node_put_fsm_time_mean', 0))
self.add_gauge_value('FSM/Time/Put/Median', 'us',
                     stats.get('node_put_fsm_time_median', 0))
self.add_gauge_value('FSM/Time/Put/90th Percentile', 'us',
                     stats.get('node_put_fsm_time_90', 0))
self.add_gauge_value('FSM/Time/Put/95th Percentile', 'us',
                     stats.get('node_put_fsm_time_95', 0))
self.add_gauge_value('FSM/Time/Put/100th Percentile', 'us',
                     stats.get('node_put_fsm_time_100', 0))
self.add_derive_value('Failures/Pre-commit', 'failures',
                      stats.get('precommit_fail', 0))
self.add_derive_value('Failures/Post-commit', 'failures',
                      stats.get('postcommit_fail', 0))
self.add_derive_value('Gossip/Ignored', 'gossip',
                      stats.get('ignored_gossip_total', 0))
self.add_derive_value('Gossip/Received', 'gossip',
                      stats.get('gossip_received', 0))
self.add_derive_value('Handoff Timeouts', 'timeouts',
                      stats.get('handoff_timeouts', 0))
self.add_gauge_value('Mappers/Executing', 'mappers',
                     stats.get('executing_mappers', 0))
self.add_gauge_value('Memory/Allocated', 'bytes',
                     stats.get('mem_allocated', 0))
self.add_gauge_value('Memory/Total', 'bytes', stats.get('mem_total', 0))
self.add_gauge_value('Memory/Erlang/Atom/Allocated', 'bytes',
                     stats.get('memory_atom', 0))
self.add_gauge_value('Memory/Erlang/Atom/Used', 'bytes',
                     stats.get('memory_atom_used', 0))
self.add_gauge_value('Memory/Erlang/Binary', 'bytes',
                     stats.get('memory_binary', 0))
self.add_gauge_value('Memory/Erlang/Code', 'bytes',
                     stats.get('memory_code', 0))
self.add_gauge_value('Memory/Erlang/ETS', 'bytes',
                     stats.get('memory_ets', 0))
self.add_gauge_value('Memory/Erlang/Processes/Allocated', 'bytes',
                     stats.get('memory_processes', 0))
self.add_gauge_value('Memory/Erlang/Processes/Used', 'bytes',
                     stats.get('memory_processes_used', 0))
self.add_gauge_value('Memory/Erlang/System', 'bytes',
                     stats.get('memory_system', 0))
self.add_gauge_value('Memory/Erlang/Total', 'bytes',
                     stats.get('memory_total', 0))
self.add_gauge_value('Nodes/Connected', 'nodes',
                     len(stats.get('connected_nodes', list())))
self.add_gauge_value('Pipeline/Active', 'pipelines',
                     stats.get('pipeline_active', 0))
self.add_derive_value('Pipeline/Created', 'pipelines',
                      stats.get('pipeline_create_count', 0))
self.add_derive_value('Pipeline/Creation Errors', 'pipelines',
                      stats.get('pipeline_create_error_count', 0))
self.add_gauge_value('Processes/OS', 'processes',
                     stats.get('cpu_nprocs', 0))
self.add_gauge_value('Processes/Erlang', 'processes',
                     stats.get('cpu_nprocs', 0))
self.add_gauge_value('Protocol Buffer Connections', 'active',
                     stats.get('pbc_active', 0))
self.add_derive_value('Protocol Buffer Connections', 'total',
                      stats.get('pbc_connects_total', 0))
self.add_derive_value('Read Repairs', 'reads',
                      stats.get('read_repairs_total', 0))
self.add_derive_value('Requests/Gets', 'requests',
                      stats.get('node_gets_total', 0))
self.add_derive_value('Requests/Puts', 'requests',
                      stats.get('node_puts_total', 0))
self.add_derive_value('Requests/Redirected', 'requests',
                      stats.get('coord_redirs_total', 0))
self.add_gauge_value('Ring/Members', 'members',
                     len(stats.get('ring_members', list())))
self.add_gauge_value('Ring/Partitions', 'partitions',
                     stats.get('ring_num_partitions', 0))
self.add_gauge_value('Ring/Size', 'members',
                     stats.get('ring_creation_size', 0))
self.add_derive_value('Ring/Reconciled', 'members',
                      stats.get('rings_reconciled_total', 0))
self.add_derive_value('VNodes/Gets', 'vnodes',
                      stats.get('vnode_gets_total', 0))
self.add_derive_value('VNodes/Puts', 'vnodes',
                      stats.get('vnode_puts_total', 0))
self.add_derive_value('VNodes/Index', 'deletes',
                      stats.get('vnode_index_deletes_total', 0))
self.add_derive_value('VNodes/Index', 'delete-postings',
                      stats.get('vnode_index_deletes_postings_total', 0))
self.add_derive_value('VNodes/Index', 'reads',
                      stats.get('vnode_index_reads_total', 0))
self.add_derive_value('VNodes/Index', 'writes',
                      stats.get('vnode_index_writes_total', 0))
self.add_derive_value('VNodes/Index', 'postings',
                      stats.get('vnode_index_writes_postings_total', 0))
'Add all of the data points for a node :param dict stats: all of the nodes'
def add_datapoints(self, stats):
self.command_value('CAS', 'cas', stats)
self.add_derive_value('Command/Requests/Flush', 'flush', stats['cmd_flush'])
self.add_derive_value('Command/Errors/CAS', 'errors', stats['cas_badval'])
self.command_value('Decr', 'decr', stats)
self.command_value('Delete', 'delete', stats)
self.command_value('Get', 'get', stats)
self.command_value('Incr', 'incr', stats)
self.add_derive_value('Command/Requests/Set', 'requests', stats['cmd_set'])
self.add_gauge_value('Connection/Count', 'connections',
                     stats['curr_connections'])
self.add_gauge_value('Connection/Structures', 'connection structures',
                     stats['connection_structures'])
self.add_derive_value('Connection/Yields', 'yields', stats['conn_yields'])
self.add_derive_value('Evictions', 'items', stats['evictions'])
self.add_gauge_value('Items', 'items', stats['curr_items'])
self.add_derive_value('Network/In', 'bytes', stats['bytes_read'])
self.add_derive_value('Network/Out', 'bytes', stats['bytes_written'])
self.add_derive_value('System/CPU/System', 'seconds',
                      stats['rusage_system'])
self.add_derive_value('System/CPU/User', 'seconds', stats['rusage_user'])
self.add_gauge_value('System/Memory', 'bytes', stats['bytes'])
'Process commands adding the command and the hit ratio. :param str name: The command name :param str prefix: The command prefix :param dict stats: The request stats'
def command_value(self, name, prefix, stats):
hits = stats['%s_hits' % prefix]
total = hits + stats['%s_misses' % prefix]
if total > 0:
    ratio = (float(hits) / float(total)) * 100
else:
    ratio = 0
self.add_derive_value('Command/Requests/%s' % name, 'requests', total)
self.add_gauge_value('Command/Hit Ratio/%s' % name, 'ratio', ratio)
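A worked example with hypothetical counter values:

# stats = {'get_hits': 75, 'get_misses': 25}
# command_value('Get', 'get', stats):
#   total = 75 + 25 = 100
#   ratio = (75.0 / 100.0) * 100 = 75.0
# records Command/Requests/Get = 100 and Command/Hit Ratio/Get = 75.0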
'Loop and read in all the data until we have received it all. :param socket connection: The connection'
def fetch_data(self, connection):
connection.send('stats\n')
data = super(Memcached, self).fetch_data(connection)
data_in = []
for line in data.replace('\r', '').split('\n'):
    if line == 'END':
        return self.process_data(data_in)
    data_in.append(line.strip())
return None
'Loop through all the rows and parse each line, looking to see if it is in the data points we would like to process, adding the key => value pair to values if it is. :param list data: The list of rows :returns: dict'
def process_data(self, data):
values = dict()
for row in data:
    parts = row.split(' ')
    if parts[1] in self.KEYS:
        try:
            values[parts[1]] = int(parts[2])
        except ValueError:
            try:
                values[parts[1]] = float(parts[2])
            except ValueError:
                LOGGER.warning('Could not parse line: %r', parts)
                values[parts[1]] = 0
for key in self.KEYS:
    if key not in values:
        LOGGER.info('Populating missing element with 0: %s', key)
        values[key] = 0
return values
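For reference, memcached emits one 'STAT <name> <value>' row per metric, so a hypothetical row parses as:

# row = 'STAT get_hits 1234'
# parts = ['STAT', 'get_hits', '1234']
# parts[1] -> 'get_hits' (the key), int(parts[2]) -> 1234 (the value)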
'Add all of the data points for a database :param str name: The name of the database for the stats :param dict stats: The stats data to add'
def add_datapoints(self, name, stats):
base_key = 'Database/%s' % name
self.add_gauge_value('%s/Extents' % base_key, 'extents',
                     stats.get('extents', 0))
# dbStats reports dataSize and fileSize in bytes already, matching the unit
self.add_gauge_value('%s/Size' % base_key, 'bytes',
                     stats.get('dataSize', 0))
self.add_gauge_value('%s/File Size' % base_key, 'bytes',
                     stats.get('fileSize', 0))
self.add_gauge_value('%s/Objects' % base_key, 'objects',
                     stats.get('objects', 0))
self.add_gauge_value('%s/Collections' % base_key, 'collections',
                     stats.get('collections', 0))
self.add_gauge_value('%s/Index/Count' % base_key, 'indexes',
                     stats.get('indexes', 0))
self.add_gauge_value('%s/Index/Size' % base_key, 'bytes',
                     stats.get('indexSize', 0))
'Add all of the data points for a server :param dict stats: The stats data to add'
def add_server_datapoints(self, stats):
asserts = stats.get('asserts', dict())
self.add_derive_value('Asserts/Regular', 'asserts',
                      asserts.get('regular', 0))
self.add_derive_value('Asserts/Warning', 'asserts',
                      asserts.get('warning', 0))
self.add_derive_value('Asserts/Message', 'asserts', asserts.get('msg', 0))
self.add_derive_value('Asserts/User', 'asserts', asserts.get('user', 0))
self.add_derive_value('Asserts/Rollovers', 'asserts',
                      asserts.get('rollovers', 0))
flush = stats.get('backgroundFlushing', dict())
self.add_derive_timing_value('Background Flushes', 'ms',
                             flush.get('flushes', 0),
                             flush.get('total_ms', 0),
                             flush.get('last_ms', 0))
self.add_gauge_value('Seconds since last flush', 'seconds',
                     (datetime.datetime.now() -
                      flush.get('last_finished',
                                datetime.datetime.now())).seconds)
conn = stats.get('connections', dict())
self.add_gauge_value('Connections/Available', 'connections',
                     conn.get('available', 0))
self.add_gauge_value('Connections/Current', 'connections',
                     conn.get('current', 0))
cursors = stats.get('cursors', dict())
self.add_gauge_value('Cursors/Open', 'cursors',
                     cursors.get('totalOpen', 0))
self.add_derive_value('Cursors/Timed Out', 'cursors',
                      cursors.get('timedOut', 0))
dur = stats.get('dur', dict())
self.add_gauge_value('Durability/Commits in Write Lock', 'commits',
                     dur.get('commitsInWriteLock', 0))
self.add_gauge_value('Durability/Early Commits', 'commits',
                     dur.get('earlyCommits', 0))
self.add_gauge_value('Durability/Journal Commits', 'commits',
                     dur.get('commits', 0))
# serverStatus reports journaledMB and writeToDataFilesMB in megabytes;
# convert to bytes to match the stated unit
self.add_gauge_value('Durability/Journal Bytes Written', 'bytes',
                     dur.get('journaledMB', 0) * 1048576)
self.add_gauge_value('Durability/Data File Bytes Written', 'bytes',
                     dur.get('writeToDataFilesMB', 0) * 1048576)
timems = dur.get('timeMs', dict())
self.add_gauge_value('Durability/Timings/Duration Measured', 'ms',
                     timems.get('dt', 0))
self.add_gauge_value('Durability/Timings/Log Buffer Preparation', 'ms',
                     timems.get('prepLogBuffer', 0))
self.add_gauge_value('Durability/Timings/Write to Journal', 'ms',
                     timems.get('writeToJournal', 0))
self.add_gauge_value('Durability/Timings/Write to Data Files', 'ms',
                     timems.get('writeToDataFiles', 0))
self.add_gauge_value('Durability/Timings/Remapping Private View', 'ms',
                     timems.get('remapPrivateView', 0))
locks = stats.get('globalLock', dict())
self.add_derive_value('Global Locks/Held', 'ms',
                      locks.get('lockTime', 0) / 1000)
self.add_derive_value('Global Locks/Ratio', 'ratio', locks.get('ratio', 0))
active = locks.get('activeClients', dict())
self.add_derive_value('Global Locks/Active Clients/Total', 'clients',
                      active.get('total', 0))
self.add_derive_value('Global Locks/Active Clients/Readers', 'clients',
                      active.get('readers', 0))
self.add_derive_value('Global Locks/Active Clients/Writers', 'clients',
                      active.get('writers', 0))
queue = locks.get('currentQueue', dict())
self.add_derive_value('Global Locks/Queue/Total', 'locks',
                      queue.get('total', 0))
self.add_derive_value('Global Locks/Queue/Readers', 'readers',
                      queue.get('readers', 0))
self.add_derive_value('Global Locks/Queue/Writers', 'writers',
                      queue.get('writers', 0))
index = stats.get('indexCounters', dict())
btree_index = index.get('btree', dict())
self.add_derive_value('Index/Accesses', 'accesses',
                      index.get('accesses', 0) +
                      btree_index.get('accesses', 0))
self.add_derive_value('Index/Hits', 'hits',
                      index.get('hits', 0) + btree_index.get('hits', 0))
self.add_derive_value('Index/Misses', 'misses',
                      index.get('misses', 0) + btree_index.get('misses', 0))
self.add_derive_value('Index/Resets', 'resets',
                      index.get('resets', 0) + btree_index.get('resets', 0))
mem = stats.get('mem', dict())
# mem values are reported in megabytes; convert to bytes
self.add_gauge_value('Memory/Mapped', 'bytes',
                     mem.get('mapped', 0) * 1048576)
self.add_gauge_value('Memory/Mapped with Journal', 'bytes',
                     mem.get('mappedWithJournal', 0) * 1048576)
self.add_gauge_value('Memory/Resident', 'bytes',
                     mem.get('resident', 0) * 1048576)
self.add_gauge_value('Memory/Virtual', 'bytes',
                     mem.get('virtual', 0) * 1048576)
net = stats.get('network', dict())
self.add_derive_value('Network/Requests', 'requests',
                      net.get('numRequests', 0))
self.add_derive_value('Network/Transfer/In', 'bytes',
                      net.get('bytesIn', 0))
self.add_derive_value('Network/Transfer/Out', 'bytes',
                      net.get('bytesOut', 0))
ops = stats.get('opcounters', dict())
self.add_derive_value('Operations/Insert', 'ops', ops.get('insert', 0))
self.add_derive_value('Operations/Query', 'ops', ops.get('query', 0))
self.add_derive_value('Operations/Update', 'ops', ops.get('update', 0))
self.add_derive_value('Operations/Delete', 'ops', ops.get('delete', 0))
self.add_derive_value('Operations/Get More', 'ops', ops.get('getmore', 0))
self.add_derive_value('Operations/Command', 'ops', ops.get('command', 0))
extra = stats.get('extra_info', dict())
self.add_gauge_value('System/Heap Usage', 'bytes',
                     extra.get('heap_usage_bytes', 0))
self.add_derive_value('System/Page Faults', 'faults',
                      extra.get('page_faults', 0))
'Fetch the data from the MongoDB server and add the datapoints'
def get_and_add_db_stats(self):
databases = self.config.get('databases', list())
if isinstance(databases, list):
    self.get_and_add_db_list(databases)
else:
    self.get_and_add_db_dict(databases)
'Handle the list of databases while supporting authentication for the admin if needed :param list databases: The database list'
def get_and_add_db_list(self, databases):
LOGGER.debug('Processing list of mongo databases')
client = self.connect()
if not client:
    return
for database in databases:
    LOGGER.debug('Collecting stats for %s', database)
    db = client[database]
    try:
        self.add_datapoints(database, db.command('dbStats'))
    except errors.OperationFailure as error:
        LOGGER.critical('Could not fetch stats: %s', error)
'Handle the nested database structure with username and password. :param dict databases: The databases data structure'
def get_and_add_db_dict(self, databases):
LOGGER.debug('Processing dict of mongo databases')
client = self.connect()
if not client:
    return
for database in databases.keys():
    db = client[database]
    try:
        if 'username' in databases[database]:
            db.authenticate(databases[database]['username'],
                            databases[database].get('password'))
        self.add_datapoints(database, db.command('dbStats'))
        if 'username' in databases[database]:
            db.logout()
    except errors.OperationFailure as error:
        LOGGER.critical('Could not fetch stats: %s', error)
'Add all of the data points for a node :param dict stats: all of the nodes'
def add_datapoints(self, stats):
self.add_gauge_value('Listen Queue Size', 'connections',
                     stats.get('listen_queue', 0))
self.add_gauge_value('Listen Queue Errors', 'errors',
                     stats.get('listen_queue_errors', 0))
for lock in stats.get('locks', list()):
    lock_name = lock.keys()[0]
    self.add_gauge_value('Locks/%s' % lock_name, 'locks', lock[lock_name])
exceptions = 0
harakiris = 0
requests = 0
respawns = 0
signals = 0
apps = dict()
for worker in stats.get('workers', list()):
    id = worker['id']
    exceptions += worker.get('exceptions', 0)
    harakiris += worker.get('harakiri_count', 0)
    requests += worker.get('requests', 0)
    respawns += worker.get('respawn_count', 0)
    signals += worker.get('signals', 0)
    self.add_derive_value('Worker/%s/Exceptions' % id, 'exceptions',
                          worker.get('exceptions', 0))
    self.add_derive_value('Worker/%s/Harakiri' % id, 'harakiris',
                          worker.get('harakiri_count', 0))
    self.add_derive_value('Worker/%s/Requests' % id, 'requests',
                          worker.get('requests', 0))
    self.add_derive_value('Worker/%s/Respawns' % id, 'respawns',
                          worker.get('respawn_count', 0))
    self.add_derive_value('Worker/%s/Signals' % id, 'signals',
                          worker.get('signals', 0))
    for app in worker['apps']:
        if app['id'] not in apps:
            apps[app['id']] = {'exceptions': 0, 'requests': 0}
        apps[app['id']]['exceptions'] += app['exceptions']
        apps[app['id']]['requests'] += app['requests']
for app in apps:
    self.add_derive_value('Application/%s/Exceptions' % app, 'exceptions',
                          apps[app].get('exceptions', 0))
    self.add_derive_value('Application/%s/Requests' % app, 'requests',
                          apps[app].get('requests', 0))
self.add_derive_value('Summary/Applications', 'applications', len(apps))
self.add_derive_value('Summary/Exceptions', 'exceptions', exceptions)
self.add_derive_value('Summary/Harakiris', 'harakiris', harakiris)
self.add_derive_value('Summary/Requests', 'requests', requests)
self.add_derive_value('Summary/Respawns', 'respawns', respawns)
self.add_derive_value('Summary/Signals', 'signals', signals)
self.add_derive_value('Summary/Workers', 'workers',
                      len(stats.get('workers', ())))
'Read the data from the socket :param socket connection: The connection :return: dict'
def fetch_data(self, connection):
data = super(uWSGI, self).fetch_data(connection, read_till_empty=True)
if data:
    data = re.sub('"HTTP_COOKIE=[^"]*"', '""', data)
    return json.loads(data)
return {}
'Add all of the datapoints for the Elasticsearch poll :param dict stats: The stats to process for the values'
def add_datapoints(self, stats):
totals = dict()
for node in stats.get('nodes'):
    for key in stats['nodes'][node].keys():
        if isinstance(stats['nodes'][node][key], dict):
            if key not in totals:
                totals[key] = dict()
            self.process_tree(totals[key], stats['nodes'][node][key])
self.add_index_datapoints(totals)
self.add_network_datapoints(totals)
self.add_cluster_stats()
'Add stats that go under Component/Cluster'
def add_cluster_stats(self):
url = self.stats_url.replace(self.DEFAULT_PATH, '/_cluster/health')
response = self.http_get(url)
if response.status_code == 200:
    data = response.json()
    self.add_gauge_value('Cluster/Status', 'level',
                         self.STATUS_CODE[data.get('status', 'red')])
    self.add_gauge_value('Cluster/Nodes', 'nodes',
                         data.get('number_of_nodes', 0))
    self.add_gauge_value('Cluster/Data Nodes', 'nodes',
                         data.get('number_of_data_nodes', 0))
    self.add_gauge_value('Cluster/Shards/Active', 'shards',
                         data.get('active_shards', 0))
    self.add_gauge_value('Cluster/Shards/Initializing', 'shards',
                         data.get('initializing_shards', 0))
    self.add_gauge_value('Cluster/Shards/Primary', 'shards',
                         data.get('active_primary_shards', 0))
    self.add_gauge_value('Cluster/Shards/Relocating', 'shards',
                         data.get('relocating_shards', 0))
    self.add_gauge_value('Cluster/Shards/Unassigned', 'shards',
                         data.get('unassigned_shards', 0))
else:
    LOGGER.error('Error collecting cluster stats (%s): %s',
                 response.status_code, response.content)
'Add the data points for Component/Indices :param dict stats: The stats to process for the values'
def add_index_datapoints(self, stats):
indices = stats.get('indices', dict())
docs = indices.get('docs', dict())
self.add_gauge_value('Indices/Documents/Count', 'docs',
                     docs.get('count', 0))
self.add_derive_value('Indices/Documents/Added', 'docs',
                      docs.get('count', 0))
self.add_derive_value('Indices/Documents/Deleted', 'docs',
                      docs.get('deleted', 0))
store = indices.get('store', dict())
self.add_gauge_value('Indices/Storage', 'bytes',
                     store.get('size_in_bytes', 0))
self.add_derive_value('Indices/Storage Throttled', 'ms',
                      store.get('throttle_time_in_millis', 0))
indexing = indices.get('indexing', dict())
self.add_derive_value('Indices/Indexing', 'ms',
                      indexing.get('index_time_in_millis', 0))
self.add_derive_value('Indices/Indexing', 'count',
                      indexing.get('index_total', 0))
self.add_derive_value('Indices/Index Deletes', 'ms',
                      indexing.get('delete_time_in_millis', 0))
self.add_derive_value('Indices/Index Deletes', 'count',
                      indexing.get('delete_total', 0))
get_stats = indices.get('get', dict())
self.add_derive_value('Indices/Get', 'count', get_stats.get('total', 0))
self.add_derive_value('Indices/Get', 'ms',
                      get_stats.get('time_in_millis', 0))
self.add_derive_value('Indices/Get Hits', 'count',
                      get_stats.get('exists_total', 0))
self.add_derive_value('Indices/Get Hits', 'ms',
                      get_stats.get('exists_time_in_millis', 0))
self.add_derive_value('Indices/Get Misses', 'count',
                      get_stats.get('missing_total', 0))
self.add_derive_value('Indices/Get Misses', 'ms',
                      get_stats.get('missing_time_in_millis', 0))
search = indices.get('search', dict())
self.add_gauge_value('Indices/Open Search Contexts', 'count',
                     search.get('open_contexts', 0))
self.add_derive_value('Indices/Search Query', 'count',
                      search.get('query_total', 0))
self.add_derive_value('Indices/Search Query', 'ms',
                      search.get('query_time_in_millis', 0))
self.add_derive_value('Indices/Search Fetch', 'count',
                      search.get('fetch_total', 0))
self.add_derive_value('Indices/Search Fetch', 'ms',
                      search.get('fetch_time_in_millis', 0))
merge_stats = indices.get('merge', dict())
self.add_derive_value('Indices/Merge', 'count',
                      merge_stats.get('total', 0))
self.add_derive_value('Indices/Merge', 'ms',
                      merge_stats.get('total_time_in_millis', 0))
flush_stats = indices.get('flush', dict())
self.add_gauge_value('Indices/Flush', 'count',
                     flush_stats.get('total', 0))
self.add_derive_value('Indices/Flush', 'ms',
                      flush_stats.get('total_time_in_millis', 0))
'Add the data points for Component/Network :param dict stats: The stats to process for the values'
def add_network_datapoints(self, stats):
transport = stats.get('transport', dict())
self.add_derive_value('Network/Traffic/Received', 'bytes',
                      transport.get('rx_size_in_bytes', 0))
self.add_derive_value('Network/Traffic/Sent', 'bytes',
                      transport.get('tx_size_in_bytes', 0))
network = stats.get('network', dict())
self.add_derive_value('Network/Connections/Active', 'conn',
                      network.get('active_opens', 0))
self.add_derive_value('Network/Connections/Passive', 'conn',
                      network.get('passive_opens', 0))
self.add_derive_value('Network/Connections/Reset', 'conn',
                      network.get('estab_resets', 0))
self.add_derive_value('Network/Connections/Failures', 'conn',
                      network.get('attempt_fails', 0))
self.add_derive_value('Network/HTTP Connections', 'conn',
                      stats.get('http', dict()).get('total_opened', 0))
self.add_derive_value('Network/Segments/In', 'seg',
                      network.get('in_seg', 0))
self.add_derive_value('Network/Segments/In', 'errors',
                      network.get('in_errs', 0))
self.add_derive_value('Network/Segments/Out', 'seg',
                      network.get('out_seg', 0))
self.add_derive_value('Network/Segments/Retransmitted', 'seg',
                      network.get('retrans_segs', 0))
'Recursively combine all node stats into a single top-level value :param dict tree: The output values :param dict values: The input values'
def process_tree(self, tree, values):
for key in values:
    if key == 'timestamp':
        continue
    if isinstance(values[key], dict):
        if key not in tree:
            tree[key] = dict()
        self.process_tree(tree[key], values[key])
    elif isinstance(values[key], int):
        if key not in tree:
            tree[key] = 0
        tree[key] += values[key]
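A small illustration of the recursive merge (node payloads invented; 'plugin' stands in for the instance):

totals = dict()
plugin.process_tree(totals, {'docs': {'count': 10, 'deleted': 1}})
plugin.process_tree(totals, {'docs': {'count': 5, 'deleted': 2}})
# totals == {'docs': {'count': 15, 'deleted': 3}}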
'Add all of the data points for a fpm-pool :param dict stats: Stats from php-fpm for a pool'
def add_datapoints(self, stats):
self.add_derive_value('Connections/Accepted', 'connections',
                      stats.get('accepted conn', 0))
self.add_gauge_value('Connections/Pending', 'connections',
                     stats.get('listen queue', 0),
                     max_val=stats.get('max listen queue', 0))
self.add_gauge_value('Socket Queue', 'connections',
                     stats.get('listen queue len', 0))
self.add_gauge_value('Processes/Active', 'processes',
                     stats.get('active processes', 0),
                     max_val=stats.get('max processes', 0))
self.add_gauge_value('Processes/Idle', 'processes',
                     stats.get('idle processes', 0))
self.add_derive_value('Process Limit Reached', 'processes',
                      stats.get('max children reached', 0))
self.add_derive_value('Slow Requests', 'requests',
                      stats.get('slow requests', 0))
'Parse the scoreboard out of the stats content :rtype: dict'
def get_scoreboard(self, data):
keys = ['_', 'S', 'R', 'W', 'K', 'D', 'C', 'L', 'G', 'I', '.']
score_out = dict(zip(keys, [0] * len(keys)))
for line in data.splitlines():
    if line.find('Scoreboard') != -1:
        scoreboard = line.replace('Scoreboard: ', '')
        for i in range(0, len(scoreboard)):
            score_out[scoreboard[i]] += 1
return score_out
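A hypothetical mod_status line and the counts it would produce:

# data containing the line 'Scoreboard: __W_K...'
# yields score_out['_'] == 3, ['W'] == 1, ['K'] == 1, ['.'] == 3,
# with every other scoreboard key left at 0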
'Add all of the data points for a node :param str stats: The stats content from Apache as a string'
def add_datapoints(self, stats):
matches = PATTERN.findall(stats or '')
for key, value in matches:
    try:
        value = int(value)
    except ValueError:
        try:
            value = float(value)
        except ValueError:
            value = 0
    if key in self.KEYS:
        if self.KEYS[key].get('type') == 'gauge':
            self.add_gauge_value(self.KEYS[key]['label'],
                                 self.KEYS[key].get('suffix', ''), value)
        else:
            self.add_derive_value(self.KEYS[key]['label'],
                                  self.KEYS[key].get('suffix', ''), value)
    else:
        LOGGER.debug('Found unmapped key/value pair: %s = %s', key, value)
score_data = self.get_scoreboard(stats)
for key, value in score_data.iteritems():
    if key in self.KEYS:
        if self.KEYS[key].get('type') == 'gauge':
            self.add_gauge_value(self.KEYS[key]['label'],
                                 self.KEYS[key].get('suffix', ''), value)
        else:
            self.add_derive_value(self.KEYS[key]['label'],
                                  self.KEYS[key].get('suffix', ''), value)
    else:
        LOGGER.debug('Found unmapped key/value pair: %s = %s', key, value)
'ConfigDict should behave like a normal dict.'
def test_isadict(self):
d, m = dict(), ConfigDict()
d['key'], m['key'] = 'value', 'value'
d['k2'], m['k2'] = 'v1', 'v1'
d['k2'], m['k2'] = 'v2', 'v2'
self.assertEqual(d.keys(), m.keys())
self.assertEqual(list(d.values()), list(m.values()))
self.assertEqual(d.get('key'), m.get('key'))
self.assertEqual(d.get('cay'), m.get('cay'))
self.assertEqual(list(iter(d)), list(iter(m)))
self.assertEqual([k for k in d], [k for k in m])
self.assertEqual(len(d), len(m))
self.assertEqual('key' in d, 'key' in m)
self.assertEqual('cay' in d, 'cay' in m)
self.assertRaises(KeyError, lambda: m['cay'])
self.assertEqual(d.setdefault('key', 'Val2'), m.setdefault('key', 'Val2'))
self.assertEqual(d.setdefault('key', 'Val3'), m.setdefault('key', 'Val3'))
self.assertEqual(d.get('key'), m.get('key'))
with self.assertRaises(KeyError):
    del m['No key']
'DateParser: RFC 1123 format'
def test_rfc1123(self):
ts = time.time()
rs = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(ts))
self.assertEqual(int(ts), int(parse_date(rs)))
'DateParser: RFC 850 format'
def test_rfc850(self):
ts = time.time()
rs = time.strftime('%A, %d-%b-%y %H:%M:%S GMT', time.gmtime(ts))
self.assertEqual(int(ts), int(parse_date(rs)))
'DateParser: asctime format'
def test_asctime(self):
ts = time.time()
rs = time.strftime('%a %b %d %H:%M:%S %Y', time.gmtime(ts))
self.assertEqual(int(ts), int(parse_date(rs)))
'DateParser: Bad format'
def test_bad(self):
self.assertEqual(None, parse_date('Bad 123'))
'SendFile: Valid requests'
def test_valid(self):
out = static_file(basename, root=root)
self.assertEqual(open(__file__, 'rb').read(), out.body.read())
'SendFile: Invalid requests'
def test_invalid(self):
self.assertEqual(404, static_file('not/a/file', root=root).status_code)
f = static_file(os.path.join('./../', basename), root='./views/')
self.assertEqual(403, f.status_code)
try:
    fp, fn = tempfile.mkstemp()
    os.chmod(fn, 0)
    self.assertEqual(403, static_file(fn, root='/').status_code)
finally:
    os.close(fp)
    os.unlink(fn)
'SendFile: Mime Guessing'
def test_mime(self):
f = static_file(basename, root=root)
self.assertTrue(f.headers['Content-Type'].split(';')[0] in
                ('application/x-python-code', 'text/x-python'))
f = static_file(basename, root=root, mimetype='some/type')
self.assertEqual('some/type', f.headers['Content-Type'])
f = static_file(basename, root=root, mimetype='text/foo')
self.assertEqual('text/foo; charset=UTF-8', f.headers['Content-Type'])
f = static_file(basename, root=root, mimetype='text/foo', charset='latin1')
self.assertEqual('text/foo; charset=latin1', f.headers['Content-Type'])
'SendFile: If-Modified-Since'
def test_ims(self):
request.environ['HTTP_IF_MODIFIED_SINCE'] = time.strftime(
    '%a, %d %b %Y %H:%M:%S GMT', time.gmtime())
res = static_file(basename, root=root)
self.assertEqual(304, res.status_code)
self.assertEqual(int(os.stat(__file__).st_mtime),
                 parse_date(res.headers['Last-Modified']))
self.assertAlmostEqual(int(time.time()),
                       parse_date(res.headers['Date']))
request.environ['HTTP_IF_MODIFIED_SINCE'] = time.strftime(
    '%a, %d %b %Y %H:%M:%S GMT', time.gmtime(100))
self.assertEqual(open(__file__, 'rb').read(),
                 static_file(basename, root=root).body.read())
'SendFile: ETag / If-None-Match'
def test_etag(self):
res = static_file(basename, root=root)
self.assertTrue('ETag' in res.headers)
self.assertEqual(200, res.status_code)
etag = res.headers['ETag']
request.environ['HTTP_IF_NONE_MATCH'] = etag
res = static_file(basename, root=root)
self.assertTrue('ETag' in res.headers)
self.assertEqual(etag, res.headers['ETag'])
self.assertEqual(304, res.status_code)
request.environ['HTTP_IF_NONE_MATCH'] = etag
res = static_file(basename2, root=root2)
self.assertTrue('ETag' in res.headers)
self.assertNotEqual(etag, res.headers['ETag'])
self.assertEqual(200, res.status_code)
'SendFile: Download as attachment'
def test_download(self):
f = static_file(basename, root=root, download='foo.mp3')
self.assertEqual('audio/mpeg', f.headers['Content-Type'])
f = static_file(basename, root=root, download=True)
self.assertEqual('attachment; filename="%s"' % basename,
                 f.headers['Content-Disposition'])
request.environ['HTTP_IF_MODIFIED_SINCE'] = time.strftime(
    '%a, %d %b %Y %H:%M:%S GMT', time.gmtime(100))
f = static_file(basename, root=root)
self.assertEqual(open(__file__, 'rb').read(), f.body.read())
'Templates: Parse string'
def test_string(self):
self.assertRenders('start {{var}} end', 'start var end', var='var')
'Templates: utf8 code in file'
def test_unicode_code(self):
with chdir(__file__):
    t = SimpleTemplate(name='./views/stpl_unicode.tpl', lookup=['.'])
    self.assertRenders(t,
                       'start \xc3\xb1\xc3\xa7 \xc3\xa4\xc3\xb6\xc3\xbc end\n',
                       var=touni('\xc3\xa4\xc3\xb6\xc3\xbc'))
'Templates: import statement'
def test_import(self):
t = ('%from base64 import b64encode\n'
     'start {{b64encode(var.encode("ascii") if hasattr(var, "encode")'
     ' else var)}} end')
self.assertRenders(t, 'start dmFy end', var='var')
'Templates: Data representation'
def test_data(self):
self.assertRenders('<{{var}}>', '<True>', var=True)
self.assertRenders('<{{var}}>', '<False>', var=False)
self.assertRenders('<{{var}}>', '<>', var=None)
self.assertRenders('<{{var}}>', '<0>', var=0)
self.assertRenders('<{{var}}>', '<5>', var=5)
self.assertRenders('<{{var}}>', '<b>', var=tob('b'))
self.assertRenders('<{{var}}>', '<1.0>', var=1.0)
self.assertRenders('<{{var}}>', '<[1, 2]>', var=[1, 2])
'Templates: Code blocks and loops'
def test_blocks(self):
t = 'start\n%for i in l:\n{{i}} \n%end\nend'
self.assertRenders(t, 'start\n1 \n2 \n3 \nend', l=[1, 2, 3])
self.assertRenders(t, 'start\nend', l=[])
t = 'start\n%if i:\n{{i}} \n%end\nend'
self.assertRenders(t, 'start\nTrue \nend', i=True)
self.assertRenders(t, 'start\nend', i=False)
'Whitespace between block keyword and colon is allowed'
def test_elsebug(self):
self.assertRenders('%if 1:\nyes\n%else:\nno\n%end\n', 'yes\n')
self.assertRenders('%if 1:\nyes\n%else :\nno\n%end\n', 'yes\n')
'A "#" sign within an string is not a comment'
def test_commentbug(self):
self.assertRenders("%if '#':\nyes\n%end\n", 'yes\n')
'Block statements with non-terminating newlines'
def test_multiline(self):
self.assertRenders('%if 1\\\n%and 1:\nyes\n%end\n', 'yes\n')
'Block statements with non-terminating newlines in list'
def test_newline_in_parameterlist(self):
self.assertRenders('%a=[1,\n%2]\n{{len(a)}}', '2')
'One-line dedent blocks should not change indentation'
def test_dedentbug(self):
t = '%if x: a="if"\n%else: a="else"\n%end\n{{a}}' self.assertRenders(t, 'if', x=True) self.assertRenders(t, 'else', x=False) t = '%if x:\n%a="if"\n%else: a="else"\n%end\n{{a}}' self.assertRenders(t, 'if', x=True) self.assertRenders(t, 'else', x=False) t = SimpleTemplate('%if x: a="if"\n%else: a="else"\n%end') self.assertRaises(NameError, t.render)
'One-line blocks should not change indentation'
def test_onelinebugs(self):
t = '%if x:\n%a=1\n%end\n{{a}}'
self.assertRenders(t, '1', x=True)
t = '%if x: a=1; end\n{{a}}'
self.assertRenders(t, '1', x=True)
t = '%if x:\n%a=1\n%else:\n%a=2\n%end\n{{a}}'
self.assertRenders(t, '1', x=True)
self.assertRenders(t, '2', x=False)
t = '%if x: a=1\n%else:\n%a=2\n%end\n{{a}}'
self.assertRenders(t, '1', x=True)
self.assertRenders(t, '2', x=False)
t = '%if x:\n%a=1\n%else: a=2; end\n{{a}}'
self.assertRenders(t, '1', x=True)
self.assertRenders(t, '2', x=False)
t = '%if x: a=1\n%else: a=2; end\n{{a}}'
self.assertRenders(t, '1', x=True)
self.assertRenders(t, '2', x=False)
'Templates: one line code blocks'
def test_onelineblocks(self):
t = "start\n%a=''\n%for i in l: a += str(i); end\n{{a}}\nend" self.assertRenders(t, 'start\n123\nend', l=[1, 2, 3]) self.assertRenders(t, 'start\n\nend', l=[])
'Templates: Nobreak statements'
def test_nobreak(self):
self.assertRenders('start\\\\\n%pass\nend', 'startend')
'Templates: Escaped nobreak statements'
def test_nonobreak(self):
self.assertRenders('start\\\\\n\\\\\n%pass\nend', 'start\\\\\nend')
'Templates: Include statements'
def test_include(self):
with chdir(__file__):
    t = SimpleTemplate(name='stpl_include', lookup=['./views/'])
    self.assertRenders(t, 'before\nstart var end\nafter\n', var='var')
'Templates: %rebase and method passing'
def test_rebase(self):
with chdir(__file__):
    t = SimpleTemplate(name='stpl_t2main', lookup=['./views/'])
    result = '+base+\n+main+\n!1234!\n+include+\n-main-\n+include+\n-base-\n'
    self.assertRenders(t, result, content='1234')
'Templates: Unavailable templates'
def test_notfound(self):
self.assertRaises(TemplateError, SimpleTemplate, name='abcdef', lookup=['.'])
'Templates: Exceptions'
def test_error(self):
self.assertRaises(SyntaxError, lambda: SimpleTemplate('%for badsyntax').co)
self.assertRaises(IndexError,
                  SimpleTemplate('{{i[5]}}', lookup=['.']).render, i=[0])
'Templates: Test windows line breaks'
def test_winbreaks(self):
self.assertRenders('%var+=1\r\n{{var}}\r\n', '6\r\n', var=5)
'Templates: Comments should behave like code-lines (e.g. flush text-lines)'
def test_commentonly(self):
t = SimpleTemplate('...\n%#test\n...')
self.assertNotEqual('#test', t.code.splitlines()[0])
'#595: Everything before an \'if\' statement is removed, resulting in SyntaxError.'
def test_bug_block_keywords_eat_prefixed_code(self):
tpl = "% m = 'x' if True else 'y'\n{{m}}" self.assertRenders(tpl, 'x')
'FormsDict.attribute returns string values as unicode.'
def test_attr_access(self):
d = FormsDict(py2=tob('\xe7\x93\xb6'),
              py3=tob('\xe7\x93\xb6').decode('latin1'))
self.assertEqual(touni('\xe7\x93\xb6'), d.py2)
self.assertEqual(touni('\xe7\x93\xb6'), d.py3)
'FormsDict.attribute returns u\'\' on missing keys.'
def test_attr_missing(self):
d = FormsDict()
self.assertEqual(touni(''), d.missing)
'FormsDict.attribute returns u\'\' on UnicodeError.'
def test_attr_unicode_error(self):
d = FormsDict(latin=touni('\xc3\xb6\xc3\xa4\xc3\xbc\xc3\x9f').encode('latin1'))
self.assertEqual(touni(''), d.latin)
d.input_encoding = 'latin1'
self.assertEqual(touni('\xc3\xb6\xc3\xa4\xc3\xbc\xc3\x9f'), d.latin)
'Test a simple static page with this server adapter.'
def test_simple(self):
if self.skip:
    return
self.assertEqual(tob('OK'), self.fetch('test'))
'MultiDict should behave like a normal dict'
def test_isadict(self):
d, m = dict(a=5), MultiDict(a=5)
d['key'], m['key'] = 'value', 'value'
d['k2'], m['k2'] = 'v1', 'v1'
d['k2'], m['k2'] = 'v2', 'v2'
self.assertEqual(list(d.keys()), list(m.keys()))
self.assertEqual(list(d.values()), list(m.values()))
self.assertEqual(list(d.keys()), list(m.iterkeys()))
self.assertEqual(list(d.values()), list(m.itervalues()))
self.assertEqual(d.get('key'), m.get('key'))
self.assertEqual(d.get('cay'), m.get('cay'))
self.assertEqual(list(iter(d)), list(iter(m)))
self.assertEqual([k for k in d], [k for k in m])
self.assertEqual(len(d), len(m))
self.assertEqual('key' in d, 'key' in m)
self.assertEqual('cay' in d, 'cay' in m)
self.assertRaises(KeyError, lambda: m['cay'])
'MultiDict has some special features'
def test_ismulti(self):
m = MultiDict(a=5)
m['a'] = 6
self.assertEqual([5, 6], m.getall('a'))
self.assertEqual([], m.getall('b'))
self.assertEqual([('a', 5), ('a', 6)], list(m.iterallitems()))
'HeaderDict replaces by default and title()s its keys'
def test_isheader(self):
m = HeaderDict(abc_def=5)
m['abc_def'] = 6
self.assertEqual(['6'], m.getall('abc_def'))
m.append('abc_def', 7)
self.assertEqual(['6', '7'], m.getall('abc_def'))
self.assertEqual([('Abc-Def', '6'), ('Abc-Def', '7')],
                 list(m.iterallitems()))
'Assure HeaderDict.get() is case-insensitive'
def test_headergetbug(self):
d = HeaderDict()
d['UPPER'] = 'UPPER'
d['lower'] = 'lower'
self.assertEqual(d.get('upper'), 'UPPER')
self.assertEqual(d.get('LOWER'), 'lower')
'Verify that 500 errors serializing dictionaries don\'t return content-type application/json'
def test_json_serialization_error(self):
self.app.route('/')(lambda: {'a': set()})
try:
    self.assertStatus(500)
    self.assertHeader('Content-Type', 'text/html; charset=UTF-8')
except ImportError:
    warn('Skipping JSON tests.')
'WSGI: Cookies'
def test_cookie(self):
@bottle.route('/cookie')
def test():
    bottle.response.set_cookie('b', 'b')
    bottle.response.set_cookie('c', 'c', path='/')
    return 'hello'
try:
    c = self.urlopen('/cookie')['header'].get_all('Set-Cookie', '')
except:
    c = self.urlopen('/cookie')['header'].get('Set-Cookie', '').split(',')
c = [x.strip() for x in c]
self.assertTrue('b=b' in c)
self.assertTrue('c=c; Path=/' in c)
'WSGI: GET routes'
def test_get(self):
@bottle.route('/')
def test():
    return 'test'
self.assertStatus(404, '/not/found')
self.assertStatus(405, '/', post='var=value')
self.assertBody('test', '/')
'WSGI: POST routes'
def test_post(self):
@bottle.route('/', method='POST')
def test():
    return 'test'
self.assertStatus(404, '/not/found')
self.assertStatus(405, '/')
self.assertBody('test', '/', post='var=value')
'WSGI: HEAD routes and GET fallback'
def test_headget(self):
@bottle.route('/get')
def test():
    return 'test'

@bottle.route('/head', method='HEAD')
def test2():
    return 'test'

self.assertStatus(405, '/head')
self.assertStatus(200, '/head', method='HEAD')
self.assertBody('', '/head', method='HEAD')
self.assertStatus(200, '/get', method='HEAD')
self.assertBody('', '/get', method='HEAD')
'WSGI: Request attributes'
def test_request_attrs(self):
@bottle.route('/')
def test():
    self.assertEqual(bottle.request.app, bottle.default_app())
    self.assertEqual(bottle.request.route, bottle.default_app().routes[0])
    return 'foo'
self.assertBody('foo', '/')
'204 responses must not return some entity headers'
def get204(self):
bad = ('content-length', 'content-type')
for h in bad:
    bottle.response.set_header(h, 'foo')
bottle.response.status = 204
for h, v in bottle.response.headerlist:
    self.assertFalse(h.lower() in bad, 'Header %s not deleted' % h)
'304 responses must not return entity headers'
def get304(self):
bad = ('allow', 'content-encoding', 'content-language', 'content-length',
       'content-md5', 'content-range', 'content-type', 'last-modified')
for h in bad:
    bottle.response.set_header(h, 'foo')
bottle.response.status = 304
for h, v in bottle.response.headerlist:
    self.assertFalse(h.lower() in bad, 'Header %s not deleted' % h)
'WSGI: Exceptions within handler code (HTTP 500)'
def test_500(self):
@bottle.route('/')
def test():
    return 1 / 0
self.assertStatus(500, '/')
'WSGI: UTF-8 Characters in the URL'
def test_utf8_url(self):
@bottle.route('/my-\xc3\xb6\xc3\xa4\xc3\xbc/:string')
def test(string):
    return string
self.assertBody(tob('urf8-\xc3\xb6\xc3\xa4\xc3\xbc'),
                '/my-\xc3\xb6\xc3\xa4\xc3\xbc/urf8-\xc3\xb6\xc3\xa4\xc3\xbc')
'WSGI: abort(401, \'\') (HTTP 401)'
def test_401(self):
@bottle.route('/')
def test():
    bottle.abort(401)
self.assertStatus(401, '/')

@bottle.error(401)
def err(e):
    bottle.response.status = 200
    return str(type(e))
self.assertStatus(200, '/')
self.assertBody("<class 'bottle.HTTPError'>", '/')
'WSGI: redirect (HTTP 303)'
def test_303(self):
@bottle.route('/')
def test():
    bottle.redirect('/yes')

@bottle.route('/one')
def test2():
    bottle.redirect('/yes', 305)

env = {'SERVER_PROTOCOL': 'HTTP/1.1'}
self.assertStatus(303, '/', env=env)
self.assertHeader('Location', 'http://127.0.0.1/yes', '/', env=env)
env = {'SERVER_PROTOCOL': 'HTTP/1.0'}
self.assertStatus(302, '/', env=env)
self.assertHeader('Location', 'http://127.0.0.1/yes', '/', env=env)
self.assertStatus(305, '/one', env=env)
self.assertHeader('Location', 'http://127.0.0.1/yes', '/one', env=env)