Columns:
Code: string lengths 103 to 85.9k
Summary: sequence lengths 0 to 94
Please provide a description of the function:
def set_log_console(log_level=logging.INFO):
    # Change the logger and all its handlers log level
    logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
    logger_.setLevel(log_level)

    # Adding a console logger...
    csh = ColorStreamHandler(sys.stdout)
    csh.setFormatter(Formatter('[%(asctime)s] %(levelname)s: [%(name)s] %(message)s',
                               "%Y-%m-%d %H:%M:%S"))
    logger_.addHandler(csh)
[ "Set the Alignak daemons logger have a console log handler.\n\n This is only used for the arbiter verify mode to add a console log handler.\n\n :param log_level: log level\n :return: n/a\n " ]
Please provide a description of the function:
def set_log_level(log_level=logging.INFO, handlers=None):
    # print("Setting log level: %s" % (log_level))
    # Change the logger and all its handlers log level
    logger_ = logging.getLogger(ALIGNAK_LOGGER_NAME)
    logger_.setLevel(log_level)

    if handlers is not None:
        for handler in logger_.handlers:
            if getattr(handler, '_name', None) in handlers:
                handler.setLevel(log_level)
[ "Set the Alignak logger log level. This is mainly used for the arbiter verify code to\n set the log level at INFO level whatever the configured log level is set.\n\n This is also used when changing the daemon log level thanks to the WS interface\n\n If an handlers name list is provided, all the handlers which name is in this list are\n concerned else only the `daemons` handler log level is changed.\n\n :param handlers: list of concerned handlers\n :type: list\n :param log_level: log level\n :return: n/a\n " ]
Please provide a description of the function:
def make_monitoring_log(level, message, timestamp=None, to_logger=False):
    level = level.lower()
    if level not in ['debug', 'info', 'warning', 'error', 'critical']:
        return False

    if to_logger:
        logging.getLogger(ALIGNAK_LOGGER_NAME).debug("Monitoring log: %s / %s", level, message)

        # Emit to our monitoring log logger
        message = message.replace('\r', '\\r')
        message = message.replace('\n', '\\n')
        logger_ = logging.getLogger(MONITORING_LOGGER_NAME)
        logging_function = getattr(logger_, level)
        try:
            message = message.decode('utf8', 'ignore')
        except UnicodeEncodeError:
            pass
        except AttributeError:
            # Python 3 raises an exception!
            pass

        if timestamp:
            st = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
            logging_function(message, extra={'my_date': st})
        else:
            logging_function(message)

        return True

    # ... and returns a brok
    return Brok({'type': 'monitoring_log', 'data': {'level': level, 'message': message}})
[ "\n Function used to build the monitoring log.\n\n Emit a log message with the provided level to the monitoring log logger.\n Build a Brok typed as monitoring_log with the provided message\n\n When to_logger is True, the information is sent to the python logger, else a monitoring_log\n Brok is returned. The Brok is managed by the daemons to build an Event that will br logged\n by the Arbiter when it collects all the events.\n\n TODO: replace with dedicated brok for each event to log - really useful?\n\n :param level: log level as defined in logging\n :type level: str\n :param message: message to send to the monitoring log logger\n :type message: str\n :param to_logger: when set, send to the logger, else raise a brok\n :type to_logger: bool\n :param timestamp: if set, force the log event timestamp\n :return: a monitoring_log Brok\n :rtype: alignak.brok.Brok\n " ]
Please provide a description of the function:
def want_service_notification(self, notifways, timeperiods,
                              timestamp, state, n_type, business_impact, cmd=None):
    if not self.service_notifications_enabled:
        return False

    # If we are in downtime, we do not want notification
    for downtime_id in self.downtimes:
        downtime = self.downtimes[downtime_id]
        if downtime.is_in_effect:
            self.in_scheduled_downtime = True
            return False
    self.in_scheduled_downtime = False

    # Now the rest is for sub notificationways. If one is OK, we are ok
    # We will filter in another phase
    for notifway_id in self.notificationways:
        notifway = notifways[notifway_id]
        nw_b = notifway.want_service_notification(timeperiods, timestamp,
                                                  state, n_type, business_impact, cmd)
        if nw_b:
            return True

    # Oh... no one is ok for it? so no, sorry
    return False
[ "Check if notification options match the state of the service\n\n :param timestamp: time we want to notify the contact (usually now)\n :type timestamp: int\n :param state: host or service state (\"WARNING\", \"CRITICAL\" ..)\n :type state: str\n :param n_type: type of notification (\"PROBLEM\", \"RECOVERY\" ..)\n :type n_type: str\n :param business_impact: impact of this service\n :type business_impact: int\n :param cmd: command launched to notify the contact\n :type cmd: str\n :return: True if contact wants notification, otherwise False\n :rtype: bool\n " ]
Please provide a description of the function:
def want_host_notification(self, notifways, timeperiods,
                           timestamp, state, n_type, business_impact, cmd=None):
    if not self.host_notifications_enabled:
        return False

    # If we are in downtime, we do not want notification
    for downtime in self.downtimes:
        if downtime.is_in_effect:
            self.in_scheduled_downtime = True
            return False
    self.in_scheduled_downtime = False

    # Now it's all for sub notificationways. If one is OK, we are OK
    # We will filter in another phase
    for notifway_id in self.notificationways:
        notifway = notifways[notifway_id]
        nw_b = notifway.want_host_notification(timeperiods, timestamp,
                                               state, n_type, business_impact, cmd)
        if nw_b:
            return True

    # Oh, nobody..so NO :)
    return False
[ "Check if notification options match the state of the host\n\n :param timestamp: time we want to notify the contact (usually now)\n :type timestamp: int\n :param state: host or service state (\"UP\", \"DOWN\" ..)\n :type state: str\n :param n_type: type of notification (\"PROBLEM\", \"RECOVERY\" ..)\n :type n_type: str\n :param business_impact: impact of this host\n :type business_impact: int\n :param cmd: command launch to notify the contact\n :type cmd: str\n :return: True if contact wants notification, otherwise False\n :rtype: bool\n " ]
Please provide a description of the function:
def get_notification_commands(self, notifways, n_type, command_name=False):
    res = []
    for notifway_id in self.notificationways:
        notifway = notifways[notifway_id]
        res.extend(notifway.get_notification_commands(n_type))

    # Update inner notification commands property with command name or command
    if command_name:
        setattr(self, n_type + '_notification_commands', [c.get_name() for c in res])
    else:
        setattr(self, n_type + '_notification_commands', res)

    return res
[ "Get notification commands for object type\n\n :param notifways: list of alignak.objects.NotificationWay objects\n :type notifways: NotificationWays\n :param n_type: object type (host or service)\n :type n_type: string\n :param command_name: True to update the inner property with the name of the command,\n False to update with the Command objects list\n :type command_name: bool\n :return: command list\n :rtype: list[alignak.objects.command.Command]\n " ]
Please provide a description of the function:
def is_correct(self):
    state = True
    cls = self.__class__

    # Internal checks before executing inherited function...

    # There is a case where there is no nw: when there is not special_prop defined
    # at all!!
    if not self.notificationways:
        for prop in self.special_properties:
            if not hasattr(self, prop):
                msg = "[contact::%s] %s property is missing" % (self.get_name(), prop)
                self.add_error(msg)
                state = False

    if not hasattr(self, 'contact_name'):
        if hasattr(self, 'alias'):
            # Use the alias if we miss the contact_name
            self.contact_name = self.alias

    for char in cls.illegal_object_name_chars:
        if char not in self.contact_name:
            continue
        msg = "[contact::%s] %s character not allowed in contact_name" \
              % (self.get_name(), char)
        self.add_error(msg)
        state = False

    return super(Contact, self).is_correct() and state
[ "Check if this object configuration is correct ::\n\n * Check our own specific properties\n * Call our parent class is_correct checker\n\n :return: True if the configuration is correct, otherwise False\n :rtype: bool\n " ]
Please provide a description of the function:
def raise_enter_downtime_log_entry(self):
    brok = make_monitoring_log(
        'info',
        "CONTACT DOWNTIME ALERT: %s;STARTED; "
        "Contact has entered a period of scheduled downtime" % self.get_name()
    )
    self.broks.append(brok)
[ "Raise CONTACT DOWNTIME ALERT entry (info level)\n Format is : \"CONTACT DOWNTIME ALERT: *get_name()*;STARTED;\n Contact has entered a period of scheduled downtime\"\n Example : \"CONTACT DOWNTIME ALERT: test_contact;STARTED;\n Contact has entered a period of scheduled downtime\"\n\n :return: None\n " ]
Please provide a description of the function:
def raise_exit_downtime_log_entry(self):
    brok = make_monitoring_log(
        'info',
        "CONTACT DOWNTIME ALERT: %s;STOPPED; "
        "Contact has exited from a period of scheduled downtime" % self.get_name()
    )
    self.broks.append(brok)
[ "Raise CONTACT DOWNTIME ALERT entry (info level)\n Format is : \"CONTACT DOWNTIME ALERT: *get_name()*;STOPPED;\n Contact has entered a period of scheduled downtime\"\n Example : \"CONTACT DOWNTIME ALERT: test_contact;STOPPED;\n Contact has entered a period of scheduled downtime\"\n\n :return: None\n " ]
Please provide a description of the function:
def raise_cancel_downtime_log_entry(self):
    brok = make_monitoring_log(
        'info',
        "CONTACT DOWNTIME ALERT: %s;CANCELLED; "
        "Scheduled downtime for contact has been cancelled." % self.get_name()
    )
    self.broks.append(brok)
[ "Raise CONTACT DOWNTIME ALERT entry (info level)\n Format is : \"CONTACT DOWNTIME ALERT: *get_name()*;CANCELLED;\n Contact has entered a period of scheduled downtime\"\n Example : \"CONTACT DOWNTIME ALERT: test_contact;CANCELLED;\n Contact has entered a period of scheduled downtime\"\n\n :return: None\n " ]
Please provide a description of the function:
def linkify(self, commands, notificationways):
    self.linkify_with_notificationways(notificationways)
    self.linkify_command_list_with_commands(commands, 'service_notification_commands')
    self.linkify_command_list_with_commands(commands, 'host_notification_commands')
[ "Create link between objects::\n\n * contacts -> notificationways\n\n :param notificationways: notificationways to link\n :type notificationways: alignak.objects.notificationway.Notificationways\n :return: None\n TODO: Clean this function\n " ]
Please provide a description of the function:
def linkify_with_notificationways(self, notificationways):
    for i in self:
        if not hasattr(i, 'notificationways'):
            continue

        new_notificationways = []
        for nw_name in strip_and_uniq(i.notificationways):
            notifway = notificationways.find_by_name(nw_name)
            if notifway is not None:
                new_notificationways.append(notifway.uuid)
            else:
                err = "The 'notificationways' of the %s '%s' named '%s' is unknown!" %\
                      (i.__class__.my_type, i.get_name(), nw_name)
                i.add_error(err)

        # Get the list, but first make elements unique
        i.notificationways = list(set(new_notificationways))

        # Update the contact host/service notification commands properties
        i.get_notification_commands(notificationways, 'host', command_name=True)
        i.get_notification_commands(notificationways, 'service', command_name=True)
[ "Link hosts with realms\n\n :param notificationways: notificationways object to link with\n :type notificationways: alignak.objects.notificationway.Notificationways\n :return: None\n " ]
Please provide a description of the function:
def explode(self, contactgroups, notificationways):
    # Contactgroups property need to be fulfill for got the information
    self.apply_partial_inheritance('contactgroups')

    # _special properties maybe came from a template, so
    # import them before grok ourselves
    for prop in Contact.special_properties:
        if prop == 'contact_name':
            continue
        self.apply_partial_inheritance(prop)

    # Register ourselves into the contactsgroups we are in
    for contact in self:
        if not (hasattr(contact, 'contact_name') and hasattr(contact, 'contactgroups')):
            continue
        for contactgroup in contact.contactgroups:
            contactgroups.add_member(contact.contact_name, contactgroup.strip())

    # Now create a notification way with the simple parameter of the contacts
    for contact in self:
        need_notificationway = False
        params = {}
        for param in Contact.simple_way_parameters:
            if hasattr(contact, param):
                need_notificationway = True
                params[param] = getattr(contact, param)
            elif contact.properties[param].has_default:
                # put a default text value
                # Remove the value and put a default value
                setattr(contact, param, contact.properties[param].default)

        if need_notificationway:
            cname = getattr(contact, 'contact_name', getattr(contact, 'alias', ''))
            nw_name = cname + '_inner_nw'
            notificationways.new_inner_member(nw_name, params)

            if not hasattr(contact, 'notificationways'):
                contact.notificationways = [nw_name]
            else:
                contact.notificationways = list(contact.notificationways)
                contact.notificationways.append(nw_name)
[ "Explode all contact for each contactsgroup\n\n :param contactgroups: contactgroups to explode\n :type contactgroups: alignak.objects.contactgroup.Contactgroups\n :param notificationways: notificationways to explode\n :type notificationways: alignak.objects.notificationway.Notificationways\n :return: None\n " ]
Please provide a description of the function:
def zlib_processor(entity):  # pragma: no cover, not used in the testing environment...
    if not entity.headers.get(ntou("Content-Length"), ntou("")):
        raise cherrypy.HTTPError(411)

    body = entity.fp.read()
    try:
        body = zlib.decompress(body)
    except zlib.error:
        raise cherrypy.HTTPError(400, 'Invalid zlib data')
    try:
        raw_params = json.loads(body)
    except ValueError:
        raise cherrypy.HTTPError(400, 'Invalid JSON document in zlib data')

    try:
        params = {}
        for key, value in list(raw_params.items()):
            params[key] = unserialize(value.encode("utf8"))
    except TypeError:
        raise cherrypy.HTTPError(400, 'Invalid serialized data in JSON document')
    except AlignakClassLookupException as exp:
        cherrypy.HTTPError(400, 'Cannot un-serialize data received: %s' % exp)

    # Now that all values have been successfully parsed and decoded,
    # apply them to the entity.params dict.
    for key, value in list(params.items()):
        if key in entity.params:
            if not isinstance(entity.params[key], list):
                entity.params[key] = [entity.params[key]]
            entity.params[key].append(value)
        else:
            entity.params[key] = value
[ "Read application/zlib data and put content into entity.params for later use.\n\n :param entity: cherrypy entity\n :type entity: cherrypy._cpreqbody.Entity\n :return: None\n " ]
Please provide a description of the function:
def get_instance(mod_conf):
    logger.info("Giving an instance of %s for alias: %s",
                mod_conf.python_name, mod_conf.module_alias)

    return InnerRetention(mod_conf)
[ "\n Return a module instance for the modules manager\n\n :param mod_conf: the module properties as defined globally in this file\n :return:\n " ]
Please provide a description of the function:def hook_load_retention(self, scheduler): # pylint: disable=too-many-locals, too-many-branches if not self.enabled: logger.warning("Alignak retention module is not enabled." "Loading objects state is not possible.") return None if self.retention_file and not os.path.isfile(self.retention_file): logger.info("The configured state retention file (%s) does not exist. " "Loading objects state is not available.", self.retention_file) return None if self.retention_dir and not os.path.isdir(self.retention_dir): logger.info("The configured state retention directory (%s) does not exist. " "Loading objects state is not available.", self.retention_dir) return None all_data = {'hosts': {}, 'services': {}} retention_files = [] if self.retention_file: retention_files = [self.retention_file] else: if self.retention_dir: for root, _, walk_files in os.walk(self.retention_dir, followlinks=True): for found_file in walk_files: if not re.search(r"\.json$", found_file): continue retention_files.append(os.path.join(root, found_file)) logger.debug("Loading retention files: %s ", retention_files) if retention_files: logger.info("Loading retention data from %d files", len(retention_files)) start_time = time.time() for retention_file in retention_files: # Get data from the retention files try: logger.debug('Loading data from: %s', retention_file) with open(retention_file, "r") as fd: response = json.load(fd) if not isinstance(response, list): response = [response] # Is looks like a list of host dictionaries ? if isinstance(response[0], dict) and 'name' in response[0]: logger.debug('Loaded: %s', response) else: logger.info("Supposed retention file %s is not correctly encoded! Got: %s", retention_file, response[0]) continue except Exception as exp: # pylint: disable=broad-except # pragma: no cover, should never happen... logger.warning("Error when loading retention data from %s", retention_file) logger.exception(exp) else: for host in response: hostname = host['name'] service_key = 'services' if 'retention_services' in host: service_key = 'retention_services' if service_key in host: for service in host[service_key]: all_data['services'][(host['name'], service)] = \ host[service_key][service] all_data['hosts'][hostname] = host logger.debug('- loaded: %s', host) try: logger.info('%d hosts loaded from retention', len(all_data['hosts'])) self.statsmgr.counter('retention-load.hosts', len(all_data['hosts'])) logger.info('%d services loaded from retention', len(all_data['services'])) self.statsmgr.counter('retention-load.services', len(all_data['services'])) self.statsmgr.timer('retention-load.time', time.time() - start_time) # Restore the scheduler objects scheduler.restore_retention_data(all_data) logger.info("Retention data loaded in %s seconds", (time.time() - start_time)) except Exception as exp: # pylint: disable=broad-except logger.warning("Retention load failed: %s", exp) logger.exception(exp) return False return True
[ "Load retention data from a file\n\n :param scheduler: scheduler instance of alignak\n :type scheduler: object\n :return: None\n " ]
Please provide a description of the function:
def hook_save_retention(self, scheduler):
    if not self.enabled:
        logger.warning("Alignak retention module is not enabled."
                       "Saving objects state is not possible.")
        return None

    try:
        start_time = time.time()

        # Get retention data from the scheduler
        data_to_save = scheduler.get_retention_data()
        if not data_to_save:
            logger.warning("Alignak retention data to save are not containing any information.")
            return None

        # Move services data to their respective hosts dictionary
        # Alignak scheduler do not merge the services into the host dictionary!
        for host_name in data_to_save['hosts']:
            data_to_save['hosts'][host_name]['services'] = {}
            data_to_save['hosts'][host_name]['name'] = host_name
        for host_name, service_description in data_to_save['services']:
            data_to_save['hosts'][host_name]['services'][service_description] = \
                data_to_save['services'][(host_name, service_description)]

        try:
            if not self.retention_file:
                logger.info('Saving retention data to: %s', self.retention_dir)
                for host_name in data_to_save['hosts']:
                    file_name = os.path.join(self.retention_dir,
                                             self.retention_file,
                                             "%s.json" % host_name)
                    with open(file_name, "w") as fd:
                        fd.write(json.dumps(data_to_save['hosts'][host_name],
                                            indent=2, separators=(',', ': '),
                                            sort_keys=True))
                    logger.debug('- saved: %s', file_name)
                logger.info('Saved')
            else:
                logger.info('Saving retention data to: %s', self.retention_file)
                with open(self.retention_file, "w") as fd:
                    fd.write(json.dumps(data_to_save['hosts'],
                                        indent=2, separators=(',', ': '),
                                        sort_keys=True))
                logger.info('Saved')
        except Exception as exp:  # pylint: disable=broad-except
            # pragma: no cover, should never happen...
            logger.warning("Error when saving retention data to %s", self.retention_file)
            logger.exception(exp)

        logger.info('%d hosts saved in retention', len(data_to_save['hosts']))
        self.statsmgr.counter('retention-save.hosts', len(data_to_save['hosts']))
        logger.info('%d services saved in retention', len(data_to_save['services']))
        self.statsmgr.counter('retention-save.services', len(data_to_save['services']))
        self.statsmgr.timer('retention-save.time', time.time() - start_time)

        logger.info("Retention data saved in %s seconds", (time.time() - start_time))
    except Exception as exp:  # pylint: disable=broad-except
        self.enabled = False
        logger.warning("Retention save failed: %s", exp)
        logger.exception(exp)
        return False

    return True
[ "Save retention data to a Json formated file\n\n :param scheduler: scheduler instance of alignak\n :type scheduler: object\n :return: None\n " ]
Please provide a description of the function:
def get_check_command(self, timeperiods, t_to_go):
    if not self.check_period or timeperiods[self.check_period].is_time_valid(t_to_go):
        return self.check_command
    return None
[ "Get the check_command if we are in the check period modulation\n\n :param t_to_go: time to check if we are in the timeperiod\n :type t_to_go:\n :return: A check command if we are in the check period, None otherwise\n :rtype: alignak.objects.command.Command\n " ]
Please provide a description of the function:
def is_correct(self):
    state = True

    # Internal checks before executing inherited function...
    if not hasattr(self, 'check_command'):
        msg = "[checkmodulation::%s] do not have any check_command defined" % (
            self.get_name()
        )
        self.add_error(msg)
        state = False
    else:
        if self.check_command is None:
            msg = "[checkmodulation::%s] a check_command is missing" % (self.get_name())
            self.add_error(msg)
            state = False
        if self.check_command and not self.check_command.is_valid():
            msg = "[checkmodulation::%s] a check_command is invalid" % (self.get_name())
            self.add_error(msg)
            state = False

    # Ok just put None as check_period, means 24x7
    if not hasattr(self, 'check_period'):
        self.check_period = None

    return super(CheckModulation, self).is_correct() and state
[ "Check if this object configuration is correct ::\n\n * Check our own specific properties\n * Call our parent class is_correct checker\n\n :return: True if the configuration is correct, otherwise False\n :rtype: bool\n " ]
Please provide a description of the function:
def linkify(self, timeperiods, commands):
    self.linkify_with_timeperiods(timeperiods, 'check_period')
    self.linkify_one_command_with_commands(commands, 'check_command')
[ "Replace check_period by real Timeperiod object into each CheckModulation\n Replace check_command by real Command object into each CheckModulation\n\n :param timeperiods: timeperiods to link to\n :type timeperiods: alignak.objects.timeperiod.Timeperiods\n :param commands: commands to link to\n :type commands: alignak.objects.command.Commands\n :return: None\n " ]
Please provide a description of the function:
def new_inner_member(self, name=None, params=None):
    if name is None:
        name = 'Generated_checkmodulation_%s' % uuid.uuid4()

    if params is None:
        params = {}

    params['checkmodulation_name'] = name
    checkmodulation = CheckModulation(params)
    self.add_item(checkmodulation)
[ "Create a CheckModulation object and add it to items\n\n :param name: CheckModulation name\n :type name: str\n :param params: parameters to init CheckModulation\n :type params: dict\n :return: None\n TODO: Remove this default mutable argument. Usually result in unexpected behavior\n " ]
Please provide a description of the function:
def open(self):
    if not self._is_connected:
        print("Connecting to arduino on {}... ".format(self.device), end="")
        self.comm = serial.Serial()
        self.comm.port = self.device
        self.comm.baudrate = self.baud_rate
        self.comm.timeout = self.timeout
        self.dtr = self.enable_dtr
        self.comm.open()

        time.sleep(self.settle_time)

        self._is_connected = True
        print("done.")
[ "\n Open the serial connection.\n " ]
Please provide a description of the function:
def close(self):
    if self._is_connected:
        self.comm.close()
        self._is_connected = False
[ "\n Close serial connection.\n " ]
Please provide a description of the function:
def send(self, cmd, *args, arg_formats=None):
    # Turn the command into an integer.
    try:
        command_as_int = self._cmd_name_to_int[cmd]
    except KeyError:
        err = "Command '{}' not recognized.\n".format(cmd)
        raise ValueError(err)

    # Figure out what formats to use for each argument.
    arg_format_list = []
    if arg_formats != None:
        # The user specified formats
        arg_format_list = list(arg_formats)
    else:
        try:
            # See if class was initialized with a format for arguments to this
            # command
            arg_format_list = self._cmd_name_to_format[cmd]
        except KeyError:
            # if not, guess for all arguments
            arg_format_list = ["g" for i in range(len(args))]

    # Deal with "*" format
    arg_format_list = self._treat_star_format(arg_format_list, args)

    if len(args) > 0:
        if len(arg_format_list) != len(args):
            err = "Number of argument formats must match the number of arguments."
            raise ValueError(err)

    # Go through each argument and create a bytes representation in the
    # proper format to send. Escape appropriate characters.
    fields = ["{}".format(command_as_int).encode("ascii")]
    for i, a in enumerate(args):
        fields.append(self._send_methods[arg_format_list[i]](a))
        fields[-1] = self._escape_re.sub(self._byte_escape_sep + r"\1".encode("ascii"),
                                         fields[-1])

    # Make something that looks like cmd,field1,field2,field3;
    compiled_bytes = self._byte_field_sep.join(fields) + self._byte_command_sep

    # Send the message.
    self.board.write(compiled_bytes)
[ "\n Send a command (which may or may not have associated arguments) to an \n arduino using the CmdMessage protocol. The command and any parameters\n should be passed as direct arguments to send. \n\n arg_formats is an optional string that specifies the formats to use for\n each argument when passed to the arduino. If specified here,\n arg_formats supercedes formats specified on initialization. \n " ]
Please provide a description of the function:def receive(self,arg_formats=None): # Read serial input until a command separator or empty character is # reached msg = [[]] raw_msg = [] escaped = False command_sep_found = False while True: tmp = self.board.read() raw_msg.append(tmp) if escaped: # Either drop the escape character or, if this wasn't really # an escape, keep previous escape character and new character if tmp in self._escaped_characters: msg[-1].append(tmp) escaped = False else: msg[-1].append(self._byte_escape_sep) msg[-1].append(tmp) escaped = False else: # look for escape character if tmp == self._byte_escape_sep: escaped = True # or field separator elif tmp == self._byte_field_sep: msg.append([]) # or command separator elif tmp == self._byte_command_sep: command_sep_found = True break # or any empty characater elif tmp == b'': break # okay, must be something else: msg[-1].append(tmp) # No message received given timeouts if len(msg) == 1 and len(msg[0]) == 0: return None # Make sure the message terminated properly if not command_sep_found: # empty message (likely from line endings being included) joined_raw = b''.join(raw_msg) if joined_raw.strip() == b'': return None err = "Incomplete message ({})".format(joined_raw.decode()) raise EOFError(err) # Turn message into fields fields = [b''.join(m) for m in msg] # Get the command name. cmd = fields[0].strip().decode() try: cmd_name = self._int_to_cmd_name[int(cmd)] except (ValueError,IndexError): if self.give_warnings: cmd_name = "unknown" w = "Recieved unrecognized command ({}).".format(cmd) warnings.warn(w,Warning) # Figure out what formats to use for each argument. arg_format_list = [] if arg_formats != None: # The user specified formats arg_format_list = list(arg_formats) else: try: # See if class was initialized with a format for arguments to this # command arg_format_list = self._cmd_name_to_format[cmd_name] except KeyError: # if not, guess for all arguments arg_format_list = ["g" for i in range(len(fields[1:]))] # Deal with "*" format arg_format_list = self._treat_star_format(arg_format_list,fields[1:]) if len(fields[1:]) > 0: if len(arg_format_list) != len(fields[1:]): err = "Number of argument formats must match the number of recieved arguments." raise ValueError(err) received = [] for i, f in enumerate(fields[1:]): received.append(self._recv_methods[arg_format_list[i]](f)) # Record the time the message arrived message_time = time.time() return cmd_name, received, message_time
[ "\n Recieve commands coming off the serial port. \n\n arg_formats is an optimal keyword that specifies the formats to use to\n parse incoming arguments. If specified here, arg_formats supercedes\n the formats specified on initialization. \n " ]
Please provide a description of the function:
def _treat_star_format(self, arg_format_list, args):
    num_stars = len([a for a in arg_format_list if a == "*"])
    if num_stars > 0:

        # Make sure the repeated format argument only occurs once, is last,
        # and that there is at least one format in addition to it.
        if num_stars == 1 and arg_format_list[-1] == "*" and len(arg_format_list) > 1:

            # Trim * from end
            arg_format_list = arg_format_list[:-1]

            # If we need extra arguments...
            if len(arg_format_list) < len(args):
                f = arg_format_list[-1]
                len_diff = len(args) - len(arg_format_list)
                tmp = list(arg_format_list)
                tmp.extend([f for i in range(len_diff)])
                arg_format_list = "".join(tmp)
        else:
            err = "'*' format must occur only once, be at end of string, and be preceded by at least one other format."
            raise ValueError(err)

    return arg_format_list
[ "\n Deal with \"*\" format if specified.\n " ]
Please provide a description of the function:
def _send_char(self, value):
    if type(value) != str and type(value) != bytes:
        err = "char requires a string or bytes array of length 1"
        raise ValueError(err)

    if len(value) != 1:
        err = "char must be a single character, not \"{}\"".format(value)
        raise ValueError(err)

    if type(value) != bytes:
        value = value.encode("ascii")

    if value in self._escaped_characters:
        err = "Cannot send a control character as a single char to arduino. Send as string instead."
        raise OverflowError(err)

    return struct.pack('c', value)
[ "\n Convert a single char to a bytes object.\n " ]
Please provide a description of the function:
def _send_byte(self, value):
    # Coerce to int. This will throw a ValueError if the value can't
    # actually be converted.
    if type(value) != int:
        new_value = int(value)
        if self.give_warnings:
            w = "Coercing {} into int ({})".format(value, new_value)
            warnings.warn(w, Warning)
        value = new_value

    # Range check
    if value > 255 or value < 0:
        err = "Value {} exceeds the size of the board's byte.".format(value)
        raise OverflowError(err)

    return struct.pack("B", value)
[ "\n Convert a numerical value into an integer, then to a byte object. Check\n bounds for byte.\n " ]
Please provide a description of the function:
def _send_int(self, value):
    # Coerce to int. This will throw a ValueError if the value can't
    # actually be converted.
    if type(value) != int:
        new_value = int(value)
        if self.give_warnings:
            w = "Coercing {} into int ({})".format(value, new_value)
            warnings.warn(w, Warning)
        value = new_value

    # Range check
    if value > self.board.int_max or value < self.board.int_min:
        err = "Value {} exceeds the size of the board's int.".format(value)
        raise OverflowError(err)

    return struct.pack(self.board.int_type, value)
[ "\n Convert a numerical value into an integer, then to a bytes object Check\n bounds for signed int.\n " ]
Please provide a description of the function:
def _send_unsigned_int(self, value):
    # Coerce to int. This will throw a ValueError if the value can't
    # actually be converted.
    if type(value) != int:
        new_value = int(value)
        if self.give_warnings:
            w = "Coercing {} into int ({})".format(value, new_value)
            warnings.warn(w, Warning)
        value = new_value

    # Range check
    if value > self.board.unsigned_int_max or value < self.board.unsigned_int_min:
        err = "Value {} exceeds the size of the board's unsigned int.".format(value)
        raise OverflowError(err)

    return struct.pack(self.board.unsigned_int_type, value)
[ "\n Convert a numerical value into an integer, then to a bytes object. Check\n bounds for unsigned int.\n " ]
Please provide a description of the function:
def _send_long(self, value):
    # Coerce to int. This will throw a ValueError if the value can't
    # actually be converted.
    if type(value) != int:
        new_value = int(value)
        if self.give_warnings:
            w = "Coercing {} into int ({})".format(value, new_value)
            warnings.warn(w, Warning)
        value = new_value

    # Range check
    if value > self.board.long_max or value < self.board.long_min:
        err = "Value {} exceeds the size of the board's long.".format(value)
        raise OverflowError(err)

    return struct.pack(self.board.long_type, value)
[ "\n Convert a numerical value into an integer, then to a bytes object. Check\n bounds for signed long.\n " ]
Please provide a description of the function:
def _send_unsigned_long(self, value):
    # Coerce to int. This will throw a ValueError if the value can't
    # actually be converted.
    if type(value) != int:
        new_value = int(value)
        if self.give_warnings:
            w = "Coercing {} into int ({})".format(value, new_value)
            warnings.warn(w, Warning)
        value = new_value

    # Range check
    if value > self.board.unsigned_long_max or value < self.board.unsigned_long_min:
        err = "Value {} exceeds the size of the board's unsigned long.".format(value)
        raise OverflowError(err)

    return struct.pack(self.board.unsigned_long_type, value)
[ "\n Convert a numerical value into an integer, then to a bytes object. \n Check bounds for unsigned long.\n " ]
Please provide a description of the function:
def _send_float(self, value):
    # convert to float. this will throw a ValueError if the type is not
    # readily converted
    if type(value) != float:
        value = float(value)

    # Range check
    if value > self.board.float_max or value < self.board.float_min:
        err = "Value {} exceeds the size of the board's float.".format(value)
        raise OverflowError(err)

    return struct.pack(self.board.float_type, value)
[ "\n Return a float as a IEEE 754 format bytes object.\n " ]
Please provide a description of the function:
def _send_double(self, value):
    # convert to float. this will throw a ValueError if the type is not
    # readily converted
    if type(value) != float:
        value = float(value)

    # Range check
    if value > self.board.float_max or value < self.board.float_min:
        err = "Value {} exceeds the size of the board's float.".format(value)
        raise OverflowError(err)

    return struct.pack(self.board.double_type, value)
[ "\n Return a float as a IEEE 754 format bytes object.\n " ]
Please provide a description of the function:
def _send_string(self, value):
    if type(value) != bytes:
        value = "{}".format(value).encode("ascii")
    return value
[ "\n Convert a string to a bytes object. If value is not a string, it is\n be converted to one with a standard string.format call. \n " ]
Please provide a description of the function:
def _send_bool(self, value):
    # Sanity check.
    if type(value) != bool and value not in [0, 1]:
        err = "{} is not boolean.".format(value)
        raise ValueError(err)

    return struct.pack("?", value)
[ "\n Convert a boolean value into a bytes object. Uses 0 and 1 as output.\n " ]
Please provide a description of the function:
def _send_guess(self, value):
    if type(value) != str and type(value) != bytes and self.give_warnings:
        w = "Warning: Sending {} as a string. This can give wildly incorrect values. Consider specifying a format and sending binary data.".format(value)
        warnings.warn(w, Warning)

    if type(value) == float:
        return "{:.10e}".format(value).encode("ascii")
    elif type(value) == bool:
        return "{}".format(int(value)).encode("ascii")
    else:
        return self._send_string(value)
[ "\n Send the argument as a string in a way that should (probably, maybe!) be\n processed properly by C++ calls like atoi, atof, etc. This method is\n NOT RECOMMENDED, particularly for floats, because values are often \n mangled silently. Instead, specify a format (e.g. \"f\") and use the \n CmdMessenger::readBinArg<CAST> method (e.g. c.readBinArg<float>();) to\n read the values on the arduino side.\n " ]
Please provide a description of the function:
def _recv_string(self, value):
    s = value.decode('ascii')

    # Strip null characters
    s = s.strip("\x00")

    # Strip other white space
    s = s.strip()

    return s
[ "\n Recieve a binary (bytes) string, returning a python string.\n " ]
Please provide a description of the function:
def makePickle(self, record):
    gelf_dict = self._make_gelf_dict(record)
    packed = self._pack_gelf_dict(gelf_dict)
    pickle = zlib.compress(packed) if self.compress else packed
    return pickle
[ "Convert a :class:`logging.LogRecord` into a bytes object\n representing a GELF log\n\n :param record: :class:`logging.LogRecord` to convert into a\n Graylog GELF log.\n :type record: logging.LogRecord\n\n :return: A bytes object representing a GELF log.\n :rtype: bytes\n " ]
Please provide a description of the function:
def _make_gelf_dict(self, record):
    # construct the base GELF format
    gelf_dict = {
        'version': "1.0",
        'host': BaseGELFHandler._resolve_host(self.fqdn, self.localname),
        'short_message': self.formatter.format(record) if self.formatter else record.getMessage(),
        'timestamp': record.created,
        'level': SYSLOG_LEVELS.get(record.levelno, record.levelno),
        'facility': self.facility or record.name,
    }

    # add in specified optional extras
    self._add_full_message(gelf_dict, record)

    if self.level_names:
        self._add_level_names(gelf_dict, record)

    if self.facility is not None:
        self._set_custom_facility(gelf_dict, self.facility, record)

    if self.debugging_fields:
        self._add_debugging_fields(gelf_dict, record)

    if self.extra_fields:
        self._add_extra_fields(gelf_dict, record)

    return gelf_dict
[ "Create a dictionary representing a Graylog GELF log from a\n python :class:`logging.LogRecord`\n\n :param record: :class:`logging.LogRecord` to create a Graylog GELF\n log from.\n :type record: logging.LogRecord\n\n :return: dictionary representing a Graylog GELF log.\n :rtype: dict\n " ]
Please provide a description of the function:
def _add_full_message(gelf_dict, record):
    # if a traceback exists add it to the log as the full_message field
    full_message = None

    # format exception information if present
    if record.exc_info:
        full_message = '\n'.join(traceback.format_exception(*record.exc_info))

    # use pre-formatted exception information in cases where the primary
    # exception information was removed, eg. for LogRecord serialization
    if record.exc_text:
        full_message = record.exc_text

    if full_message:
        gelf_dict["full_message"] = full_message
[ "Add the ``full_message`` field to the ``gelf_dict`` if any\n traceback information exists within the logging record\n\n :param gelf_dict: dictionary representation of a GELF log.\n :type gelf_dict: dict\n\n :param record: :class:`logging.LogRecord` to extract a full\n logging message from to insert into the given ``gelf_dict``.\n :type record: logging.LogRecord\n " ]
Please provide a description of the function:
def _resolve_host(fqdn, localname):
    if fqdn:
        return socket.getfqdn()
    elif localname is not None:
        return localname
    return socket.gethostname()
[ "Resolve the ``host`` GELF field\n\n :param fqdn: Boolean indicating whether to use :meth:`socket.getfqdn`\n to obtain the ``host`` GELF field.\n :type fqdn: bool\n\n :param localname: Use specified hostname as the ``host`` GELF field.\n :type localname: str or None\n\n :return: String value representing the ``host`` GELF field.\n :rtype: str\n " ]
Please provide a description of the function:
def _add_debugging_fields(gelf_dict, record):
    gelf_dict.update({
        'file': record.pathname,
        'line': record.lineno,
        '_function': record.funcName,
        '_pid': record.process,
        '_thread_name': record.threadName,
    })

    # record.processName was added in Python 2.6.2
    pn = getattr(record, 'processName', None)
    if pn is not None:
        gelf_dict['_process_name'] = pn
[ "Add debugging fields to the given ``gelf_dict``\n\n :param gelf_dict: dictionary representation of a GELF log.\n :type gelf_dict: dict\n\n :param record: :class:`logging.LogRecord` to extract debugging\n fields from to insert into the given ``gelf_dict``.\n :type record: logging.LogRecord\n " ]
Please provide a description of the function:
def _add_extra_fields(gelf_dict, record):
    # skip_list is used to filter additional fields in a log message.
    skip_list = (
        'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
        'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module',
        'msecs', 'message', 'msg', 'name', 'pathname', 'process',
        'processName', 'relativeCreated', 'thread', 'threadName')

    for key, value in record.__dict__.items():
        if key not in skip_list and not key.startswith('_'):
            gelf_dict['_%s' % key] = value
[ "Add extra fields to the given ``gelf_dict``\n\n However, this does not add additional fields in to ``message_dict``\n that are either duplicated from standard :class:`logging.LogRecord`\n attributes, duplicated from the python logging module source\n (e.g. ``exc_text``), or violate GLEF format (i.e. ``id``).\n\n .. seealso::\n\n The list of standard :class:`logging.LogRecord` attributes can be\n found at:\n\n http://docs.python.org/library/logging.html#logrecord-attributes\n\n :param gelf_dict: dictionary representation of a GELF log.\n :type gelf_dict: dict\n\n :param record: :class:`logging.LogRecord` to extract extra fields\n from to insert into the given ``gelf_dict``.\n :type record: logging.LogRecord\n " ]
Please provide a description of the function:
def _pack_gelf_dict(gelf_dict):
    gelf_dict = BaseGELFHandler._sanitize_to_unicode(gelf_dict)
    packed = json.dumps(
        gelf_dict,
        separators=',:',
        default=BaseGELFHandler._object_to_json
    )
    return packed.encode('utf-8')
[ "Convert a given ``gelf_dict`` to a JSON-encoded string, thus,\n creating an uncompressed GELF log ready for consumption by Graylog.\n\n Since we cannot be 100% sure of what is contained in the ``gelf_dict``\n we have to do some sanitation.\n\n :param gelf_dict: dictionary representation of a GELF log.\n :type gelf_dict: dict\n\n :return: A prepped JSON-encoded GELF log as a bytes string\n encoded in UTF-8.\n :rtype: bytes\n " ]
Please provide a description of the function:
def _sanitize_to_unicode(obj):
    if isinstance(obj, dict):
        return dict((BaseGELFHandler._sanitize_to_unicode(k),
                     BaseGELFHandler._sanitize_to_unicode(v))
                    for k, v in obj.items())
    if isinstance(obj, (list, tuple)):
        return obj.__class__([BaseGELFHandler._sanitize_to_unicode(i) for i in obj])
    if isinstance(obj, data):
        obj = obj.decode('utf-8', errors='replace')
    return obj
[ "Convert all strings records of the object to unicode\n\n :param obj: object to sanitize to unicode.\n :type obj: object\n\n :return: Unicode string representation of the given object.\n :rtype: str\n " ]
Please provide a description of the function:
def _object_to_json(obj):
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    return repr(obj)
[ "Convert objects that cannot be natively serialized into JSON\n into their string representation\n\n For datetime based objects convert them into their ISO formatted\n string as specified by :meth:`datetime.datetime.isoformat`.\n\n :param obj: object to convert into a JSON via getting its string\n representation.\n :type obj: object\n\n :return: String value representing the given object ready to be\n encoded into a JSON.\n :rtype: str\n " ]
Please provide a description of the function:
def makeSocket(self, timeout=1):
    plain_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    if hasattr(plain_socket, 'settimeout'):
        plain_socket.settimeout(timeout)

    wrapped_socket = ssl.wrap_socket(
        plain_socket,
        ca_certs=self.ca_certs,
        cert_reqs=self.reqs,
        keyfile=self.keyfile,
        certfile=self.certfile
    )
    wrapped_socket.connect((self.host, self.port))

    return wrapped_socket
[ "Override SocketHandler.makeSocket, to allow creating wrapped\n TLS sockets" ]
Please provide a description of the function:
def emit(self, record):
    pickle = self.makePickle(record)
    connection = httplib.HTTPConnection(
        host=self.host,
        port=self.port,
        timeout=self.timeout
    )
    connection.request('POST', self.path, pickle, self.headers)
[ "Convert a :class:`logging.LogRecord` to GELF and emit it to Graylog\n via an HTTP POST request\n\n :param record: :class:`logging.LogRecord` to convert into a\n Graylog GELF log and emit to Graylog via HTTP POST.\n :type record: logging.LogRecord\n " ]
Please provide a description of the function:
def to_unicode(string):
    if isinstance(string, six.binary_type):
        return string.decode('utf8')
    if isinstance(string, six.text_type):
        return string
    if six.PY2:
        return unicode(string)
    return str(string)
[ "\n Ensure a passed string is unicode\n " ]
Please provide a description of the function:
def to_utf8(string):
    if isinstance(string, six.text_type):
        return string.encode('utf8')
    if isinstance(string, six.binary_type):
        return string
    return str(string)
[ "\n Encode a string as a UTF8 bytestring. This function could be passed a\n bytestring or unicode string so must distinguish between the two.\n " ]
Please provide a description of the function:
def dict_to_unicode(raw_dict):
    decoded = {}
    for key, value in raw_dict.items():
        decoded[to_unicode(key)] = map(to_unicode, value)
    return decoded
[ "\n Ensure all keys and values in a dict are unicode.\n\n The passed dict is assumed to have lists for all values.\n " ]
Please provide a description of the function:
def unicode_urlencode(query, doseq=True):
    pairs = []
    for key, value in query.items():
        if isinstance(value, list):
            value = list(map(to_utf8, value))
        else:
            value = to_utf8(value)
        pairs.append((to_utf8(key), value))

    encoded_query = dict(pairs)
    xx = urlencode(encoded_query, doseq)
    return xx
[ "\n Custom wrapper around urlencode to support unicode\n\n Python urlencode doesn't handle unicode well so we need to convert to\n bytestrings before using it:\n http://stackoverflow.com/questions/6480723/urllib-urlencode-doesnt-like-unicode-values-how-about-this-workaround\n " ]
Please provide a description of the function:
def parse(url_str):
    url_str = to_unicode(url_str)

    result = urlparse(url_str)

    netloc_parts = result.netloc.rsplit('@', 1)
    if len(netloc_parts) == 1:
        username = password = None
        host = netloc_parts[0]
    else:
        user_and_pass = netloc_parts[0].split(':')
        if len(user_and_pass) == 2:
            username, password = user_and_pass
        elif len(user_and_pass) == 1:
            username = user_and_pass[0]
            password = None
        host = netloc_parts[1]

    if host and ':' in host:
        host = host.split(':')[0]

    return {'host': host,
            'username': username,
            'password': password,
            'scheme': result.scheme,
            'port': result.port,
            'path': result.path,
            'query': result.query,
            'fragment': result.fragment}
[ "\n Extract all parts from a URL string and return them as a dictionary\n " ]
Please provide a description of the function:
def netloc(self):
    url = self._tuple
    if url.username and url.password:
        netloc = '%s:%s@%s' % (url.username, url.password, url.host)
    elif url.username and not url.password:
        netloc = '%s@%s' % (url.username, url.host)
    else:
        netloc = url.host
    if url.port:
        netloc = '%s:%s' % (netloc, url.port)
    return netloc
[ "\n Return the netloc\n " ]
Please provide a description of the function:
def host(self, value=None):
    if value is not None:
        return URL._mutate(self, host=value)
    return self._tuple.host
[ "\n Return the host\n\n :param string value: new host string\n " ]
Please provide a description of the function:
def username(self, value=None):
    if value is not None:
        return URL._mutate(self, username=value)
    return unicode_unquote(self._tuple.username)
[ "\n Return or set the username\n\n :param string value: the new username to use\n :returns: string or new :class:`URL` instance\n " ]
Please provide a description of the function:
def password(self, value=None):
    if value is not None:
        return URL._mutate(self, password=value)
    return unicode_unquote(self._tuple.password)
[ "\n Return or set the password\n\n :param string value: the new password to use\n :returns: string or new :class:`URL` instance\n " ]
Please provide a description of the function:
def subdomains(self, value=None):
    if value is not None:
        return URL._mutate(self, host='.'.join(value))
    return self.host().split('.')
[ "\n Returns a list of subdomains or set the subdomains and returns a\n new :class:`URL` instance.\n\n :param list value: a list of subdomains\n " ]
Please provide a description of the function:
def subdomain(self, index, value=None):
    if value is not None:
        subdomains = self.subdomains()
        subdomains[index] = value
        return URL._mutate(self, host='.'.join(subdomains))
    return self.subdomains()[index]
[ "\n Return a subdomain or set a new value and return a new :class:`URL`\n instance.\n\n :param integer index: 0-indexed subdomain\n :param string value: New subdomain\n " ]
Please provide a description of the function:
def scheme(self, value=None):
    if value is not None:
        return URL._mutate(self, scheme=value)
    return self._tuple.scheme
[ "\n Return or set the scheme.\n\n :param string value: the new scheme to use\n :returns: string or new :class:`URL` instance\n " ]
Please provide a description of the function:
def path(self, value=None):
    if value is not None:
        if not value.startswith('/'):
            value = '/' + value
        encoded_value = unicode_quote(value)
        return URL._mutate(self, path=encoded_value)
    return self._tuple.path
[ "\n Return or set the path\n\n :param string value: the new path to use\n :returns: string or new :class:`URL` instance\n " ]
Please provide a description of the function:
def query(self, value=None):
    if value is not None:
        return URL._mutate(self, query=value)
    return self._tuple.query
[ "\n Return or set the query string\n\n :param string value: the new query string to use\n :returns: string or new :class:`URL` instance\n " ]
Please provide a description of the function:
def port(self, value=None):
    if value is not None:
        return URL._mutate(self, port=value)
    return self._tuple.port
[ "\n Return or set the port\n\n :param string value: the new port to use\n :returns: string or new :class:`URL` instance\n " ]
Please provide a description of the function:
def fragment(self, value=None):
    if value is not None:
        return URL._mutate(self, fragment=value)
    return unicode_unquote(self._tuple.fragment)
[ "\n Return or set the fragment (hash)\n\n :param string value: the new fragment to use\n :returns: string or new :class:`URL` instance\n " ]
Please provide a description of the function:
def path_segment(self, index, value=None, default=None):
    if value is not None:
        segments = list(self.path_segments())
        segments[index] = unicode_quote_path_segment(value)
        new_path = '/' + '/'.join(segments)
        if self._tuple.path.endswith('/'):
            new_path += '/'
        return URL._mutate(self, path=new_path)
    try:
        return self.path_segments()[index]
    except IndexError:
        return default
[ "\n Return the path segment at the given index\n\n :param integer index:\n :param string value: the new segment value\n :param string default: the default value to return if no path segment exists with the given index\n " ]
Please provide a description of the function:
def path_segments(self, value=None):
    if value is not None:
        encoded_values = map(unicode_quote_path_segment, value)
        new_path = '/' + '/'.join(encoded_values)
        return URL._mutate(self, path=new_path)
    parts = self._tuple.path.split('/')
    segments = parts[1:]
    if self._tuple.path.endswith('/'):
        segments.pop()
    segments = map(unicode_unquote, segments)
    return tuple(segments)
[ "\n Return the path segments\n\n :param list value: the new path segments to use\n " ]
Please provide a description of the function:
def add_path_segment(self, value):
    segments = self.path_segments() + (to_unicode(value),)
    return self.path_segments(segments)
[ "\n Add a new path segment to the end of the current string\n\n :param string value: the new path segment to use\n\n Example::\n\n >>> u = URL('http://example.com/foo/')\n >>> u.add_path_segment('bar').as_string()\n 'http://example.com/foo/bar'\n " ]
Please provide a description of the function:
def query_param(self, key, value=None, default=None, as_list=False):
    parse_result = self.query_params()
    if value is not None:
        # Need to ensure all strings are unicode
        if isinstance(value, (list, tuple)):
            value = list(map(to_unicode, value))
        else:
            value = to_unicode(value)
        parse_result[to_unicode(key)] = value
        return URL._mutate(
            self, query=unicode_urlencode(parse_result, doseq=True))

    try:
        result = parse_result[key]
    except KeyError:
        return default
    if as_list:
        return result
    return result[0] if len(result) == 1 else result
[ "\n Return or set a query parameter for the given key\n\n The value can be a list.\n\n :param string key: key to look for\n :param string default: value to return if ``key`` isn't found\n :param boolean as_list: whether to return the values as a list\n :param string value: the new query parameter to use\n " ]
Please provide a description of the function:
def append_query_param(self, key, value):
    values = self.query_param(key, as_list=True, default=[])
    values.append(value)
    return self.query_param(key, values)
[ "\n Append a query parameter\n\n :param string key: The query param key\n :param string value: The new value\n " ]
Please provide a description of the function:
def query_params(self, value=None):
    if value is not None:
        return URL._mutate(self, query=unicode_urlencode(value, doseq=True))
    query = '' if self._tuple.query is None else self._tuple.query

    # In Python 2.6, urlparse needs a bytestring so we encode and then
    # decode the result.
    if not six.PY3:
        result = parse_qs(to_utf8(query), True)
        return dict_to_unicode(result)

    return parse_qs(query, True)
[ "\n Return or set a dictionary of query params\n\n :param dict value: new dictionary of values\n " ]
Please provide a description of the function:
def remove_query_param(self, key, value=None):
    parse_result = self.query_params()
    if value is not None:
        index = parse_result[key].index(value)
        del parse_result[key][index]
    else:
        del parse_result[key]
    return URL._mutate(self, query=unicode_urlencode(parse_result, doseq=True))
[ "\n Remove a query param from a URL\n\n Set the value parameter if removing from a list.\n\n :param string key: The key to delete\n :param string value: The value of the param to delete (of more than one)\n " ]
Please provide a description of the function:
def expand(template, variables=None):
    if variables is None:
        variables = {}
    return patterns.sub(functools.partial(_replace, variables), template)
[ "\n Expand a URL template string using the passed variables\n " ]
Please provide a description of the function:
def _format_pair_no_equals(explode, separator, escape, key, value):
    if not value:
        return key
    return _format_pair(explode, separator, escape, key, value)
[ "\n Format a key, value pair but don't include the equals sign\n when there is no value\n " ]
Please provide a description of the function:
def _format_pair_with_equals(explode, separator, escape, key, value):
    if not value:
        return key + '='
    return _format_pair(explode, separator, escape, key, value)
[ "\n Format a key, value pair including the equals sign\n when there is no value\n " ]
Please provide a description of the function:
def _split_basic(string):
    tuples = []
    for word in string.split(','):

        # Attempt to split on colon
        parts = word.split(':', 2)
        key, modifier_fn, explode = parts[0], _identity, False

        if len(parts) > 1:
            modifier_fn = functools.partial(_truncate, num_chars=int(parts[1]))

        if word[len(word) - 1] == '*':
            key = word[:len(word) - 1]
            explode = True

        tuples.append((key, modifier_fn, explode))
    return tuples
[ "\n Split a string into a list of tuples of the form (key, modifier_fn,\n explode) where modifier_fn is a function that applies the appropriate\n modification to the variable.\n " ]
Please provide a description of the function:
def _replace(variables, match):
    expression = match.group(1)

    # Look-up chars and functions for the specified operator
    (prefix_char, separator_char, split_fn, escape_fn,
     format_fn) = operator_map.get(expression[0], defaults)

    replacements = []
    for key, modify_fn, explode in split_fn(expression):
        if key in variables:
            variable = modify_fn(variables[key])
            replacement = format_fn(explode, separator_char, escape_fn, key, variable)
            replacements.append(replacement)

    if not replacements:
        return ''

    return prefix_char + separator_char.join(replacements)
[ "\n Return the appropriate replacement for `match` using the passed variables\n " ]
Please provide a description of the function:
def predict(self, document_path: str, model_name: str, consent_id: str = None) -> Prediction:
    content_type = self._get_content_type(document_path)
    consent_id = consent_id or str(uuid4())
    document_id = self._upload_document(document_path, content_type, consent_id)
    prediction_response = self.post_predictions(document_id, model_name)
    return Prediction(document_id, consent_id, model_name, prediction_response)
[ "Run inference and create prediction on document.\n This method takes care of creating and uploading a document specified by document_path.\n as well as running inference using model specified by model_name to create prediction on the document.\n\n >>> from las import ApiClient\n >>> api_client = ApiClient(endpoint='<api endpoint>')\n >>> api_client.predict(document_path='document.jpeg', model_name='invoice')\n\n :param document_path: Path to document to run inference on\n :type document_path: str\n :param model_name: The name of the model to use for inference\n :type model_name: str\n :param consent_id: An identifier to mark the owner of the document handle\n :type consent_id: str\n :return: Prediction on document\n :rtype: Prediction\n :raises InvalidCredentialsException: If the credentials are invalid\n :raises TooManyRequestsException: If limit of requests per second is reached\n :raises LimitExceededException: If limit of total requests per month is reached\n :raises requests.exception.RequestException: If error was raised by requests\n " ]
Please provide a description of the function:def send_feedback(self, document_id: str, feedback: List[Field]) -> dict: return self.post_document_id(document_id, feedback)
[ "Send feedback to the model.\n This method takes care of sending feedback related to document specified by document_id.\n Feedback consists of ground truth values for the document specified as a list of Field instances.\n\n >>> from las import ApiClient\n >>> api_client = ApiClient(endpoint='<api endpoint>')\n >>> feedback = [Field(label='total_amount', value='120.00'), Field(label='purchase_date', value='2019-03-10')]\n >>> api_client.send_feedback('<document id>', feedback)\n\n :param document_id: The document id of the document that will receive the feedback\n :type document_id: str\n :param feedback: A list of :py:class:`~las.Field` representing the ground truth values for the document\n :type feedback: List[Field]\n :return: Feedback response\n :rtype: dict\n :raises InvalidCredentialsException: If the credentials are invalid\n :raises TooManyRequestsException: If limit of requests per second is reached\n :raises LimitExceededException: If limit of total requests per month is reached\n :raises requests.exception.RequestException: If error was raised by requests\n " ]
Please provide a description of the function:def extra_what(file, h=None): tests = [] def test_pdf(h, f): if b'PDF' in h[0:10]: return 'pdf' tests.append(test_pdf) f = None try: if h is None: if isinstance(file, (str, PathLike)): f = open(file, 'rb') h = f.read(32) else: location = file.tell() h = file.read(32) file.seek(location) for tf in tests: res = tf(h, f) if res: return res finally: if f: f.close() return None
[ "Code mostly copied from imghdr.what" ]
Please provide a description of the function:def post_documents(self, content_type: str, consent_id: str) -> dict: body = json.dumps({'contentType': content_type, 'consentId': consent_id}).encode() uri, headers = self._create_signing_headers('POST', '/documents', body) post_documents_response = requests.post( url=uri.geturl(), headers=headers, data=body ) response = _json_decode(post_documents_response) return response
[ "Creates a document handle, calls the POST /documents endpoint.\n\n >>> from las import Client\n >>> client = Client(endpoint='<api endpoint>')\n >>> client.post_documents('image/jpeg', consent_id='foobar')\n\n :param content_type: A mime type for the document handle\n :type content_type: str\n :param consent_id: An identifier to mark the owner of the document handle\n :type consent_id: str\n :return: Document handle id and pre-signed upload url\n :rtype: dict\n :raises InvalidCredentialsException: If the credentials are invalid\n :raises TooManyRequestsException: If limit of requests per second is reached\n :raises LimitExceededException: If limit of total requests per month is reached\n :raises requests.exception.RequestException: If error was raised by requests\n " ]
Please provide a description of the function:def put_document(document_path: str, content_type: str, presigned_url: str) -> str: body = pathlib.Path(document_path).read_bytes() headers = {'Content-Type': content_type} put_document_response = requests.put(presigned_url, data=body, headers=headers) put_document_response.raise_for_status() return put_document_response.content.decode()
[ "Convenience method for putting a document to presigned url.\n\n >>> from las import Client\n >>> client = Client(endpoint='<api endpoint>')\n >>> client.put_document(document_path='document.jpeg', content_type='image/jpeg',\n >>> presigned_url='<presigned url>')\n\n :param document_path: Path to document to upload\n :type document_path: str\n :param content_type: Mime type of document to upload. Same as provided to :py:func:`~las.Client.post_documents`\n :type content_type: str\n :param presigned_url: Presigned upload url from :py:func:`~las.Client.post_documents`\n :type presigned_url: str\n :return: Response from put operation\n :rtype: str\n :raises requests.exception.RequestException: If error was raised by requests\n " ]
Please provide a description of the function:def post_predictions(self, document_id: str, model_name: str) -> dict: body = json.dumps({'documentId': document_id, 'modelName': model_name}).encode() uri, headers = self._create_signing_headers('POST', '/predictions', body) post_predictions_response = requests.post( url=uri.geturl(), headers=headers, data=body ) response = _json_decode(post_predictions_response) return response
[ "Run inference and create a prediction, calls the POST /predictions endpoint.\n\n >>> from las import Client\n >>> client = Client(endpoint='<api endpoint>')\n >>> client.post_predictions(document_id='<document id>', model_name='invoice')\n\n :param document_id: The document id to run inference and create a prediction on\n :type document_id: str\n :param model_name: The name of the model to use for inference\n :type model_name: str\n :return: Prediction on document\n :rtype: dict\n :raises InvalidCredentialsException: If the credentials are invalid\n :raises TooManyRequestsException: If limit of requests per second is reached\n :raises LimitExceededException: If limit of total requests per month is reached\n :raises requests.exception.RequestException: If error was raised by requests\n " ]
Please provide a description of the function:def post_document_id(self, document_id: str, feedback: List[Dict[str, str]]) -> dict: body = json.dumps({'feedback': feedback}).encode() uri, headers = self._create_signing_headers('POST', f'/documents/{document_id}', body) post_document_id_response = requests.post( url=uri.geturl(), headers=headers, data=body ) response = _json_decode(post_document_id_response) return response
[ "Post feedback to the REST API, calls the POST /documents/{documentId} endpoint.\n Posting feedback means posting the ground truth data for the particular document.\n This enables the API to learn from past mistakes.\n\n >>> from las import Client\n >>> client = Client(endpoint='<api endpoint>')\n >>> feedback = [{'label': 'total_amount', 'value': '156.00'}, {'label': 'invoice_date', 'value': '2018-10-23'}]\n >>> client.post_document_id(document_id='<document id>', feedback=feedback)\n\n :param document_id: The document id to run inference and create a prediction on\n :type document_id: str\n :param feedback: A list of feedback items\n :type feedback: List[Dict[str, str]]\n :return: Feedback response from REST API\n :rtype: dict\n :raises InvalidCredentialsException: If the credentials are invalid\n :raises TooManyRequestsException: If limit of requests per second is reached\n :raises LimitExceededException: If limit of total requests per month is reached\n :raises requests.exception.RequestException: If error was raised by requests\n " ]
Please provide a description of the function:def delete_consent_id(self, consent_id: str) -> dict: body = json.dumps({}).encode() uri, headers = self._create_signing_headers('DELETE', f'/consents/{consent_id}', body) delete_consent_id_consent = requests.delete( url=uri.geturl(), headers=headers, data=body ) response = _json_decode(delete_consent_id_consent) return response
[ "Delete documents with this consent_id, calls the DELETE /consent/{consentId} endpoint.\n\n >>> from las import Client\n >>> client = Client(endpoint='<api endpoint>')\n >>> client.delete_consent_id('<consent id>')\n\n :param consent_id: Delete documents with this consent_id\n :type consent_id: str\n :return: Delete consent id response from REST API\n :rtype: dict\n :raises InvalidCredentialsException: If the credentials are invalid\n :raises TooManyRequestsException: If limit of requests per second is reached\n :raises LimitExceededException: If limit of total requests per month is reached\n :raises requests.exception.RequestException: If error was raised by requests\n " ]
Please provide a description of the function:def get_expiration(self): exp = self._get_int('expiration') if exp is not None: return datetime.datetime.fromtimestamp( exp ) return None
[ "Returns the expiration date.\n\n :returns: expiration date\n :rtype: datetime object\n " ]
Please provide a description of the function:def login(self, user_id, password): self._session = requests.session() self._session.verify = self._verify_certs self._session.auth = (user_id, password) try: self._update_capabilities() url_components = parse.urlparse(self.url) if self._dav_endpoint_version == 1: self._davpath = url_components.path + 'remote.php/dav/files/' + parse.quote(user_id) self._webdav_url = self.url + 'remote.php/dav/files/' + parse.quote(user_id) else: self._davpath = url_components.path + 'remote.php/webdav' self._webdav_url = self.url + 'remote.php/webdav' except HTTPResponseError as e: self._session.close() self._session = None raise e
[ "Authenticate to ownCloud.\n This will create a session on the server.\n\n :param user_id: user id\n :param password: password\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def file_info(self, path): res = self._make_dav_request('PROPFIND', path, headers={'Depth': '0'}) if res: return res[0] return None
[ "Returns the file info for the given remote file\n\n :param path: path to the remote file\n :returns: file info\n :rtype: :class:`FileInfo` object or `None` if file\n was not found\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def list(self, path, depth=1): if not path.endswith('/'): path += '/' headers = {} if isinstance(depth, int) or depth == "infinity": headers['Depth'] = str(depth) res = self._make_dav_request('PROPFIND', path, headers=headers) # first one is always the root, remove it from listing if res: return res[1:] return None
[ "Returns the listing/contents of the given remote directory\n\n :param path: path to the remote directory\n :param depth: depth of the listing, integer or \"infinity\"\n :returns: directory listing\n :rtype: array of :class:`FileInfo` objects\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def get_file_contents(self, path): path = self._normalize_path(path) res = self._session.get( self._webdav_url + parse.quote(self._encode_string(path)) ) if res.status_code == 200: return res.content elif res.status_code >= 400: raise HTTPResponseError(res) return False
[ "Returns the contents of a remote file\n\n :param path: path to the remote file\n :returns: file contents\n :rtype: binary data\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def get_directory_as_zip(self, remote_path, local_file): remote_path = self._normalize_path(remote_path) url = self.url + 'index.php/apps/files/ajax/download.php?dir=' \ + parse.quote(remote_path) res = self._session.get(url, stream=True) if res.status_code == 200: if local_file is None: # use downloaded file name from Content-Disposition # targetFile = res.headers['content-disposition'] local_file = os.path.basename(remote_path) file_handle = open(local_file, 'wb', 8192) for chunk in res.iter_content(8192): file_handle.write(chunk) file_handle.close() return True elif res.status_code >= 400: raise HTTPResponseError(res) return False
[ "Downloads a remote directory as zip\n\n :param remote_path: path to the remote directory to download\n :param local_file: path and name of the target local file\n :returns: True if the operation succeeded, False otherwise\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def put_file(self, remote_path, local_source_file, **kwargs): if kwargs.get('chunked', True): return self._put_file_chunked( remote_path, local_source_file, **kwargs ) stat_result = os.stat(local_source_file) headers = {} if kwargs.get('keep_mtime', True): headers['X-OC-MTIME'] = str(int(stat_result.st_mtime)) if remote_path[-1] == '/': remote_path += os.path.basename(local_source_file) file_handle = open(local_source_file, 'rb', 8192) res = self._make_dav_request( 'PUT', remote_path, data=file_handle, headers=headers ) file_handle.close() return res
[ "Upload a file\n\n :param remote_path: path to the target file. A target directory can\n also be specified instead by appending a \"/\"\n :param local_source_file: path to the local file to upload\n :param chunked: (optional) use file chunking (defaults to True)\n :param chunk_size: (optional) chunk size in bytes, defaults to 10 MB\n :param keep_mtime: (optional) also update the remote file to the same\n mtime as the local one, defaults to True\n :returns: True if the operation succeeded, False otherwise\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def put_directory(self, target_path, local_directory, **kwargs): target_path = self._normalize_path(target_path) if not target_path.endswith('/'): target_path += '/' gathered_files = [] if not local_directory.endswith('/'): local_directory += '/' basedir = os.path.basename(local_directory[0: -1]) + '/' # gather files to upload for path, _, files in os.walk(local_directory): gathered_files.append( (path, basedir + path[len(local_directory):], files) ) for path, remote_path, files in gathered_files: self.mkdir(target_path + remote_path + '/') for name in files: if not self.put_file(target_path + remote_path + '/', path + '/' + name, **kwargs): return False return True
[ "Upload a directory with all its contents\n\n :param target_path: path of the directory to upload into\n :param local_directory: path to the local directory to upload\n :param \\*\\*kwargs: optional arguments that ``put_file`` accepts\n :returns: True if the operation succeeded, False otherwise\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def _put_file_chunked(self, remote_path, local_source_file, **kwargs): chunk_size = kwargs.get('chunk_size', 10 * 1024 * 1024) result = True transfer_id = int(time.time()) remote_path = self._normalize_path(remote_path) if remote_path.endswith('/'): remote_path += os.path.basename(local_source_file) stat_result = os.stat(local_source_file) file_handle = open(local_source_file, 'rb', 8192) file_handle.seek(0, os.SEEK_END) size = file_handle.tell() file_handle.seek(0) headers = {} if kwargs.get('keep_mtime', True): headers['X-OC-MTIME'] = str(int(stat_result.st_mtime)) if size == 0: return self._make_dav_request( 'PUT', remote_path, data='', headers=headers ) chunk_count = int(math.ceil(float(size) / float(chunk_size))) if chunk_count > 1: headers['OC-CHUNKED'] = '1' for chunk_index in range(0, int(chunk_count)): data = file_handle.read(chunk_size) if chunk_count > 1: chunk_name = '%s-chunking-%s-%i-%i' % \ (remote_path, transfer_id, chunk_count, chunk_index) else: chunk_name = remote_path if not self._make_dav_request( 'PUT', chunk_name, data=data, headers=headers ): result = False break file_handle.close() return result
[ "Uploads a file using chunks. If the file is smaller than\n ``chunk_size`` it will be uploaded directly.\n\n :param remote_path: path to the target file. A target directory can\n also be specified instead by appending a \"/\"\n :param local_source_file: path to the local file to upload\n :param \\*\\*kwargs: optional arguments that ``put_file`` accepts\n :returns: True if the operation succeeded, False otherwise\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def list_open_remote_share(self): res = self._make_ocs_request( 'GET', self.OCS_SERVICE_SHARE, 'remote_shares/pending' ) if res.status_code == 200: tree = ET.fromstring(res.content) self._check_ocs_status(tree) shares = [] for element in tree.find('data').iter('element'): share_attr = {} for child in element: key = child.tag value = child.text share_attr[key] = value shares.append(share_attr) return shares raise HTTPResponseError(res)
[ "List all pending remote shares\n\n :returns: array of pending remote shares\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def accept_remote_share(self, share_id): if not isinstance(share_id, int): return False res = self._make_ocs_request( 'POST', self.OCS_SERVICE_SHARE, 'remote_shares/pending/' + str(share_id) ) if res.status_code == 200: return res raise HTTPResponseError(res)
[ "Accepts a remote share\n\n :param share_id: Share ID (int)\n :returns: True if the operation succeeded, False otherwise\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def update_share(self, share_id, **kwargs): perms = kwargs.get('perms', None) password = kwargs.get('password', None) public_upload = kwargs.get('public_upload', None) if (isinstance(perms, int)) and (perms > self.OCS_PERMISSION_ALL): perms = None if not (perms or password or (public_upload is not None)): return False if not isinstance(share_id, int): return False data = {} if perms: data['permissions'] = perms if isinstance(password, six.string_types): data['password'] = password if (public_upload is not None) and (isinstance(public_upload, bool)): data['publicUpload'] = str(public_upload).lower() res = self._make_ocs_request( 'PUT', self.OCS_SERVICE_SHARE, 'shares/' + str(share_id), data=data ) if res.status_code == 200: return True raise HTTPResponseError(res)
[ "Updates a given share\n\n :param share_id: (int) Share ID\n :param perms: (int) update permissions (see share_file_with_user() below)\n :param password: (string) updated password for public link Share\n :param public_upload: (boolean) enable/disable public upload for public shares\n :returns: True if the operation succeeded, False otherwise\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def share_file_with_link(self, path, **kwargs): perms = kwargs.get('perms', None) public_upload = kwargs.get('public_upload', 'false') password = kwargs.get('password', None) path = self._normalize_path(path) post_data = { 'shareType': self.OCS_SHARE_TYPE_LINK, 'path': self._encode_string(path), } if (public_upload is not None) and (isinstance(public_upload, bool)): post_data['publicUpload'] = str(public_upload).lower() if isinstance(password, six.string_types): post_data['password'] = password if perms: post_data['permissions'] = perms res = self._make_ocs_request( 'POST', self.OCS_SERVICE_SHARE, 'shares', data=post_data ) if res.status_code == 200: tree = ET.fromstring(res.content) self._check_ocs_status(tree) data_el = tree.find('data') return ShareInfo( { 'id': data_el.find('id').text, 'path': path, 'url': data_el.find('url').text, 'token': data_el.find('token').text } ) raise HTTPResponseError(res)
[ "Shares a remote file with link\n\n :param path: path to the remote file to share\n :param perms (optional): permission of the shared object\n defaults to read only (1)\n :param public_upload (optional): allows users to upload files or folders\n :param password (optional): sets a password\n http://doc.owncloud.org/server/6.0/admin_manual/sharing_api/index.html\n :returns: instance of :class:`ShareInfo` with the share info\n or False if the operation failed\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def is_shared(self, path): # make sure that the path exist - if not, raise HTTPResponseError self.file_info(path) try: result = self.get_shares(path) if result: return len(result) > 0 except OCSResponseError as e: if e.status_code != 404: raise e return False return False
[ "Checks whether a path is already shared\n\n :param path: path to the share to be checked\n :returns: True if the path is already shared, else False\n :raises: HTTPResponseError in case an HTTP error status was returned\n " ]
Please provide a description of the function:def get_share(self, share_id): if (share_id is None) or not (isinstance(share_id, int)): return None res = self._make_ocs_request( 'GET', self.OCS_SERVICE_SHARE, 'shares/' + str(share_id) ) if res.status_code == 200: tree = ET.fromstring(res.content) self._check_ocs_status(tree) return self._get_shareinfo(tree.find('data').find('element')) raise HTTPResponseError(res)
[ "Returns share information about known share\n\n :param share_id: id of the share to be checked\n :returns: instance of ShareInfo class\n :raises: ResponseError in case an HTTP error status was returned\n " ]