def wrap(sig):
    if isclass(sig) and issubclass(sig, Object):
        return TypeSignature(sig)
    elif isinstance(sig, TypeSignature):
        return sig
Convert a Python class into a type signature.
def create(type_dict, *type_parameters):
    name, parameters = type_parameters
    for param in parameters:
        assert isinstance(param, tuple)
    typemap = dict((attr, TypeSignature.deserialize(param, type_dict))
                   for attr, param in parameters)
    attributes = {'TYPEMAP': typemap}
    return TypeMetaclass(str(name), (Structural,), attributes)
StructFactory.create(*type_parameters) expects:

    class name,
    ((binding requirement1,), (binding requirement2, bound_to_scope), ...),
    ((attribute_name1, attribute_sig1 (serialized)),
     (attribute_name2, attribute_sig2 ...),
     ...
     (attribute_nameN, ...))
def trigger_modified(self, filepath):
    mod_time = self._get_modified_time(filepath)
    if mod_time > self._watched_files.get(filepath, 0):
        self._trigger('modified', filepath)
        self._watched_files[filepath] = mod_time
Triggers the modified event if the given filepath's modification time is newer than the last one recorded.
def trigger_created(self, filepath):
    if os.path.exists(filepath):
        self._trigger('created', filepath)
Triggers the created event if the file exists.
def trigger_deleted(self, filepath):
    if not os.path.exists(filepath):
        self._trigger('deleted', filepath)
Triggers the deleted event if the file doesn't exist.
def log(self, *message):
    if self._logger is None:
        return
    s = " ".join([str(m) for m in message])
    self._logger.write(s + '\n')
    self._logger.flush()
Logs a message to the configured IO stream, if available.
def _trigger(self, event_name, *args, **kwargs):
    self.log('event: %s' % event_name, *args)
    for f in self._events[event_name]:
        f(*args, **kwargs)
Triggers the given event, forwarding *args and **kwargs to each observer.
def default_validator(self, filepath):
    return filepath.endswith('.py') and \
        not os.path.basename(filepath).startswith('.')
The default validator only accepts files ending in .py (and not prefixed by a period).
def in_repo(self, filepath):
    filepath = set(filepath.replace('\\', '/').split('/'))
    for p in ('.git', '.hg', '.svn', '.cvs', '.bzr'):
        if p in filepath:
            return True
    return False
Returns True if the filepath lies inside a version-control directory. These are excluded because they occasionally cause exceptions.
def is_valid_type(self, filepath):
    if self.in_repo(filepath):
        return False
    validators = self._validators
    if len(validators) == 0:
        validators = [self.default_validator]
    if any([hasattr(v, 'runnable') for v in self._validators]):
        # case where we select the runnable function by the validator
        for validator in validators:
            if validator(filepath):
                if hasattr(validator, 'runnable'):
                    self._scent.set_runner(validator.runnable)
                return True
        return False
    for validator in validators:
        if not validator(filepath):
            return False
    return True
Returns True if the given filepath is a valid watchable filetype. The filepath can be assumed to be a file (not a directory).
def _modify_event(self, event_name, method, func):
    if event_name not in self.ALL_EVENTS:
        raise TypeError(('event_name ("%s") can only be one of the '
                         'following: %s') % (event_name, repr(self.ALL_EVENTS)))
    if not isinstance(func, collections.Callable):
        raise TypeError(('func must be callable to be added as an '
                         'observer.'))
    getattr(self._events[event_name], method)(func)
Wrapper that invokes the given list method on the observer list for one of the events.
def observe(self, event_name, func):
    if isinstance(event_name, list) or isinstance(event_name, tuple):
        for name in event_name:
            self.observe(name, func)
        return
    self.log(func.__name__, "attached to", event_name)
    self._modify_event(event_name, 'append', func)
event_name := {'created', 'modified', 'deleted'}, list, tuple. Attaches a function to run on a particular event. The function must be unique to be removed cleanly. Alternatively, event_name can be a list/tuple of the string possibilities to attach the function to multiple events at once.
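A minimal usage sketch, assuming `scanner` is an already-constructed Scanner instance (the variable name is an assumption, not part of the source above):

def announce(filepath):
    # Observers receive the event's positional arguments.
    print('file event:', filepath)

scanner.observe('modified', announce)              # single event
scanner.observe(('created', 'deleted'), announce)  # several events at once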
def _watch_file(self, filepath, trigger_event=True):
    is_new = filepath not in self._watched_files
    if trigger_event:
        if is_new:
            self.trigger_created(filepath)
        else:
            self.trigger_modified(filepath)
    try:
        self._watched_files[filepath] = self._get_modified_time(filepath)
    except OSError:
        return
Adds the file's modified time into its internal watchlist.
def _unwatch_file(self, filepath, trigger_event=True):
    if filepath not in self._watched_files:
        return
    if trigger_event:
        self.trigger_deleted(filepath)
    del self._watched_files[filepath]
Removes the file from the internal watchlist if it exists.
def _is_modified(self, filepath):
    if self._is_new(filepath):
        return False
    mtime = self._get_modified_time(filepath)
    return self._watched_files[filepath] < mtime
Returns True if the file has been modified since last seen. Will return False if the file has not been seen before.
def loop(self, sleep_time=1, callback=None):
    self.log("No supported libraries found: using polling-method.")
    self._running = True
    self.trigger_init()
    self._scan(trigger=False)  # put after the trigger
    if self._warn:
        print("""
You should install a third-party library so I don't eat CPU.
Supported libraries are:
- pyinotify (Linux)
- pywin32 (Windows)
- MacFSEvents (OSX)
Use pip or easy_install and install one of those libraries above.
""")
    while self._running:
        self._scan()
        if isinstance(callback, collections.Callable):
            callback()
        time.sleep(sleep_time)
Goes into a blocking IO loop. If polling is used, the sleep_time is the interval, in seconds, between polls.
def _scan(self, trigger=True):
    changed = False
    files_seen = set()
    os_path_join = os.path.join
    for path in self.paths:
        for root, dirs, files in os.walk(path):
            for f in files:
                fpath = os_path_join(root, f)
                if not self.is_valid_type(fpath):
                    continue
                files_seen.add(fpath)
                if self._requires_new_modtime(fpath):
                    self._watch_file(fpath, trigger)
                    changed = True
    # Iterate over a copy of the keys: _unwatch_file mutates the dict.
    for f in list(self._watched_files):
        if f not in files_seen:
            self._unwatch_file(f, trigger)
            changed = True
    return changed
Walks through the directory to look for changes of the given file types. Returns True if changes occurred (False otherwise). Returns None if polling method isn't being used.
def run(sniffer_instance=None, wait_time=0.5, clear=True, args=(), debug=False):
    if sniffer_instance is None:
        sniffer_instance = ScentSniffer()
    if debug:
        scanner = Scanner(
            sniffer_instance.watch_paths, scent=sniffer_instance.scent,
            logger=sys.stdout)
    else:
        scanner = Scanner(
            sniffer_instance.watch_paths, scent=sniffer_instance.scent)
    #sniffer = sniffer_cls(tuple(args), clear, debug)
    sniffer_instance.set_up(tuple(args), clear, debug)
    sniffer_instance.observe_scanner(scanner)
    scanner.loop(wait_time)
Runs the auto tester loop. Internally, the runner instantiates the scanner class and wires it to the sniffer.

``sniffer_instance`` The sniffer instance to run, usually an instance of Sniffer or one of its subclasses. Defaults to a ScentSniffer instance. See the Sniffer class documentation for more information.
``wait_time`` The time, in seconds, to wait between polls. This is dependent on the underlying scanner implementation. OS-specific libraries may choose to ignore this parameter. Defaults to 0.5 seconds.
``clear`` Boolean. Set to True to clear the terminal before running the sniffer (that is, the unit tests). Defaults to True.
``args`` The arguments to pass to the sniffer/test runner. Defaults to ().
``debug`` Boolean. Sets the scanner and sniffer in debug mode, printing more internal information. Defaults to False (and should usually be False).
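A hedged usage sketch: start the watch loop with the default ScentSniffer, polling every second and passing a nose flag through to the test runner.

run(wait_time=1.0, args=('--verbose',))  # blocks until interrupted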
def main(sniffer_instance=None, test_args=(), progname=sys.argv[0],
         args=sys.argv[1:]):
    parser = OptionParser(version="%prog " + __version__)
    parser.add_option('-w', '--wait', dest="wait_time", metavar="TIME",
                      default=0.5, type="float",
                      help="Wait time, in seconds, before possibly rerunning "
                           "tests. (default: %default)")
    parser.add_option('--no-clear', dest="clear_on_run", default=True,
                      action="store_false",
                      help="Disable the clearing of screen")
    parser.add_option('--debug', dest="debug", default=False,
                      action="store_true",
                      help="Enable debugging output. (default: %default)")
    parser.add_option('-x', '--test-arg', dest="test_args", default=[],
                      action="append",
                      help="Arguments to pass to nose (use multiple times to "
                           "pass multiple arguments.)")
    (options, args) = parser.parse_args(args)
    test_args = test_args + tuple(options.test_args)
    if options.debug:
        print("Options:", options)
        print("Test Args:", test_args)
    try:
        print("Starting watch...")
        run(sniffer_instance, options.wait_time, options.clear_on_run,
            test_args, options.debug)
    except KeyboardInterrupt:
        print("Good bye.")
    except Exception:
        import traceback
        traceback.print_exc()
        return sys.exit(1)
    return sys.exit(0)
Runs the program. This is used when you want to run this program standalone.

``sniffer_instance`` A sniffer instance (usually of a Sniffer subclass) that hooks into the scanner and handles running the test framework. Defaults to a ScentSniffer instance.
``test_args`` This function normally extracts args from the ``--test-arg ARG`` option. A preset argument list can be passed. Defaults to an empty tuple.
``progname`` Program name. Defaults to sys.argv[0].
``args`` Command line arguments. Defaults to sys.argv[1:].
def set_up(self, test_args=(), clear=True, debug=False):
    self.test_args = test_args
    self.debug, self.clear = debug, clear
Sets properties right before calling run.

``test_args`` The arguments to pass to the test runner.
``clear`` Boolean. Set to True if we should clear console before running the tests.
``debug`` Boolean. Set to True if we want to print debugging information.
def absorb_args(self, func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        return func()
    return wrapper
Wraps a function so it is always called without arguments. The returned wrapper accepts any arguments (and throws them away).
def observe_scanner(self, scanner):
    scanner.observe(scanner.ALL_EVENTS,
                    self.absorb_args(self.modules.restore))
    if self.clear:
        scanner.observe(scanner.ALL_EVENTS,
                        self.absorb_args(self.clear_on_run))
    scanner.observe(scanner.ALL_EVENTS, self.absorb_args(self._run))
    if self.debug:
        scanner.observe('created', echo("callback - created %(file)s"))
        scanner.observe('modified', echo("callback - changed %(file)s"))
        scanner.observe('deleted', echo("callback - deleted %(file)s"))
    self._scanners.append(scanner)
Hooks into multiple events of a scanner.
def clear_on_run(self, prefix="Running Tests:"):
    if platform.system() == 'Windows':
        os.system('cls')
    else:
        os.system('clear')
    if prefix:
        print(prefix)
Clears console before running the tests.
def _run(self):
    try:
        if self.run():
            broadcaster.success(self)
        else:
            broadcaster.failure(self)
    except StandardError:  # StandardError exists on Python 2 only
        import traceback
        traceback.print_exc()
        self._stop()
        raise
    except Exception:
        self._stop()
        raise
    return True
Calls self.run(), broadcasting the result and trapping errors.
def run(self):
    try:
        import nose
        arguments = [sys.argv[0]] + list(self.test_args)
        return nose.run(argv=arguments)
    except ImportError:
        print()
        print("*** Nose library missing. Please install it. ***")
        print()
        raise
Runs the unit test framework. Can be overridden to run anything. Returns True on passing and False on failure.
def run(self):
    if not self.scent or len(self.scent.runners) == 0:
        print("Did not find 'scent.py', running nose:")
        return super(ScentSniffer, self).run()
    else:
        print("Using scent:")
        arguments = [sys.argv[0]] + list(self.test_args)
        return self.scent.run(arguments)
Runs the CWD's scent file.
def copy(self):
    self_copy = self.dup()
    self_copy._scopes = copy.copy(self._scopes)
    return self_copy
Return a copy of this object.
def bind(self, *args, **kw):
    new_self = self.copy()
    new_scopes = Object.translate_to_scopes(*args, **kw)
    new_self._scopes = tuple(reversed(new_scopes)) + new_self._scopes
    return new_self
Bind environment variables into this object's scope.
def in_scope(self, *args, **kw):
    new_self = self.copy()
    new_scopes = Object.translate_to_scopes(*args, **kw)
    new_self._scopes = new_self._scopes + new_scopes
    return new_self
Scope this object to a parent environment (like bind, but reversed).
def check(self):
    try:
        # TODO(wickman) This should probably be pushed out to the interpolate leaves.
        si, uninterp = self.interpolate()
    except (Object.CoercionError, MustacheParser.Uninterpolatable) as e:
        return TypeCheck(False, "Unable to interpolate: %s" % e)
    return self.checker(si)
Type check this object.
def restore(self):
    sys = set(self._sys_modules.keys())
    for mod_name in sys.difference(self._saved_modules):
        del self._sys_modules[mod_name]
Unloads all modules that weren't loaded when save_modules was called.
def join(cls, splits, *namables):
    isplits = []
    unbound = []
    for ref in splits:
        if isinstance(ref, Ref):
            resolved = False
            for namable in namables:
                try:
                    value = namable.find(ref)
                    resolved = True
                    break
                except Namable.Error:
                    continue
            if resolved:
                isplits.append(value)
            else:
                isplits.append(ref)
                unbound.append(ref)
        else:
            isplits.append(ref)
    return (''.join(map(str if Compatibility.PY3 else unicode, isplits)), unbound)
Interpolate strings.

:param splits: The output of Parser.split(string).
:param namables: A sequence of Namable objects in which the interpolation should take place.

Returns a 2-tuple containing: (joined string, list of unbound object ids (potentially empty)).
def Choice(*args):
    if len(args) == 2:
        name, alternatives = args
    else:
        name = "Choice_" + "_".join(a.__name__ for a in args[0])
        alternatives = args[0]
    assert isinstance(name, Compatibility.stringy)
    assert all(issubclass(t, Type) for t in alternatives)
    return TypeFactory.new({}, ChoiceFactory.PROVIDES, name,
                           tuple(t.serialize_type() for t in alternatives))
Helper function for creating new choice types. This can be called either as:

    Choice(Name, [Type1, Type2, ...])

or:

    Choice([Type1, Type2, ...])

In the latter case, the name of the new type will be autogenerated, and will look like "Choice_Type1_Type2".
def create(type_dict, *type_parameters):
    assert len(type_parameters) == 2
    name = type_parameters[0]
    alternatives = type_parameters[1]
    assert isinstance(name, Compatibility.stringy)
    assert isinstance(alternatives, (list, tuple))
    choice_types = []
    for c in alternatives:
        choice_types.append(TypeFactory.new(type_dict, *c))
    return TypeMetaclass(str(name), (ChoiceContainer,), {'CHOICES': choice_types})
type_parameters should be: (name, (alternative1, alternative2, ...)) where name is a string, and the alternatives are all valid serialized types.
def _unwrap(self, ret_fun, err_fun):
    for opt in self.CHOICES:
        if isinstance(self._value, opt):
            return ret_fun(self._value)
        else:
            try:
                o = opt(self._value)
                ret = ret_fun(o)
                if ret:
                    return ret
            except (self.CoercionError, ValueError):
                pass
    return err_fun(self._value)
Iterate over the options in the choice type, and try to perform some action on them. If the action fails (returns None or raises either CoercionError or ValueError), then it goes on to the next type.

Args:
    ret_fun: a function that takes a wrapped option value, and either returns a successful return value or fails.
    err_fun: a function that takes the unwrapped value of this choice, and generates an appropriate error.

Returns:
    the return value from a successful invocation of ret_fun on one of the type options. If every invocation fails, returns the value of invoking err_fun.
def outitem(title, elems, indent=4):
    out(title)
    max_key_len = max(len(key) for key, _ in elems) + 1
    for key, val in elems:
        key_spaced = ('%s:' % key).ljust(max_key_len)
        out('%s%s %s' % (indent * ' ', key_spaced, val))
    out()
Output a title and key/value pairs formatted as a list item.
def profile_dir(name):
    if name:
        possible_path = Path(name)
        if possible_path.exists():
            return possible_path
    profiles = list(read_profiles())
    try:
        if name:
            profile = next(p for p in profiles if p.name == name)
        else:
            profile = next(p for p in profiles if p.default)
    except StopIteration:
        raise ProfileNotFoundError(name)
    return profile.path
Return the path to a Firefox profile for a given profile name or path.
def arg(*args, **kwargs):
    metadata = {'arg_params': (args, kwargs)}
    return attrib(default=arg_default(*args, **kwargs), metadata=metadata)
Return an attrib() that can be fed as a command-line argument.

This function is a wrapper for an attr.attrib to create a corresponding command line argument for it. Use it with the same arguments as argparse's add_argument().

Example:

>>> @attrs
... class MyFeature(Feature):
...     my_number = arg('-n', '--number', default=3)
...     def run(self):
...         print('Your number:', self.my_number)

Now you could run it like `firefed myfeature --number 5`.
def arg_default(*args, **kwargs):
    parser = argparse.ArgumentParser()
    parser.add_argument(*args, **kwargs)
    args = vars(parser.parse_args([]))
    _, default = args.popitem()
    return default
Return default argument value as given by argparse's add_argument(). The argument is passed through a mocked-up argument parser. This way, we get default parameters even if the feature is called directly and not through the CLI.
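A standalone sketch of the same mechanism: argparse resolves the default when an empty argument list is parsed.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-n', '--number', default=3)
print(vars(parser.parse_args([])))  # -> {'number': 3}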
def formatter(name, default=False):
    def decorator(func):
        func._output_format = dict(name=name, default=default)
        return func
    return decorator
Decorate a Feature method to register it as an output formatter. All formatters are picked up by the argument parser so that they can be listed and selected on the CLI via the -f, --format argument.
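A hedged usage sketch; `Feature` and `self.results` are assumptions about the surrounding class, not part of the source above. The formatter would then be selectable on the CLI via -f list.

class MyFeature(Feature):

    @formatter('list', default=True)
    def build_list(self):
        # Print one result per line (self.results is a hypothetical attribute).
        for result in self.results:
            print(result)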
def load_sqlite(self, db, query=None, table=None, cls=None, column_map=None):
    if column_map is None:
        column_map = {}
    db_path = self.profile_path(db, must_exist=True)

    def obj_factory(cursor, row):
        dict_ = {}
        for idx, col in enumerate(cursor.description):
            new_name = column_map.get(col[0], col[0])
            dict_[new_name] = row[idx]
        return cls(**dict_)

    con = sqlite3.connect(str(db_path))
    con.row_factory = obj_factory
    cursor = con.cursor()
    if not query:
        columns = [f.name for f in attr.fields(cls)]
        for k, v in column_map.items():
            columns[columns.index(v)] = k
        query = 'SELECT %s FROM %s' % (','.join(columns), table)
    cursor.execute(query)
    while True:
        item = cursor.fetchone()
        if item is None:
            break
        yield item
    con.close()
Load data from a SQLite database and yield the rows as instances of the specified class.
def load_json(self, path):
    with open(self.profile_path(path, must_exist=True), encoding='utf-8') as f:
        data = json.load(f)
    return data
Load a JSON file from the user profile.
def load_mozlz4(self, path):
    with open(self.profile_path(path, must_exist=True), 'rb') as f:
        if f.read(8) != b'mozLz40\0':
            raise NotMozLz4Error('Not Mozilla LZ4 format.')
        data = lz4.block.decompress(f.read())
    return data
Load a Mozilla LZ4 file from the user profile. Mozilla LZ4 is regular LZ4 with a custom string prefix.
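A self-contained sketch of the same format check, assuming the python-lz4 package is installed; the function name is hypothetical.

import lz4.block

def read_mozlz4(path):
    with open(path, 'rb') as f:
        # Mozilla LZ4 files start with an 8-byte magic prefix.
        if f.read(8) != b'mozLz40\0':
            raise ValueError('Not Mozilla LZ4 format.')
        return lz4.block.decompress(f.read())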
def csv_from_items(items, stream=None):
    items = iter(items)
    first = next(items)
    cls = first.__class__
    if stream is None:
        stream = sys.stdout
    fields = [f.name for f in attr.fields(cls)]
    writer = csv.DictWriter(stream, fieldnames=fields)
    writer.writeheader()
    writer.writerow(attr.asdict(first))
    writer.writerows((attr.asdict(x) for x in items))
Write a list of items to stream in CSV format. The items need to be attrs-decorated.
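A hedged usage sketch; `Bookmark` is a hypothetical attrs class for illustration.

import attr

@attr.s
class Bookmark(object):
    title = attr.ib()
    url = attr.ib()

csv_from_items([
    Bookmark('Example', 'http://example.com'),
    Bookmark('Mozilla', 'http://mozilla.org'),
])  # writes CSV with a title,url header row to stdout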
def profile_path(self, path, must_exist=False):
    full_path = self.session.profile / path
    if must_exist and not full_path.exists():
        raise FileNotFoundError(
            errno.ENOENT,
            os.strerror(errno.ENOENT),
            PurePath(full_path).name,
        )
    return full_path
Return path from current profile.
def make_rpc_call(self, rpc_command):
    # ~~~ hack: ~~~
    if not self.is_alive():
        self.close()  # force close for safety
        self.open()   # reopen
    # ~~~ end hack ~~~
    result = self._execute_rpc(rpc_command)
    return ET.tostring(result)
Allow a user to query a device directly using XML-requests.

:param rpc_command: (str) rpc command such as:
    <Get><Operational><LLDP><NodeTable></NodeTable></LLDP></Operational></Get>
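A hedged usage sketch, assuming `device` is an already-open connection object exposing make_rpc_call:

reply = device.make_rpc_call(
    '<Get><Operational><LLDP><NodeTable></NodeTable></LLDP></Operational></Get>'
)
print(reply)  # the raw XML reply as a string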
def open(self):
    try:
        self.device = ConnectHandler(device_type='cisco_xr',
                                     ip=self.hostname,
                                     port=self.port,
                                     username=self.username,
                                     password=self.password,
                                     **self.netmiko_kwargs)
        self.device.timeout = self.timeout
        self._xml_agent_alive = True  # successfully open thus alive
    except NetMikoTimeoutException as t_err:
        raise ConnectError(t_err.args[0])
    except NetMikoAuthenticationException as au_err:
        raise ConnectError(au_err.args[0])
    self._cli_prompt = self.device.find_prompt()  # get the prompt
    self._enter_xml_mode()
Open a connection to an IOS-XR device. Connects to the device using SSH and drops into XML mode.
def is_alive(self):
    if hasattr(self.device, 'remote_conn'):
        return self.device.remote_conn.transport.is_active() and self._xml_agent_alive
    return False
Returns the XML agent connection state (and SSH connection state).
def _execute_show(self, show_command):
    rpc_command = '<CLI><Exec>{show_command}</Exec></CLI>'.format(
        show_command=escape_xml(show_command)
    )
    response = self._execute_rpc(rpc_command)
    raw_response = response.xpath('.//CLI/Exec')[0].text
    return raw_response.strip() if raw_response else ''
Executes an operational show-type command.
def _execute_config_show(self, show_command, delay_factor=.1):
    rpc_command = '<CLI><Configuration>{show_command}</Configuration></CLI>'.format(
        show_command=escape_xml(show_command)
    )
    response = self._execute_rpc(rpc_command, delay_factor=delay_factor)
    raw_response = response.xpath('.//CLI/Configuration')[0].text
    return raw_response.strip() if raw_response else ''
Executes a configuration show-type command.
def close(self):
    if self.lock_on_connect or self.locked:
        self.unlock()  # this refers to the config DB
    self._unlock_xml_agent()  # this refers to the XML agent
    if hasattr(self.device, 'remote_conn'):
        self.device.remote_conn.close()
Close the connection to the IOS-XR device. Clean up after you are done and explicitly close the router connection.
def lock(self):
    if not self.locked:
        rpc_command = '<Lock/>'
        try:
            self._execute_rpc(rpc_command)
        except XMLCLIError:
            raise LockError('Unable to enter in configure exclusive mode!', self)
        self.locked = True
Lock the config database. Use when locking/unlocking is not performed automatically (lock=False on connect).
def unlock(self):
    if self.locked:
        rpc_command = '<Unlock/>'
        try:
            self._execute_rpc(rpc_command)
        except XMLCLIError:
            raise UnlockError('Unable to unlock the config!', self)
        self.locked = False
Unlock the IOS-XR device config. Use when locking/unlocking is not performed automatically (lock=False on connect).
def load_candidate_config(self, filename=None, config=None):
    configuration = ''
    if filename is None:
        configuration = config
    else:
        with open(filename) as f:
            configuration = f.read()
    rpc_command = '<CLI><Configuration>{configuration}</Configuration></CLI>'.format(
        configuration=escape_xml(configuration)  # need to escape, otherwise will try to load invalid XML
    )
    try:
        self._execute_rpc(rpc_command)
    except InvalidInputError as e:
        self.discard_config()
        raise InvalidInputError(e.args[0], self)
Load candidate configuration.

Populate the attribute candidate_config with the desired configuration and load it into the router. You can populate it from a file or from a string. If you send both a filename and a string containing the configuration, the file takes precedence.

:param filename: Path to the file containing the desired configuration. By default is None.
:param config: String containing the desired configuration.
def get_candidate_config(self, merge=False, formal=False):
    command = "show configuration"
    if merge:
        command += " merge"
    if formal:
        command += " formal"
    response = self._execute_config_show(command)
    match = re.search(".*(!! IOS XR Configuration.*)$", response, re.DOTALL)
    if match is not None:
        response = match.group(1)
    return response
Retrieve the configuration loaded as candidate config in your configuration session.

:param merge: Merge candidate config with running config to return the complete configuration, including all changes.
:param formal: Return configuration in IOS-XR formal config format.
def compare_config(self):
    _show_merge = self._execute_config_show('show configuration merge')
    _show_run = self._execute_config_show('show running-config')
    diff = difflib.unified_diff(_show_run.splitlines(1)[2:-2],
                                _show_merge.splitlines(1)[2:-2])
    return ''.join([x.replace('\r', '') for x in diff])
Compare configuration to be merged with the one on the device.

Compare the loaded candidate config with the running config and return a diff, assuming the loaded config will be merged with the existing one.

:return: Config diff.
def commit_config(self, label=None, comment=None, confirmed=None):
    rpc_command = '<Commit'
    if label:
        rpc_command += ' Label="%s"' % label
    if comment:
        rpc_command += ' Comment="%s"' % comment[:60]
    if confirmed:
        if 30 <= int(confirmed) <= 300:
            rpc_command += ' Confirmed="%d"' % int(confirmed)
        else:
            raise InvalidInputError('confirmed needs to be between 30 and 300 seconds', self)
    rpc_command += '/>'
    self._execute_rpc(rpc_command)
Commit the candidate config.

:param label: Commit label, displayed instead of the commit ID on the device.
:param comment: Commit comment, displayed in the commit entry on the device (max 60 characters).
:param confirmed: Commit with auto-rollback if a new commit is not made within 30 to 300 seconds.
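A hedged usage sketch, assuming `device` holds a loaded candidate config; the label and comment values are placeholders.

device.commit_config(label='maintenance', comment='interface cleanup',
                     confirmed=60)  # auto-rollback unless reconfirmed within 60s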
def rollback(self, rb_id=1):
    rpc_command = '<Unlock/><Rollback><Previous>{rb_id}</Previous></Rollback><Lock/>'.format(rb_id=rb_id)
    self._execute_rpc(rpc_command)
Rollback the last committed configuration. :param rb_id: Rollback a specific number of steps. Default: 1
def _main():
    import sys

    def log(message):
        print(message)

    def print_usage():
        log('usage: %s <application key> <application secret> send <number> <message> <from_number>' % sys.argv[0])
        log('       %s <application key> <application secret> status <message_id>' % sys.argv[0])

    if len(sys.argv) > 4 and sys.argv[3] == 'send':
        key, secret, number, message = sys.argv[1], sys.argv[2], sys.argv[4], sys.argv[5]
        client = SinchSMS(key, secret)
        if len(sys.argv) > 6:
            log(client.send_message(number, message, sys.argv[6]))
        else:
            log(client.send_message(number, message))
    elif len(sys.argv) > 3 and sys.argv[3] == 'status':
        key, secret, message_id = sys.argv[1], sys.argv[2], sys.argv[4]
        client = SinchSMS(key, secret)
        log(client.check_status(message_id))
    else:
        print_usage()
        sys.exit(1)
    sys.exit(0)
A simple demo to be used from command line.
def _request(self, url, values=None):
    if values:
        json_data = json.dumps(values)
        request = urllib2.Request(url, json_data.encode())
        request.add_header('content-type', 'application/json')
        request.add_header('authorization', self._auth)
        connection = urllib2.urlopen(request)
        response = connection.read()
        connection.close()
    else:
        request = urllib2.Request(url)
        request.add_header('authorization', self._auth)
        connection = urllib2.urlopen(request)
        response = connection.read()
        connection.close()
    try:
        result = json.loads(response.decode())
    except ValueError as exception:
        return {'errorCode': 1, 'message': str(exception)}
    return result
Send a request and read the response. Sends a GET request if values is None, a POST request otherwise.
def send_message(self, to_number, message, from_number=None):
    values = {'Message': message}
    if from_number is not None:
        values['From'] = from_number
    return self._request(self.SEND_SMS_URL + to_number, values)
Send a message to the specified number and return a response dictionary.

The numbers must be specified in international format starting with a '+'. Returns a dictionary that contains a 'MessageId' key with the sent message id value, or contains 'errorCode' and 'message' on error.

Possible error codes:
    40001 - Parameter validation
    40002 - Missing parameter
    40003 - Invalid request
    40100 - Illegal authorization header
    40200 - There are not enough funds to send the message
    40300 - Forbidden request
    40301 - Invalid authorization scheme for calling the method
    50000 - Internal error
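A hedged usage sketch; the key, secret and destination number are placeholders.

client = SinchSMS('your-app-key', 'your-app-secret')
response = client.send_message('+46700000000', 'Hello!')
if 'errorCode' in response:
    print('failed:', response['message'])
else:
    print('sent, message id:', response['MessageId'])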
def convert_descriptor(self, descriptor):
    # Fields
    fields = []
    fallbacks = []
    schema = tableschema.Schema(descriptor)
    for index, field in enumerate(schema.fields):
        converted_type = self.convert_type(field.type)
        if not converted_type:
            converted_type = 'STRING'
            fallbacks.append(index)
        mode = 'NULLABLE'
        if field.required:
            mode = 'REQUIRED'
        fields.append({
            'name': _slugify_field_name(field.name),
            'type': converted_type,
            'mode': mode,
        })

    # Descriptor
    converted_descriptor = {
        'fields': fields,
    }

    return (converted_descriptor, fallbacks)
Convert descriptor to BigQuery
def convert_row(self, row, schema, fallbacks):
    for index, field in enumerate(schema.fields):
        value = row[index]
        if index in fallbacks:
            value = _uncast_value(value, field=field)
        else:
            value = field.cast_value(value)
        row[index] = value
    return row
Convert row to BigQuery
def convert_type(self, type):
    # Mapping
    mapping = {
        'any': 'STRING',
        'array': None,
        'boolean': 'BOOLEAN',
        'date': 'DATE',
        'datetime': 'DATETIME',
        'duration': None,
        'geojson': None,
        'geopoint': None,
        'integer': 'INTEGER',
        'number': 'FLOAT',
        'object': None,
        'string': 'STRING',
        'time': 'TIME',
        'year': 'INTEGER',
        'yearmonth': None,
    }

    # Not supported type
    if type not in mapping:
        message = 'Type %s is not supported' % type
        raise tableschema.exceptions.StorageError(message)

    return mapping[type]
Convert type to BigQuery
def restore_descriptor(self, converted_descriptor):
    # Convert
    fields = []
    for field in converted_descriptor['fields']:
        field_type = self.restore_type(field['type'])
        resfield = {
            'name': field['name'],
            'type': field_type,
        }
        if field.get('mode', 'NULLABLE') != 'NULLABLE':
            resfield['constraints'] = {'required': True}
        fields.append(resfield)
    descriptor = {'fields': fields}

    return descriptor
Restore descriptor from BigQuery
def restore_row(self, row, schema):
    for index, field in enumerate(schema.fields):
        if field.type == 'datetime':
            row[index] = parse(row[index])
        if field.type == 'date':
            row[index] = parse(row[index]).date()
        if field.type == 'time':
            row[index] = parse(row[index]).time()
    return schema.cast_row(row)
Restore row from BigQuery
def restore_type(self, type):
    # Mapping
    mapping = {
        'BOOLEAN': 'boolean',
        'DATE': 'date',
        'DATETIME': 'datetime',
        'INTEGER': 'integer',
        'FLOAT': 'number',
        'STRING': 'string',
        'TIME': 'time',
    }

    # Not supported type
    if type not in mapping:
        message = 'Type %s is not supported' % type
        raise tableschema.exceptions.StorageError(message)

    return mapping[type]
Restore type from BigQuery
def buckets(self):
    # No cached value
    if self.__buckets is None:

        # Get response
        response = self.__service.tables().list(
            projectId=self.__project,
            datasetId=self.__dataset).execute()

        # Extract buckets
        self.__buckets = []
        for table in response.get('tables', []):
            table_name = table['tableReference']['tableId']
            bucket = self.__mapper.restore_bucket(table_name)
            if bucket is not None:
                self.__buckets.append(bucket)

    return self.__buckets
https://github.com/frictionlessdata/tableschema-bigquery-py#storage
def create(self, bucket, descriptor, force=False):
    # Make lists
    buckets = bucket
    if isinstance(bucket, six.string_types):
        buckets = [bucket]
    descriptors = descriptor
    if isinstance(descriptor, dict):
        descriptors = [descriptor]

    # Iterate over buckets/descriptors
    for bucket, descriptor in zip(buckets, descriptors):

        # Existent bucket
        if bucket in self.buckets:
            if not force:
                message = 'Bucket "%s" already exists' % bucket
                raise tableschema.exceptions.StorageError(message)
            self.delete(bucket)

        # Prepare job body
        tableschema.validate(descriptor)
        table_name = self.__mapper.convert_bucket(bucket)
        converted_descriptor, fallbacks = self.__mapper.convert_descriptor(descriptor)
        body = {
            'tableReference': {
                'projectId': self.__project,
                'datasetId': self.__dataset,
                'tableId': table_name,
            },
            'schema': converted_descriptor,
        }

        # Make request
        self.__service.tables().insert(
            projectId=self.__project,
            datasetId=self.__dataset,
            body=body).execute()

        # Add to descriptors/fallbacks
        self.__descriptors[bucket] = descriptor
        self.__fallbacks[bucket] = fallbacks

    # Remove buckets cache
    self.__buckets = None
https://github.com/frictionlessdata/tableschema-bigquery-py#storage
def delete(self, bucket=None, ignore=False):
    # Make lists
    buckets = bucket
    if isinstance(bucket, six.string_types):
        buckets = [bucket]
    elif bucket is None:
        buckets = reversed(self.buckets)

    # Iterate over buckets
    for bucket in buckets:

        # Non-existent bucket
        if bucket not in self.buckets:
            if not ignore:
                message = 'Bucket "%s" doesn\'t exist.' % bucket
                raise tableschema.exceptions.StorageError(message)
            return

        # Remove from descriptors
        if bucket in self.__descriptors:
            del self.__descriptors[bucket]

        # Make delete request
        table_name = self.__mapper.convert_bucket(bucket)
        self.__service.tables().delete(
            projectId=self.__project,
            datasetId=self.__dataset,
            tableId=table_name).execute()

    # Remove tables cache
    self.__buckets = None
https://github.com/frictionlessdata/tableschema-bigquery-py#storage
def describe(self, bucket, descriptor=None):
    # Set descriptor
    if descriptor is not None:
        self.__descriptors[bucket] = descriptor

    # Get descriptor
    else:
        descriptor = self.__descriptors.get(bucket)
        if descriptor is None:
            table_name = self.__mapper.convert_bucket(bucket)
            response = self.__service.tables().get(
                projectId=self.__project,
                datasetId=self.__dataset,
                tableId=table_name).execute()
            converted_descriptor = response['schema']
            descriptor = self.__mapper.restore_descriptor(converted_descriptor)

    return descriptor
https://github.com/frictionlessdata/tableschema-bigquery-py#storage
def iter(self, bucket):
    # Get schema/data
    schema = tableschema.Schema(self.describe(bucket))
    table_name = self.__mapper.convert_bucket(bucket)
    response = self.__service.tabledata().list(
        projectId=self.__project,
        datasetId=self.__dataset,
        tableId=table_name).execute()

    # Collect rows
    rows = []
    for fields in response['rows']:
        row = [field['v'] for field in fields['f']]
        rows.append(row)

    # Sort rows
    # TODO: provide proper sorting solution
    rows = sorted(rows, key=lambda row: row[0] if row[0] is not None else 'null')

    # Emit rows
    for row in rows:
        row = self.__mapper.restore_row(row, schema=schema)
        yield row
https://github.com/frictionlessdata/tableschema-bigquery-py#storage
def write(self, bucket, rows):
    # Write buffer
    BUFFER_SIZE = 10000

    # Prepare schema, fallbacks
    schema = tableschema.Schema(self.describe(bucket))
    fallbacks = self.__fallbacks.get(bucket, [])

    # Write data
    rows_buffer = []
    for row in rows:
        row = self.__mapper.convert_row(row, schema=schema, fallbacks=fallbacks)
        rows_buffer.append(row)
        if len(rows_buffer) > BUFFER_SIZE:
            self.__write_rows_buffer(bucket, rows_buffer)
            rows_buffer = []
    if len(rows_buffer) > 0:
        self.__write_rows_buffer(bucket, rows_buffer)
https://github.com/frictionlessdata/tableschema-bigquery-py#storage
def get_securities(self, page=1, **filter_param):
    url_path = self._build_url_path(None, None)
    params = {'page': page}
    # The endpoints respond just fine to invalid query params (they just
    # ignore them), but the real value of the endpoints is only revealed
    # when using the filters, so let's not waste requests on filters
    # that don't do anything.
    if filter_param:
        query_filter = filter_param.popitem()
        if query_filter[0] in self.VALID_SECURITY_FILTERS:
            params[query_filter[0]] = query_filter[1]
        else:
            error_msg = 'Invalid filter param. Must be one of: {0}'.format(
                ','.join(self.VALID_SECURITY_FILTERS))
            raise exceptions.PyChartsRequestException(error_msg)
    return self._get_data(url_path, params)
Queries /<security_type> endpoint to return a paged list of securities.
def get_points(self, security_symbols, calculation_codes, query_date=None):
    security_symbols = self._str_or_list(security_symbols)
    calculation_codes = self._str_or_list(calculation_codes)
    url_path = self._build_url_path(security_symbols, 'points', calculation_codes)
    if query_date:
        params = {'date': self._format_query_date_for_url(query_date)}
    else:
        params = None
    return self._get_data(url_path, params)
Queries data from a /<security_type>/points endpoint.

Args:
    security_symbols (list): List of string symbols.
    calculation_codes (list): List of string calculation codes.
    query_date (datetime): datetime object on or before which the endpoint will query data for. (int): Negative integer representing relative periods (as it relates to each calc code) in the past.

Returns:
    dict of the decoded json from server response.

Notes:
    The max length of any list arg is 100.
def get_series(self, security_symbols, calculation_codes,
               query_start_date=None, query_end_date=None,
               resample_frequency=None, resample_function=None,
               fill_method=None, aggregate_function=None):
    security_symbols = self._str_or_list(security_symbols)
    calculation_codes = self._str_or_list(calculation_codes)
    url_path = self._build_url_path(security_symbols, 'series', calculation_codes)
    params = {}
    if query_start_date:
        params['start_date'] = self._format_query_date_for_url(query_start_date)
    if query_end_date:
        params['end_date'] = self._format_query_date_for_url(query_end_date)
    if resample_frequency:
        params['resample_frequency'] = resample_frequency
    if resample_function:
        params['resample_function'] = resample_function
    if fill_method:
        params['fill_method'] = fill_method
    if aggregate_function:
        params['aggregate_function'] = aggregate_function
    return self._get_data(url_path, params)
Queries data from a /<security_type>/series endpoint.

Args:
    security_symbols (list): List of string symbols.
    calculation_codes (list): List of string calculation codes.
    query_start_date (datetime): date after which the endpoint will query data for. (int): Negative integer representing relative periods (as it relates to each calc code) in the past.
    query_end_date (datetime): date on or before which the endpoint will query data for. (int): Negative integer representing relative periods (as it relates to each calc code) in the past.

Returns:
    dict of the decoded json from server response.

Notes:
    The max length of any list arg is 100.
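A hedged usage sketch; `client` is an assumed authenticated API client instance, and the symbols and calc codes are placeholders.

from datetime import datetime

series = client.get_series(
    ['AAPL', 'MSFT'],                        # hypothetical symbols
    ['price'],                               # hypothetical calc code
    query_start_date=datetime(2015, 1, 1),
    query_end_date=datetime(2015, 12, 31),
)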
def get_info(self, security_symbols, info_field_codes):
    security_symbols = self._str_or_list(security_symbols)
    info_field_codes = self._str_or_list(info_field_codes)
    url_path = self._build_url_path(security_symbols, 'info', info_field_codes)
    return self._get_data(url_path, None)
Queries data from a /<security_type>/info endpoint.

Args:
    security_symbols (list): List of string symbols.
    info_field_codes (list): List of string info field codes.

Returns:
    dict of the decoded json from server response.

Notes:
    The max length of any list arg is 100.
def definition(self):
    return self.TRUE, self.FALSE, self.NOT, self.AND, self.OR, self.Symbol
Return a tuple of this algebra's defined elements and types as: (TRUE, FALSE, NOT, AND, OR, Symbol)
def _rdistributive(self, expr, op_example):
    if expr.isliteral:
        return expr
    expr_class = expr.__class__
    args = (self._rdistributive(arg, op_example) for arg in expr.args)
    args = tuple(arg.simplify() for arg in args)
    if len(args) == 1:
        return args[0]
    expr = expr_class(*args)
    dualoperation = op_example.dual
    if isinstance(expr, dualoperation):
        expr = expr.distributive()
    return expr
Recursively flatten the `expr` expression for the `op_example` AND or OR operation instance example.
def normalize(self, expr, operation):
    # Ensure that the operation is not NOT.
    assert operation in (self.AND, self.OR,)
    # Move NOT inwards.
    expr = expr.literalize()
    # Simplify first, otherwise _rdistributive() may take forever.
    expr = expr.simplify()
    operation_example = operation(self.TRUE, self.FALSE)
    expr = self._rdistributive(expr, operation_example)
    # Canonicalize
    expr = expr.simplify()
    return expr
Return a normalized expression transformed to its normal form in the given AND or OR operation.

The new expression arguments will satisfy these conditions:
- operation(*args) == expr (here mathematical equality is meant)
- the operation does not occur in any of its args
- NOT only appears in literals (negation normal form)

The operation must be an AND or OR operation or a subclass.
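A hedged usage sketch following the boolean.py-style API these methods appear to come from; the exact entry points are assumptions.

algebra = BooleanAlgebra()
expr = algebra.parse('a & (b | (a & c))')
cnf = algebra.normalize(expr, algebra.AND)  # conjunctive normal form
dnf = algebra.normalize(expr, algebra.OR)   # disjunctive normal form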
def get_literals(self):
    if self.isliteral:
        return [self]
    if not self.args:
        return []
    return list(itertools.chain.from_iterable(arg.get_literals() for arg in self.args))
Return a list of all the literals contained in this expression, recursively including those of subexpressions. The list may contain duplicates.
def literalize(self):
    if self.isliteral:
        return self
    args = tuple(arg.literalize() for arg in self.args)
    if all(arg is self.args[i] for i, arg in enumerate(args)):
        return self
    return self.__class__(*args)
Return an expression where NOTs are only occurring as literals. Applied recursively to subexpressions.
def get_symbols(self):
    return [s if isinstance(s, Symbol) else s.args[0] for s in self.get_literals()]
Return a list of all the symbols contained in this expression, recursively including those of subexpressions. The list may contain duplicates.
def subs(self, substitutions, default=None, simplify=False):
    # Shortcut: check if we have our whole expression as a possible
    # substitution source.
    for expr, substitution in substitutions.items():
        if expr == self:
            return substitution
    # Otherwise, do a proper substitution of sub expressions.
    expr = self._subs(substitutions, default, simplify)
    return self if expr is None else expr
Return an expression where the expression or all subterms equal to a key expression are substituted with the corresponding value expression, using a mapping of {expr: expr to substitute}. Return this expression unmodified if nothing could be substituted. Note that this can be used to test for expression containment.
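A hedged sketch of substitution, assuming the boolean.py-style API:

algebra = BooleanAlgebra()
a, b, c = algebra.symbols('a', 'b', 'c')
expr = algebra.parse('a & b')
expr.subs({a: c})                 # -> c & b
expr.subs({expr: algebra.TRUE})   # whole-expression match -> TRUE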
def _subs(self, substitutions, default, simplify):
    # Track the new list of unchanged args or replaced args through
    # a substitution.
    new_arguments = []
    changed_something = False

    # Shortcut for basic logic True or False
    if self is self.TRUE or self is self.FALSE:
        return self

    # If the expression has no elements, e.g. is empty, do not apply
    # substitutions.
    if not self.args:
        return default

    # Iterate the subexpressions: either plain symbols or subexpressions.
    for arg in self.args:
        # Collect substitutions for exact matches.
        # Break as soon as we have a match.
        for expr, substitution in substitutions.items():
            if arg == expr:
                new_arguments.append(substitution)
                changed_something = True
                break

        # This will execute only if we did not break out of the
        # loop, e.g. if we did not change anything and did not
        # collect any substitutions.
        else:
            # Recursively call _subs on each arg to see if we get a
            # substituted arg.
            new_arg = arg._subs(substitutions, default, simplify)
            if new_arg is None:
                # If we did not collect a substitution for this arg,
                # keep the arg as-is: it is not replaced by anything.
                new_arguments.append(arg)
            else:
                # Otherwise, we add the substitution for this arg instead.
                new_arguments.append(new_arg)
                changed_something = True

    if not changed_something:
        return

    # Here we did some substitution: we return a new expression
    # built from new_arguments.
    newexpr = self.__class__(*new_arguments)
    return newexpr.simplify() if simplify else newexpr
Return an expression where all subterms equal to a key expression are substituted by the corresponding value expression using a mapping of: {expr->expr to substitute.}
def pretty(self, indent=0, debug=False):
    debug_details = ''
    if debug:
        debug_details += '<isliteral=%r, iscanonical=%r>' % (self.isliteral, self.iscanonical)
    obj = "'%s'" % self.obj if isinstance(self.obj, basestring) else repr(self.obj)
    return (' ' * indent) + ('%s(%s%s)' % (self.__class__.__name__, debug_details, obj))
Return a pretty formatted representation of self.
def pretty(self, indent=0, debug=False):
    debug_details = ''
    if debug:
        debug_details += '<isliteral=%r, iscanonical=%r' % (self.isliteral, self.iscanonical)
        identity = getattr(self, 'identity', None)
        if identity is not None:
            debug_details += ', identity=%r' % (identity)
        annihilator = getattr(self, 'annihilator', None)
        if annihilator is not None:
            debug_details += ', annihilator=%r' % (annihilator)
        dual = getattr(self, 'dual', None)
        if dual is not None:
            debug_details += ', dual=%r' % (dual)
        debug_details += '>'
    cls = self.__class__.__name__
    args = [a.pretty(indent=indent + 2, debug=debug) for a in self.args]
    pfargs = ',\n'.join(args)
    cur_indent = ' ' * indent
    new_line = '' if self.isliteral else '\n'
    return '{cur_indent}{cls}({debug_details}{new_line}{pfargs}\n{cur_indent})'.format(**locals())
Return a pretty formatted representation of self as an indented tree. If debug is True, also prints debug information for each expression arg.

For example:

>>> print Expression().parse(u'not a and not b and not (a and ba and c) and c or c').pretty()
OR(
  AND(
    NOT(Symbol('a')),
    NOT(Symbol('b')),
    NOT(
      AND(
        Symbol('a'),
        Symbol('ba'),
        Symbol('c')
      )
    ),
    Symbol('c')
  ),
  Symbol('c')
)
def literalize(self):
    expr = self.demorgan()
    if isinstance(expr, self.__class__):
        return expr
    return expr.literalize()
Return an expression where NOTs are only occurring as literals.
def simplify(self):
    if self.iscanonical:
        return self
    expr = self.cancel()
    if not isinstance(expr, self.__class__):
        return expr.simplify()
    if expr.args[0] in (self.TRUE, self.FALSE,):
        return expr.args[0].dual
    expr = self.__class__(expr.args[0].simplify())
    expr.iscanonical = True
    return expr
Return a simplified expr in canonical form. This means double negations are canceled out and all contained boolean objects are in their canonical form.
def cancel(self):
    expr = self
    while True:
        arg = expr.args[0]
        if not isinstance(arg, self.__class__):
            return expr
        expr = arg.args[0]
        if not isinstance(expr, self.__class__):
            return expr
Cancel itself and following NOTs as far as possible. Returns the simplified expression.
def demorgan(self):
    expr = self.cancel()
    if expr.isliteral or not isinstance(expr, self.NOT):
        return expr
    op = expr.args[0]
    return op.dual(*(self.__class__(arg).cancel() for arg in op.args))
Return an expr where the NOT function is moved inward. This is achieved by canceling double NOTs and using De Morgan laws.
def pretty(self, indent=1, debug=False):
    debug_details = ''
    if debug:
        debug_details += '<isliteral=%r, iscanonical=%r>' % (self.isliteral, self.iscanonical)
    if self.isliteral:
        pretty_literal = self.args[0].pretty(indent=0, debug=debug)
        return (' ' * indent) + '%s(%s%s)' % (self.__class__.__name__, debug_details, pretty_literal)
    else:
        return super(NOT, self).pretty(indent=indent, debug=debug)
Return a pretty formatted representation of self. Include additional debug details if `debug` is True.
def flatten(self):
    args = list(self.args)
    i = 0
    for arg in self.args:
        if isinstance(arg, self.__class__):
            args[i:i + 1] = arg.args
            i += len(arg.args)
        else:
            i += 1
    return self.__class__(*args)
Return a new expression where nested terms of this expression are flattened as far as possible. E.g. A & (B & C) becomes A & B & C.
def absorb(self, args):
    args = list(args)
    if not args:
        args = list(self.args)
    i = 0
    while i < len(args):
        absorber = args[i]
        j = 0
        while j < len(args):
            if j == i:
                j += 1
                continue
            target = args[j]
            if not isinstance(target, self.dual):
                j += 1
                continue

            # Absorption
            if absorber in target:
                del args[j]
                if j < i:
                    i -= 1
                continue

            # Negative absorption
            neg_absorber = self.NOT(absorber).cancel()
            if neg_absorber in target:
                b = target.subtract(neg_absorber, simplify=False)
                if b is None:
                    del args[j]
                    if j < i:
                        i -= 1
                    continue
                else:
                    args[j] = b
                    j += 1
                    continue

            if isinstance(absorber, self.dual):
                remove = None
                for arg in absorber.args:
                    narg = self.NOT(arg).cancel()
                    if arg in target.args:
                        pass
                    elif narg in target.args:
                        if remove is None:
                            remove = narg
                        else:
                            remove = None
                            break
                    else:
                        remove = None
                        break
                if remove is not None:
                    args[j] = target.subtract(remove, simplify=True)
            j += 1
        i += 1
    return args
Given an `args` sequence of expressions, return a new list of expressions applying absorption and negative absorption. See https://en.wikipedia.org/wiki/Absorption_law

Absorption: A & (A | B) = A, A | (A & B) = A
Negative absorption: A & (~A | B) = A & B, A | (~A & B) = A | B
def subtract(self, expr, simplify):
    args = self.args
    if expr in self.args:
        args = list(self.args)
        args.remove(expr)
    elif isinstance(expr, self.__class__):
        if all(arg in self.args for arg in expr.args):
            args = tuple(arg for arg in self.args if arg not in expr)
    if len(args) == 0:
        return None
    if len(args) == 1:
        return args[0]
    newexpr = self.__class__(*args)
    if simplify:
        newexpr = newexpr.simplify()
    return newexpr
Return a new expression where the `expr` expression has been removed from this expression if it exists.
def distributive(self):
    dual = self.dual
    args = list(self.args)
    for i, arg in enumerate(args):
        if isinstance(arg, dual):
            args[i] = arg.args
        else:
            args[i] = (arg,)
    prod = itertools.product(*args)
    args = tuple(self.__class__(*arg).simplify() for arg in prod)
    if len(args) == 1:
        return args[0]
    else:
        return dual(*args)
Return a term where the leading AND or OR terms are switched.

This is done by applying the distributive laws:
    A & (B|C) = (A&B) | (A&C)
    A | (B&C) = (A|B) & (A|C)
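A hedged sketch of the distributive law in action, assuming the boolean.py-style API:

algebra = BooleanAlgebra()
a, b, c = algebra.symbols('a', 'b', 'c')
expr = algebra.parse('a & (b | c)')
print(expr.distributive())  # -> (a&b) | (a&c)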
def ages(self):
    match = self._ages_re.match(self.raw_fields.get('ages'))
    if not match:
        # The fallback pattern matches a single age, so use it for
        # both ends of the range.
        match = self._ages_re2.match(self.raw_fields.get('ages'))
        return self.Ages(int(match.group(1)), int(match.group(1)))
    return self.Ages(int(match.group(1)), int(match.group(2)))
The age range that the user is interested in.
def single(self):
    return 'display: none;' not in self._looking_for_xpb.li(id='ajax_single').\
        one_(self._profile.profile_tree).attrib['style']
Whether or not the user is only interested in people that are single.
def update(self, ages=None, single=None, near_me=None, kinds=None,
           gentation=None):
    ages = ages or self.ages
    single = single if single is not None else self.single
    near_me = near_me if near_me is not None else self.near_me
    kinds = kinds or self.kinds
    gentation = gentation or self.gentation
    data = {
        'okc_api': '1',
        'searchprefs.submit': '1',
        'update_prefs': '1',
        'lquery': '',
        'locid': '0',
        'filter5': '1, 1'  # TODO(@IvanMalison) Do this better...
    }
    if kinds:
        kinds_numbers = self._build_kinds_numbers(kinds)
        if kinds_numbers:
            data['lookingfor'] = kinds_numbers
    age_min, age_max = ages
    data.update(looking_for_filters.legacy_build(
        status=single,
        gentation=gentation,
        radius=25 if near_me else 0,
        age_min=age_min,
        age_max=age_max
    ))
    log.info(simplejson.dumps({'looking_for_update': data}))
    util.cached_property.bust_caches(self)
    response = self._profile.authcode_post('profileedit2', data=data)
    self._profile.refresh(reload=False)
    return response.content
Update the looking for attributes of the logged in user.

:param ages: The ages that the logged in user is interested in.
:type ages: tuple
:param single: Whether or not the user is only interested in people that are single.
:type single: bool
:param near_me: Whether or not the user is only interested in people that are near them.
:type near_me: bool
:param kinds: What kinds of relationships the user should be updated to be interested in.
:type kinds: list
:param gentation: The sex/orientation of people the user is interested in.
:type gentation: str
def upload_and_confirm(self, incoming, **kwargs):
    response_dict = self.upload(incoming)
    if 'error' in response_dict:
        log.warning('Failed to upload photo')
        return response_dict
    if isinstance(incoming, Info):
        kwargs.setdefault('thumb_nail_left', incoming.thumb_nail_left)
        kwargs.setdefault('thumb_nail_top', incoming.thumb_nail_top)
        kwargs.setdefault('thumb_nail_right', incoming.thumb_nail_right)
        kwargs.setdefault('thumb_nail_bottom', incoming.thumb_nail_bottom)
    kwargs['height'] = response_dict.get('height')
    kwargs['width'] = response_dict.get('width')
    self.confirm(response_dict['id'], **kwargs)
    return response_dict
Upload the file to okcupid and confirm, among other things, its thumbnail position.

:param incoming: A filepath string, :class:`.Info` object or a file like object to upload to okcupid.com. If an info object is provided, its thumbnail positioning will be used by default.
:param caption: The caption to add to the photo.
:param thumb_nail_left: For thumb nail positioning.
:param thumb_nail_top: For thumb nail positioning.
:param thumb_nail_right: For thumb nail positioning.
:param thumb_nail_bottom: For thumb nail positioning.