sentence1: string, lengths 52 to 3.87M
sentence2: string, lengths 1 to 47.2k
label: string, 1 class ("entailment")
def lte(max_value):
    """ Validates that a field value is less than or equal to the
    value given to this validator. """
    def validate(value):
        if value > max_value:
            return e("{} is not less than or equal to {}", value, max_value)
    return validate
Validates that a field value is less than or equal to the value given to this validator.
entailment
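For context, a minimal sketch of how one of these validator factories might be used, assuming the `e` helper from this library formats and returns an error string and that validators return None on success:

# Hypothetical usage of the lte() validator factory; `e` is assumed to
# return a formatted error string, and None means the value passed.
validate = lte(10)
assert validate(7) is None   # within bounds, no error
print(validate(12))          # e.g. "12 is not less than or equal to 10"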
def gt(gt_value):
    """ Validates that a field value is greater than the value given to this
    validator. """
    def validate(value):
        if value <= gt_value:
            return e("{} is not greater than {}", value, gt_value)
    return validate
Validates that a field value is greater than the value given to this validator.
entailment
def lt(lt_value):
    """ Validates that a field value is less than the value given to this
    validator. """
    def validate(value):
        if value >= lt_value:
            return e("{} is not less than {}", value, lt_value)
    return validate
Validates that a field value is less than the value given to this validator.
entailment
def between(min_value, max_value):
    """ Validates that a field value is between the two values given to this
    validator. """
    def validate(value):
        if value < min_value:
            return e("{} is not greater than or equal to {}", value, min_value)
        if value > max_value:
            return e("{} is not less than or equal to {}", value, max_value)
    return validate
Validates that a field value is between the two values given to this validator.
entailment
def length(min=None, max=None):
    """ Validates that a field value's length is between the bounds given to
    this validator. """
    def validate(value):
        if min and len(value) < min:
            return e("{} does not have a length of at least {}", value, min)
        if max and len(value) > max:
            return e("{} does not have a length of at most {}", value, max)
    return validate
Validates that a field value's length is between the bounds given to this validator.
entailment
def match(pattern):
    """ Validates that a field value matches the regex given to this
    validator. """
    regex = re.compile(pattern)
    def validate(value):
        if not regex.match(value):
            return e("{} does not match the pattern {}", value, pattern)
    return validate
Validates that a field value matches the regex given to this validator.
entailment
def is_email():
    """ Validates that a field's value is a valid email address. """
    email = (
        ur'(?!^\.)'     # No dot at start
        ur'(?!.*\.@)'   # No dot before at sign
        ur'(?!.*@\.)'   # No dot after at sign
        ur'(?!.*\.$)'   # No dot at the end
        ur'(?!.*\.\.)'  # No double dots anywhere
        ur'^\S+'        # Starts with one or more non-whitespace characters
        ur'@'           # Contains an at sign
        ur'\S+$'        # Ends with one or more non-whitespace characters
    )
    regex = re.compile(email, re.IGNORECASE | re.UNICODE)
    def validate(value):
        if not regex.match(value):
            return e("{} is not a valid email address", value)
    return validate
Validates that a field's value is a valid email address.
entailment
def is_url():
    """ Validates that a field's value is a valid URL. """
    # Stolen from Django
    regex = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    def validate(value):
        if not regex.match(value):
            return e("{} is not a valid URL", value)
    return validate
Validates that a field's value is a valid URL.
entailment
def each_item(*validators):
    """ A wrapper which applies the given validators to each item in a field
    value of type `list`.

    Example usage in a Schema:

        "my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
    """
    def validate(value):
        for item in value:
            for validator in validators:
                error = validator(item)
                if error:
                    return error
        return None
    return validate
A wrapper which applies the given validators to each item in a field value of type `list`. Example usage in a Schema: "my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
entailment
def distinct():
    """ Validates that all items in the given field list value are distinct,
    i.e. that the list contains no duplicates. """
    def validate(value):
        for i, item in enumerate(value):
            if item in value[i+1:]:
                return e("{} is not a distinct set of values", value)
    return validate
Validates that all items in the given field list value are distinct, i.e. that the list contains no duplicates.
entailment
def apply_defaults(self, instance):
    """Applies the defaults described by this schema to the given document
    instance as appropriate. Defaults are only applied to fields which are
    currently unset."""
    for field, spec in self.doc_spec.iteritems():
        field_type = spec['type']

        if field not in instance:
            if 'default' in spec:
                default = spec['default']
                if callable(default):
                    instance[field] = default()
                else:
                    instance[field] = copy.deepcopy(default)

        # Determine if a value already exists for the field
        if field in instance:
            value = instance[field]

            # recurse into nested docs
            if isinstance(field_type, Schema) and isinstance(value, dict):
                field_type.apply_defaults(value)
            elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
                for item in value:
                    field_type.contained_type.apply_defaults(item)
Applies the defaults described by this schema to the given document instance as appropriate. Defaults are only applied to fields which are currently unset.
entailment
def validate(self, instance):
    """Validates the given document against this schema. Raises a
    ValidationException if there are any failures."""
    errors = {}
    self._validate_instance(instance, errors)

    if len(errors) > 0:
        raise ValidationException(errors)
Validates the given document against this schema. Raises a ValidationException if there are any failures.
entailment
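A hedged sketch of how `apply_defaults` and `validate` might be driven together. It assumes the `Schema` constructor takes the doc spec dict (only `doc_spec` is visible in these rows), and the field names are made up for illustration:

# Hypothetical document check-in flow; Schema, ValidationException and the
# validators come from this library, the doc spec below is illustrative.
schema = Schema({
    "email": {"type": str, "required": True, "validates": is_email()},
})
doc = {"email": "bob@example.com"}
schema.apply_defaults(doc)    # fills in unset fields with their defaults
try:
    schema.validate(doc)
except ValidationException as exc:
    print(exc)                # carries the per-path errors dict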
def _verify(self, path_prefix=None):
    """Verifies that this schema's doc spec is valid and makes sense."""
    for field, spec in self.doc_spec.iteritems():
        path = self._append_path(path_prefix, field)

        # Standard dict-based spec
        if isinstance(spec, dict):
            self._verify_field_spec(spec, path)
        else:
            raise SchemaFormatException("Invalid field definition for {}", path)
Verifies that this schema's doc spec is valid and makes sense.
entailment
def _verify_field_spec(self, spec, path):
    """Verifies a given field specification is valid, recursing into nested
    schemas if required."""
    # Required should be a boolean
    if 'required' in spec and not isinstance(spec['required'], bool):
        raise SchemaFormatException("{} required declaration should be True or False", path)

    # Nullable should be a boolean
    if 'nullable' in spec and not isinstance(spec['nullable'], bool):
        raise SchemaFormatException("{} nullable declaration should be True or False", path)

    # Must have a type specified
    if 'type' not in spec:
        raise SchemaFormatException("{} has no type declared.", path)

    self._verify_type(spec, path)

    # Validations should be either a single function or array of functions
    if 'validates' in spec:
        self._verify_validates(spec, path)

    # Defaults must be of the correct type or a function
    if 'default' in spec:
        self._verify_default(spec, path)

    # Only expected spec keys are supported
    if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
        raise SchemaFormatException("Unsupported field spec item at {}. Items: " + repr(spec.keys()), path)
Verifies a given field specification is valid, recursing into nested schemas if required.
entailment
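Putting those verification rules together, a field spec that would pass `_verify_field_spec` uses only the five supported keys; a minimal illustrative sketch (the field itself is hypothetical, `between` is the validator factory from this library):

# Illustrative field spec using only the supported keys:
# 'type', 'required', 'nullable', 'validates', 'default'.
score_spec = {
    "type": int,
    "required": True,
    "nullable": False,
    "validates": between(0, 100),  # a single validator or a list of them
    "default": 0,                  # must match 'type' or be a callable
}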
def _verify_type(self, spec, path):
    """Verify that the 'type' in the spec is valid"""
    field_type = spec['type']

    if isinstance(field_type, Schema):
        # Nested documents cannot have validation
        if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
            raise SchemaFormatException("Unsupported field spec item at {}. Items: " + repr(spec.keys()), path)
        return

    elif isinstance(field_type, Array):
        if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
            raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)

    elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
        raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
Verify that the 'type' in the spec is valid
entailment
def _verify_default(self, spec, path):
    """Verifies that the default specified in the given spec is valid."""
    field_type = spec['type']
    default = spec['default']

    # If it's a function there's nothing we can really do except assume it's valid
    if callable(default):
        return

    if isinstance(field_type, Array):
        # Verify we've got a list as our default
        if not isinstance(default, list):
            raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)

        # Ensure the contents are of the correct type
        for i, item in enumerate(default):
            if isinstance(field_type.contained_type, Schema):
                if not self._valid_schema_default(item):
                    raise SchemaFormatException("Default value for Schema is not valid.", path)
            elif not isinstance(item, field_type.contained_type):
                raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)

    elif isinstance(field_type, Schema):
        if not self._valid_schema_default(default):
            raise SchemaFormatException("Default value for Schema is not valid.", path)

    else:
        if not isinstance(default, field_type):
            raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
Verifies that the default specified in the given spec is valid.
entailment
def _verify_validates(self, spec, path):
    """Verify that the 'validates' argument is valid."""
    validates = spec['validates']

    if isinstance(validates, list):
        for validator in validates:
            self._verify_validator(validator, path)
    else:
        self._verify_validator(validates, path)
Verify that the 'validates' argument is valid.
entailment
def _verify_validator(self, validator, path):
    """Verifies that a given validator associated with the field at the given
    path is legitimate."""
    # Validator should be a function
    if not callable(validator):
        raise SchemaFormatException("Invalid validations for {}", path)

    # Validator should accept a single argument
    (args, varargs, keywords, defaults) = getargspec(validator)
    if len(args) != 1:
        raise SchemaFormatException("Invalid validations for {}", path)
Verifies that a given validator associated with the field at the given path is legitimate.
entailment
def _validate_instance(self, instance, errors, path_prefix=''):
    """Validates that the given instance of a document conforms to the given
    schema's structure and validations. Any validation errors are added to the
    given errors collection. The caller should assume the instance is
    considered valid if the errors collection is empty when this method
    returns."""
    if not isinstance(instance, dict):
        errors[path_prefix] = "Expected instance of dict to validate against schema."
        return

    # validate against the schema level validators
    self._apply_validations(errors, path_prefix, self._validates, instance)

    # Loop over each field in the schema and check the instance value conforms
    # to its spec
    for field, spec in self.doc_spec.iteritems():
        path = self._append_path(path_prefix, field)

        # If the field is present, validate its value.
        if field in instance:
            self._validate_value(instance[field], spec, path, errors)
        else:
            # If not, add an error if it was a required key.
            if spec.get('required', False):
                errors[path] = "{} is required.".format(path)

    # Now loop over each field in the given instance and make sure we don't
    # have any fields not declared in the schema, unless strict mode has been
    # explicitly disabled.
    if self._strict:
        for field in instance:
            if field not in self.doc_spec:
                errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
Validates that the given instance of a document conforms to the given schema's structure and validations. Any validation errors are added to the given errors collection. The caller should assume the instance is considered valid if the errors collection is empty when this method returns.
entailment
def _validate_value(self, value, field_spec, path, errors):
    """Validates that the given field value is valid given the associated
    field spec and path. Any validation failures are added to the given
    errors collection."""
    # Check if the value is None and add an error if the field is not nullable.
    # Note that for backward compatibility reasons, the default value of
    # 'nullable' is the inverse of 'required' (which used to mean both that
    # the key be present and not set to None).
    if value is None:
        if not field_spec.get('nullable', not field_spec.get('required', False)):
            errors[path] = "{} is not nullable.".format(path)
        return

    # All fields should have a type
    field_type = field_spec['type']

    if isinstance(field_type, types.FunctionType):
        try:
            field_type = field_type(value)
        except Exception as e:
            raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)
        if not isinstance(field_type, (type, Schema, Array)):
            raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)

    # If our field is an embedded document, recurse into it
    if isinstance(field_type, Schema):
        if isinstance(value, dict):
            field_type._validate_instance(value, errors, path)
        else:
            errors[path] = "{} should be an embedded document".format(path)
        return

    elif isinstance(field_type, Array):
        if isinstance(value, list):
            is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
            for i, item in enumerate(value):
                contained_type = field_type.contained_type
                if is_dynamic:
                    contained_type = contained_type(item)
                instance_path = self._append_path(path, i)
                if isinstance(contained_type, Schema):
                    contained_type._validate_instance(item, errors, instance_path)
                elif not isinstance(item, contained_type):
                    errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
                    continue
        else:
            errors[path] = "{} should be an embedded array".format(path)
        return

    elif not isinstance(value, field_type):
        errors[path] = "Field should be of type {}".format(field_type)
        return

    validations = field_spec.get('validates', None)
    if validations is None:
        return

    self._apply_validations(errors, path, validations, value)
Validates that the given field value is valid given the associated field spec and path. Any validation failures are added to the given errors collection.
entailment
def load_config(path=None, defaults=None):
    """ Loads and parses an INI style configuration file using Python's
    built-in configparser module. If path is specified, load it.

    If ``defaults`` (a list of strings) is given, try to load each entry as a
    file, without throwing any error if the operation fails.

    If ``defaults`` is not given, the locations listed in the DEFAULT_FILES
    constant are tried.

    To completely disable defaults loading, pass in an empty list or
    ``False``.

    Returns the SafeConfigParser instance used to load and parse the files.
    """
    if defaults is None:
        defaults = DEFAULT_FILES

    config = ConfigParser(allow_no_value=True)
    if defaults:
        config.read(defaults)

    if path:
        with open(path) as fh:
            config.read_file(fh)
    return config
Loads and parses an INI style configuration file using Python's built-in configparser module. If path is specified, load it. If ``defaults`` (a list of strings) is given, try to load each entry as a file, without throwing any error if the operation fails. If ``defaults`` is not given, the locations listed in the DEFAULT_FILES constant are tried. To completely disable defaults loading, pass in an empty list or ``False``. Returns the SafeConfigParser instance used to load and parse the files.
entailment
def as_dict(config):
    """ Converts a ConfigParser object into a dictionary.

    The resulting dictionary has sections as keys which point to a dict of the
    section's options as key => value pairs.
    """
    settings = defaultdict(lambda: {})
    for section in config.sections():
        for key, val in config.items(section):
            settings[section][key] = val
    return settings
Converts a ConfigParser object into a dictionary. The resulting dictionary has sections as keys which point to a dict of the section's options as key => value pairs.
entailment
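A small sketch of the two helpers together (the INI path and the section/option names are hypothetical):

# Load defaults plus an explicit file, then flatten to a plain dict.
config = load_config('/etc/myapp.ini')          # hypothetical path
settings = as_dict(config)
# settings looks like {'section': {'option': 'value', ...}, ...}
print(settings.get('server', {}).get('port'))   # hypothetical keys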
def initialize(self, timeouts):
    """ Bind or connect the nanomsg socket to some address """
    # Bind or connect to address
    if self.bind is True:
        self.socket.bind(self.address)
    else:
        self.socket.connect(self.address)
    # Set send and recv timeouts
    self._set_timeouts(timeouts)
Bind or connect the nanomsg socket to some address
entailment
def _set_timeouts(self, timeouts):
    """ Set socket timeouts for send and receive respectively """
    (send_timeout, recv_timeout) = (None, None)
    try:
        (send_timeout, recv_timeout) = timeouts
    except TypeError:
        raise EndpointError(
            '`timeouts` must be a pair of numbers (2, 3) which represent '
            'the timeout values for send and receive respectively')
    if send_timeout is not None:
        self.socket.set_int_option(
            nanomsg.SOL_SOCKET, nanomsg.SNDTIMEO, send_timeout)
    if recv_timeout is not None:
        self.socket.set_int_option(
            nanomsg.SOL_SOCKET, nanomsg.RCVTIMEO, recv_timeout)
Set socket timeouts for send and receive respectively
entailment
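Based on the error message in `_set_timeouts`, `timeouts` is a `(send, recv)` pair, with `None` leaving a timeout unset; a hedged usage sketch (the endpoint construction itself is assumed, and nanomsg timeouts are expressed in milliseconds):

# Hypothetical endpoint set-up: bind/connect, then apply a (send, recv)
# timeout pair; None skips setting that side's timeout.
endpoint.initialize((2000, 3000))   # 2s send timeout, 3s recv timeout
endpoint.initialize((None, 3000))   # only set the receive timeout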
def send(self, payload):
    """ Encode, optionally sign, and send the payload through the socket """
    payload = self.encode(payload)
    payload = self.sign(payload)
    self.socket.send(payload)
Encode, optionally sign, and send the payload through the socket
entailment
def receive(self, decode=True):
    """ Receive from socket, authenticate and decode payload """
    payload = self.socket.recv()
    payload = self.verify(payload)
    if decode:
        payload = self.decode(payload)
    return payload
Receive from socket, authenticate and decode payload
entailment
def sign(self, payload):
    """ Sign payload using the supplied authenticator """
    if self.authenticator:
        return self.authenticator.signed(payload)
    return payload
Sign payload using the supplied authenticator
entailment
def verify(self, payload):
    """ Verify payload authenticity via the supplied authenticator """
    if not self.authenticator:
        return payload
    try:
        self.authenticator.auth(payload)
        return self.authenticator.unsigned(payload)
    except AuthenticatorInvalidSignature:
        raise
    except Exception as exception:
        raise AuthenticateError(str(exception))
Verify payload authenticity via the supplied authenticator
entailment
def decode(self, payload):
    """ Decode payload """
    try:
        return self.encoder.decode(payload)
    except Exception as exception:
        raise DecodeError(str(exception))
Decode payload
entailment
def encode(self, payload):
    """ Encode payload """
    try:
        return self.encoder.encode(payload)
    except Exception as exception:
        raise EncodeError(str(exception))
Encode payload
entailment
def start(self):
    """ Start and listen for calls """
    if threading.current_thread().name == 'MainThread':
        signal.signal(signal.SIGINT, self.stop)
    logging.info('Started on {}'.format(self.address))
    while True:
        self.process()
Start and listen for calls
entailment
def stop(self, dummy_signum=None, dummy_frame=None):
    """ Shutdown process (this method is also a signal handler) """
    logging.info('Shutting down ...')
    self.socket.close()
    sys.exit(0)
Shutdown process (this method is also a signal handler)
entailment
def get_summary(list_all=[], **kwargs):
    ''' summarize the report data
    @param list_all: a list which saves the report data
    @param kwargs: such as
        show_all: True/False, whether the report shows cases of all statuses
        proj_name: project name
        home_page: home page url
    '''
    all_summary = []
    for module in list_all:
        summary = {
            "module_name": module['Name'],
            "show_all": kwargs.get("show_all", True),
            "project_name": kwargs.get("proj_name", "TestProject"),
            "home_page": kwargs.get("home_page", __about__.HOME_PAGE),
            "start_time": "",
            "end_time": "",
            "duration_seconds": "",
            "total_case_num": len(module["TestCases"]),
            "pass_cases_num": 0,
            "fail_cases_num": 0,
            "details": []
        }

        for case in module["TestCases"]:
            case_detail = {}
            case_detail["linkurl"] = "./caselogs/%s_%s.log" % (case["case_name"], case["exec_date"])
            if case["status"].lower() == "pass":
                summary["pass_cases_num"] += 1
                case_detail["c_style"] = "tr_pass"
            else:
                summary["fail_cases_num"] += 1
                case_detail["c_style"] = "tr_fail"
            case_detail.update(case)
            summary["details"].append(case_detail)

        try:
            st = module["TestCases"][0].get("start_at")
            et = module["TestCases"][-1].get("end_at")
            summary["start_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(st))
            summary["end_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(et))
            summary["duration_seconds"] = float("%.2f" % (et - st))
        except Exception as _:
            logger.log_warning("Will set 'start_at' and 'end_at' to 'None'")
            (summary["start_time"], summary["end_time"], summary["duration_seconds"]) = (None, None, None)

        if summary["fail_cases_num"] > 0:
            summary["dict_report"] = {"result": 0, "message": "failure", "pass": summary["pass_cases_num"], "fail": summary["fail_cases_num"]}
        else:
            summary["dict_report"] = {"result": 1, "message": "success", "pass": summary["pass_cases_num"], "fail": summary["fail_cases_num"]}

        all_summary.append(summary)
    return all_summary
summarize the report data @param list_all: a list which saves the report data @param kwargs: such as show_all: True/False, whether the report shows cases of all statuses proj_name: project name home_page: home page url
entailment
def add_report_data(list_all=[], module_name="TestModule", **kwargs): ''' add report data to a list @param list_all: a list which save the report data @param module_name: test set name or test module name @param kwargs: such as case_name: testcase name status: test result, Pass or Fail resp_tester: responsible tester who write this case tester: tester who execute the test start_at: tester run this case at time end_at: tester stop this case at time ''' start_at = kwargs.get("start_at") case_name = kwargs.get("case_name","TestCase") raw_case_name = kwargs.get("raw_case_name","TestCase") exec_date_time = time.localtime(start_at) execdate = time.strftime("%Y-%m-%d",exec_date_time) exectime = time.strftime("%H:%M:%S",exec_date_time) _case_report = { 'resp_tester': kwargs.get("resp_tester","administrator"), 'tester': kwargs.get("tester","administrator"), 'case_name': case_name, 'raw_case_name': raw_case_name, 'status': kwargs.get("status","Pass"), 'exec_date': execdate, 'exec_time': exectime, 'start_at': start_at, 'end_at': kwargs.get("end_at"), } for module in list_all: if module_name != module["Name"]: continue for case in module["TestCases"]: if raw_case_name == case["raw_case_name"]: case.update(_case_report) return list_all module["TestCases"].append(_case_report) return list_all list_all.append({"Name": module_name, "TestCases": [_case_report]}) return list_all
add report data to a list @param list_all: a list which saves the report data @param module_name: test set name or test module name @param kwargs: such as case_name: testcase name status: test result, Pass or Fail resp_tester: responsible tester who wrote this case tester: tester who executed the test start_at: time at which the tester started this case end_at: time at which the tester stopped this case
entailment
def parse(self, subscription):
    """ Fetch the function registered for a certain subscription """
    for name in self.methods:
        tag = bytes(name.encode('utf-8'))
        if subscription.startswith(tag):
            fun = self.methods.get(name)
            message = subscription[len(tag):]
            return tag, message, fun
    return None, None, None
Fetch the function registered for a certain subscription
entailment
def subscribe(self, tag, fun, description=None):
    """ Subscribe to something and register a function """
    self.methods[tag] = fun
    self.descriptions[tag] = description
    self.socket.set_string_option(nanomsg.SUB, nanomsg.SUB_SUBSCRIBE, tag)
Subscribe to something and register a function
entailment
def process(self):
    """ Receive a subscription from the socket and process it """
    subscription = None
    result = None
    try:
        subscription = self.socket.recv()
    except AuthenticateError as exception:
        logging.error(
            'Subscriber error while authenticating request: {}'
            .format(exception), exc_info=1)
    except AuthenticatorInvalidSignature as exception:
        logging.error(
            'Subscriber error while authenticating request: {}'
            .format(exception), exc_info=1)
    except DecodeError as exception:
        logging.error(
            'Subscriber error while decoding request: {}'
            .format(exception), exc_info=1)
    except RequestParseError as exception:
        logging.error(
            'Subscriber error while parsing request: {}'
            .format(exception), exc_info=1)
    else:
        logging.debug(
            'Subscriber received payload: {}'
            .format(subscription))

    _tag, message, fun = self.parse(subscription)
    message = self.verify(message)
    message = self.decode(message)
    try:
        result = fun(message)
    except Exception as exception:
        logging.error(exception, exc_info=1)

    # Return result to check successful execution of `fun` when testing
    return result
Receive a subscription from the socket and process it
entailment
def build_payload(self, tag, message):
    """ Encode, sign payload (optional) and attach subscription tag """
    message = self.encode(message)
    message = self.sign(message)
    payload = bytes(tag.encode('utf-8')) + message
    return payload
Encode, sign payload (optional) and attach subscription tag
entailment
def publish(self, tag, message):
    """ Publish a message down the socket """
    payload = self.build_payload(tag, message)
    self.socket.send(payload)
Publish a message down the socket
entailment
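The framing in `build_payload` is simply the UTF-8 tag bytes prepended to the signed, encoded message, which is what the subscriber's `parse` strips off again by prefix match; a hedged sketch (socket setup omitted, publisher/subscriber objects assumed):

# Hypothetical round trip: the publisher frames tag + encoded message;
# on the subscriber, parse() matches the registered tag prefix.
publisher.publish('test', {'user': 'alice'})
# Subscriber side, after payload = socket.recv():
#   tag, message, fun = subscriber.parse(payload)
#   -> tag == b'test', message == the signed/encoded body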
def start_service(addr, n, authenticator):
    """ Start a service """
    s = Subscriber(addr, authenticator=authenticator)

    def do_something(line):
        pass

    s.subscribe('test', do_something)

    started = time.time()
    for _ in range(n):
        s.process()
    s.socket.close()
    duration = time.time() - started

    print('Subscriber service stats:')
    util.print_stats(n, duration)
    return
Start a service
entailment
def bench(client, n):
    """ Benchmark n requests """
    items = list(range(n))

    # Time client publish operations
    # ------------------------------
    started = time.time()
    for i in items:
        client.publish('test', i)
    duration = time.time() - started

    print('Publisher client stats:')
    util.print_stats(n, duration)
Benchmark n requests
entailment
def get_webpack(request, name='DEFAULT'):
    """
    Get the Webpack object for a given webpack config.

    Called at most once per request per config name.
    """
    if not hasattr(request, '_webpack_map'):
        request._webpack_map = {}
    wp = request._webpack_map.get(name)
    if wp is None:
        wp = request._webpack_map[name] = Webpack(request, name)
    return wp
Get the Webpack object for a given webpack config. Called at most once per request per config name.
entailment
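`includeme` below registers `get_webpack` as a request method named `webpack`, so application code reaches it through the request; a hedged sketch (the extra config name is hypothetical):

# Hypothetical view code: the request method added by includeme() caches
# one Webpack object per config name for the lifetime of the request.
wp_default = request.webpack()           # the 'DEFAULT' config
wp_vendor = request.webpack('vendor')    # hypothetical extra config
assert request.webpack() is wp_default   # cached, not rebuilt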
def includeme(config):
    """ Add pyramid_webpack methods and config to the app """
    settings = config.registry.settings
    root_package_name = config.root_package.__name__
    config.registry.webpack = {
        'DEFAULT': WebpackState(settings, root_package_name)
    }
    for extra_config in aslist(settings.get('webpack.configs', [])):
        state = WebpackState(settings, root_package_name, name=extra_config)
        config.registry.webpack[extra_config] = state

    # Set up any static views
    for state in six.itervalues(config.registry.webpack):
        if state.static_view:
            config.add_static_view(name=state.static_view_name,
                                   path=state.static_view_path,
                                   cache_max_age=state.cache_max_age)

    config.add_request_method(get_webpack, 'webpack')
Add pyramid_webpack methods and config to the app
entailment
def _get_setting(self, setting, default=None, name=None, inherit=True):
    """ Helper function to fetch settings, inheriting from the base """
    if name is None:
        name = self.name
    if name == 'DEFAULT':
        return self._settings.get('webpack.{0}'.format(setting), default)
    else:
        val = self._settings.get('webpack.{0}.{1}'.format(name, setting),
                                 SENTINEL)
        if val is SENTINEL:
            if inherit:
                return self._get_setting(setting, default, 'DEFAULT')
            else:
                return default
        else:
            return val
Helper function to fetch settings, inheriting from the base
entailment
def load_stats(self, cache=None, wait=None):
    """ Load and cache the webpack-stats file """
    if cache is None:
        cache = not self.debug
    if wait is None:
        wait = self.debug
    if not cache or self._stats is None:
        self._stats = self._load_stats()
        start = time.time()
        while wait and self._stats.get('status') == 'compiling':
            if self.timeout and (time.time() - start > self.timeout):
                raise RuntimeError("Webpack {0!r} timed out while compiling"
                                   .format(self.stats_file.path))
            time.sleep(0.1)
            self._stats = self._load_stats()
    return self._stats
Load and cache the webpack-stats file
entailment
def _load_stats(self):
    """ Load the webpack-stats file """
    for attempt in range(0, 3):
        try:
            with self.stats_file.open() as f:
                return json.load(f)
        except ValueError:
            # If we failed to parse the JSON, it's possible that the
            # webpack process is writing to it concurrently and it's in a
            # bad state. Sleep and retry.
            if attempt < 2:
                time.sleep(attempt * 0.2)
            else:
                raise
        except IOError:
            raise IOError(
                "Could not read stats file {0}. Make sure you are using the "
                "webpack-bundle-tracker plugin"
                .format(self.stats_file))
Load the webpack-stats file
entailment
def _chunk_filter(self, extensions):
    """ Create a filter from the extensions and ignore files """
    if isinstance(extensions, six.string_types):
        extensions = extensions.split()

    def _filter(chunk):
        """ Exclusion filter """
        name = chunk['name']
        if extensions is not None:
            if not any(name.endswith(e) for e in extensions):
                return False
        for pattern in self.state.ignore_re:
            if pattern.match(name):
                return False
        for pattern in self.state.ignore:
            if fnmatch.fnmatchcase(name, pattern):
                return False
        return True
    return _filter
Create a filter from the extensions and ignore files
entailment
def _add_url(self, chunk):
    """ Add a 'url' property to a chunk and return it """
    if 'url' in chunk:
        return chunk
    public_path = chunk.get('publicPath')
    if public_path:
        chunk['url'] = public_path
    else:
        fullpath = posixpath.join(self.state.static_view_path, chunk['name'])
        chunk['url'] = self._request.static_url(fullpath)
    return chunk
Add a 'url' property to a chunk and return it
entailment
def get_bundle(self, bundle_name, extensions=None):
    """ Get all the chunks contained in a bundle """
    if self.stats.get('status') == 'done':
        bundle = self.stats.get('chunks', {}).get(bundle_name, None)
        if bundle is None:
            raise KeyError('No such bundle {0!r}.'.format(bundle_name))
        test = self._chunk_filter(extensions)
        return [self._add_url(c) for c in bundle if test(c)]
    elif self.stats.get('status') == 'error':
        raise RuntimeError("{error}: {message}".format(**self.stats))
    else:
        raise RuntimeError(
            "Bad webpack stats file {0} status: {1!r}"
            .format(self.state.stats_file, self.stats.get('status')))
Get all the chunks contained in a bundle
entailment
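A sketch of rendering a bundle's chunks, assuming the stats file has finished compiling (status 'done') and each chunk carries the 'url' added by `_add_url`; the bundle name is hypothetical:

# Hypothetical rendering of script tags for the 'main' bundle's JS chunks.
tags = [
    '<script src="{0}"></script>'.format(chunk['url'])
    for chunk in webpack.get_bundle('main', extensions='.js')
]
print('\n'.join(tags))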
def _unique_names():
    """Generates unique sequences of bytes.
    """
    characters = ("abcdefghijklmnopqrstuvwxyz"
                  "0123456789")
    characters = [characters[i:i + 1] for i in irange(len(characters))]
    rng = random.Random()
    while True:
        letters = [rng.choice(characters) for i in irange(10)]
        yield ''.join(letters)
Generates unique sequences of bytes.
entailment
def escape_queue(s):
    """Escapes the path to a queue, e.g. preserves ~ at the beginning.
    """
    if isinstance(s, PosixPath):
        s = unicode_(s)
    elif isinstance(s, bytes):
        s = s.decode('utf-8')
    if s.startswith('~/'):
        return '~/' + shell_escape(s[2:])
    else:
        return shell_escape(s)
Escapes the path to a queue, e.g. preserves ~ at the beginning.
entailment
def parse_ssh_destination(destination):
    """Parses the SSH destination argument.
    """
    match = _re_ssh.match(destination)
    if not match:
        raise InvalidDestination("Invalid destination: %s" % destination)
    user, password, host, port = match.groups()
    info = {}
    if user:
        info['username'] = user
    else:
        info['username'] = getpass.getuser()
    if password:
        info['password'] = password
    if port:
        info['port'] = int(port)
    info['hostname'] = host

    return info
Parses the SSH destination argument.
entailment
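Judging from the groups it unpacks, `_re_ssh` captures user, password, host and port; a hedged sketch of the expected shapes (the destinations are hypothetical):

# Hypothetical inputs and the dicts they should produce.
parse_ssh_destination('alice:secret@example.org:2222')
# -> {'username': 'alice', 'password': 'secret',
#     'hostname': 'example.org', 'port': 2222}
parse_ssh_destination('example.org')
# -> {'username': <getpass.getuser()>, 'hostname': 'example.org'}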
def _ssh_client(self):
    """Gets an SSH client to connect with.
    """
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
    return ssh
Gets an SSH client to connect with.
entailment
def _connect(self):
    """Connects via SSH.
    """
    ssh = self._ssh_client()
    logger.debug("Connecting with %s",
                 ', '.join('%s=%r' % (k, v if k != "password" else "***")
                           for k, v in iteritems(self.destination)))
    ssh.connect(**self.destination)
    logger.debug("Connected to %s", self.destination['hostname'])
    self._ssh = ssh
Connects via SSH.
entailment
def get_client(self):
    """Gets the SSH client.

    This will check that the connection is still alive first, and reconnect
    if necessary.
    """
    if self._ssh is None:
        self._connect()
        return self._ssh
    else:
        try:
            chan = self._ssh.get_transport().open_session()
        except (socket.error, paramiko.SSHException):
            logger.warning("Lost connection, reconnecting...")
            self._ssh.close()
            self._connect()
        else:
            chan.close()
        return self._ssh
Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary.
entailment
def _call(self, cmd, get_output):
    """Calls a command through the SSH connection.

    Remote stderr gets printed to this program's stderr. Output is captured
    and may be returned.
    """
    server_err = self.server_logger()
    chan = self.get_client().get_transport().open_session()
    try:
        logger.debug("Invoking %r%s",
                     cmd, " (stdout)" if get_output else "")
        chan.exec_command('/bin/sh -c %s' % shell_escape(cmd))
        output = b''
        while True:
            r, w, e = select.select([chan], [], [])
            if chan not in r:
                continue  # pragma: no cover
            recvd = False
            while chan.recv_stderr_ready():
                data = chan.recv_stderr(1024)
                server_err.append(data)
                recvd = True
            while chan.recv_ready():
                data = chan.recv(1024)
                if get_output:
                    output += data
                recvd = True
            if not recvd and chan.exit_status_ready():
                break
        output = output.rstrip(b'\r\n')
        return chan.recv_exit_status(), output
    finally:
        server_err.done()
        chan.close()
Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned.
entailment
def check_call(self, cmd):
    """Calls a command through SSH.
    """
    ret, _ = self._call(cmd, False)
    if ret != 0:  # pragma: no cover
        raise RemoteCommandFailure(command=cmd, ret=ret)
Calls a command through SSH.
entailment
def check_output(self, cmd):
    """Calls a command through SSH and returns its output.
    """
    ret, output = self._call(cmd, True)
    if ret != 0:  # pragma: no cover
        raise RemoteCommandFailure(command=cmd, ret=ret)
    logger.debug("Output: %r", output)
    return output
Calls a command through SSH and returns its output.
entailment
def _resolve_queue(self, queue, depth=0, links=None):
    """Finds the location of tej's queue directory on the server.

    The `queue` set when constructing this `RemoteQueue` might be relative
    to the home directory and might contain ``~user`` placeholders. Also,
    each queue may in fact be a link to another path (a file containing the
    string ``tejdir:``, a space, and a new pathname, relative to this link's
    location).
    """
    if depth == 0:
        logger.debug("resolve_queue(%s)", queue)
    answer = self.check_output(
        'if [ -d %(queue)s ]; then '
        '  cd %(queue)s; echo "dir"; cat version; pwd; '
        'elif [ -f %(queue)s ]; then '
        '  cat %(queue)s; '
        'else '
        '  echo no; '
        'fi' % {
            'queue': escape_queue(queue)})
    if answer == b'no':
        if depth > 0:
            logger.debug("Broken link at depth=%d", depth)
        else:
            logger.debug("Path doesn't exist")
        return None, depth
    elif answer.startswith(b'dir\n'):
        version, runtime, path = answer[4:].split(b'\n', 2)
        try:
            version = tuple(int(e)
                            for e in version.decode('ascii', 'ignore')
                                            .split('.'))
        except ValueError:
            version = 0, 0
        if version[:2] != self.PROTOCOL_VERSION:
            raise QueueExists(
                msg="Queue exists and is using incompatible protocol "
                    "version %s" % '.'.join('%s' % e for e in version))
        path = PosixPath(path)
        runtime = runtime.decode('ascii', 'replace')
        if self.need_runtime is not None:
            if (self.need_runtime is not None and
                    runtime not in self.need_runtime):
                raise QueueExists(
                    msg="Queue exists and is using explicitly disallowed "
                        "runtime %s" % runtime)
        logger.debug("Found directory at %s, depth=%d, runtime=%s",
                     path, depth, runtime)
        return path, depth
    elif answer.startswith(b'tejdir: '):
        new = queue.parent / answer[8:]
        logger.debug("Found link to %s, recursing", new)
        if links is not None:
            links.append(queue)
        return self._resolve_queue(new, depth + 1)
    else:  # pragma: no cover
        logger.debug("Server returned %r", answer)
        raise RemoteCommandFailure(msg="Queue resolution command failed "
                                       "in unexpected way")
Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location).
entailment
def _get_queue(self):
    """Gets the actual location of the queue, or None.
    """
    if self._queue is None:
        self._links = []
        queue, depth = self._resolve_queue(self.queue, links=self._links)
        if queue is None and depth > 0:
            raise QueueLinkBroken
        self._queue = queue
    return self._queue
Gets the actual location of the queue, or None.
entailment
def setup(self, links=None, force=False, only_links=False):
    """Installs the runtime at the target location.

    This will not replace an existing installation, unless `force` is True.

    After installation, creates links to this installation at the specified
    locations.
    """
    if not links:
        links = []

    if only_links:
        logger.info("Only creating links")
        for link in links:
            self.check_call('echo "tejdir:" %(queue)s > %(link)s' % {
                'queue': escape_queue(self.queue),
                'link': escape_queue(link)})
        return

    queue, depth = self._resolve_queue(self.queue)
    if queue is not None or depth > 0:
        if force:
            if queue is None:
                logger.info("Replacing broken link")
            elif depth > 0:
                logger.info("Replacing link to %s...", queue)
            else:
                logger.info("Replacing existing queue...")
            self.check_call('rm -Rf %s' % escape_queue(self.queue))
        else:
            if queue is not None and depth > 0:
                raise QueueExists("Queue already exists (links to %s)\n"
                                  "Use --force to replace" % queue)
            elif depth > 0:
                raise QueueExists("Broken link exists\n"
                                  "Use --force to replace")
            else:
                raise QueueExists("Queue already exists\n"
                                  "Use --force to replace")

    queue = self._setup()

    for link in links:
        self.check_call('echo "tejdir:" %(queue)s > %(link)s' % {
            'queue': escape_queue(queue),
            'link': escape_queue(link)})
Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations.
entailment
def _setup(self):
    """Actually installs the runtime.
    """
    # Expands ~user in queue
    if self.queue.path[0:1] == b'/':
        queue = self.queue
    else:
        if self.queue.path[0:1] == b'~':
            output = self.check_output('echo %s' %
                                       escape_queue(self.queue))
            queue = PosixPath(output.rstrip(b'\r\n'))
        else:
            output = self.check_output('pwd')
            queue = PosixPath(output.rstrip(b'\r\n')) / self.queue
        logger.debug("Resolved to %s", queue)

    # Select runtime
    if not self.setup_runtime:
        # Autoselect
        if self._call('which qsub', False)[0] == 0:
            logger.debug("qsub is available, using runtime 'pbs'")
            runtime = 'pbs'
        else:
            logger.debug("qsub not found, using runtime 'default'")
            runtime = 'default'
    else:
        runtime = self.setup_runtime

    if self.need_runtime is not None and runtime not in self.need_runtime:
        raise ValueError("About to setup runtime %s but that wouldn't "
                         "match explicitly allowed runtimes" % runtime)

    logger.info("Installing runtime %s%s at %s",
                runtime,
                "" if self.setup_runtime else " (auto)",
                self.queue)

    # Uploads runtime
    scp_client = self.get_scp_client()
    filename = pkg_resources.resource_filename('tej',
                                               'remotes/%s' % runtime)
    scp_client.put(filename, str(queue), recursive=True)
    logger.debug("Files uploaded")

    # Runs post-setup script
    self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup'))
    logger.debug("Post-setup script done")

    self._queue = queue
    return queue
Actually installs the runtime.
entailment
def submit(self, job_id, directory, script=None):
    """Submits a job to the queue.

    If the runtime is not there, it will be installed. If it is a broken
    chain of links, error.
    """
    if job_id is None:
        job_id = '%s_%s_%s' % (Path(directory).unicodename,
                               self.destination['username'],
                               make_unique_name())
    else:
        check_jobid(job_id)

    queue = self._get_queue()
    if queue is None:
        queue = self._setup()

    if script is None:
        script = 'start.sh'

    # Create directory
    ret, target = self._call('%s %s' % (
                             shell_escape(queue / 'commands/new_job'),
                             job_id),
                             True)
    if ret == 4:
        raise JobAlreadyExists
    elif ret != 0:
        raise JobNotFound("Couldn't create job")
    target = PosixPath(target)
    logger.debug("Server created directory %s", target)

    # Upload to directory
    try:
        scp_client = self.get_scp_client()
        scp_client.put(str(Path(directory)),
                       str(target),
                       recursive=True)
    except BaseException as e:
        try:
            self.delete(job_id)
        except BaseException:
            raise e
        raise
    logger.debug("Files uploaded")

    # Submit job
    self.check_call('%s %s %s %s' % (
                    shell_escape(queue / 'commands/submit'),
                    job_id,
                    shell_escape(target),
                    shell_escape(script)))
    logger.info("Submitted job %s", job_id)

    return job_id
Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error.
entailment
def status(self, job_id):
    """Gets the status of a previously-submitted job.
    """
    check_jobid(job_id)

    queue = self._get_queue()
    if queue is None:
        raise QueueDoesntExist

    ret, output = self._call('%s %s' % (
                             shell_escape(queue / 'commands/status'),
                             job_id),
                             True)
    if ret == 0:
        directory, result = output.splitlines()
        result = result.decode('utf-8')
        return RemoteQueue.JOB_DONE, PosixPath(directory), result
    elif ret == 2:
        directory = output.splitlines()[0]
        return RemoteQueue.JOB_RUNNING, PosixPath(directory), None
    elif ret == 3:
        raise JobNotFound
    else:
        raise RemoteCommandFailure(command="commands/status",
                                   ret=ret)
Gets the status of a previously-submitted job.
entailment
def download(self, job_id, files, **kwargs):
    """Downloads files from server.
    """
    check_jobid(job_id)

    if not files:
        return
    if isinstance(files, string_types):
        files = [files]

    directory = False
    recursive = kwargs.pop('recursive', True)

    if 'destination' in kwargs and 'directory' in kwargs:
        raise TypeError("Only use one of 'destination' or 'directory'")
    elif 'destination' in kwargs:
        destination = Path(kwargs.pop('destination'))
        if len(files) != 1:
            raise ValueError("'destination' specified but multiple files "
                             "given; did you mean to use 'directory'?")
    elif 'directory' in kwargs:
        destination = Path(kwargs.pop('directory'))
        directory = True
    if kwargs:
        raise TypeError("Got unexpected keyword arguments")

    # Might raise JobNotFound
    status, target, result = self.status(job_id)

    scp_client = self.get_scp_client()
    for filename in files:
        logger.info("Downloading %s", target / filename)
        if directory:
            scp_client.get(str(target / filename),
                           str(destination / filename),
                           recursive=recursive)
        else:
            scp_client.get(str(target / filename),
                           str(destination),
                           recursive=recursive)
Downloads files from server.
entailment
def kill(self, job_id):
    """Kills a job on the server.
    """
    check_jobid(job_id)

    queue = self._get_queue()
    if queue is None:
        raise QueueDoesntExist

    ret, output = self._call('%s %s' % (
                             shell_escape(queue / 'commands/kill'),
                             job_id),
                             False)
    if ret == 3:
        raise JobNotFound
    elif ret != 0:
        raise RemoteCommandFailure(command='commands/kill',
                                   ret=ret)
Kills a job on the server.
entailment
def list(self):
    """Lists the jobs on the server.
    """
    queue = self._get_queue()
    if queue is None:
        raise QueueDoesntExist

    output = self.check_output('%s' %
                               shell_escape(queue / 'commands/list'))

    job_id, info = None, None
    for line in output.splitlines():
        line = line.decode('utf-8')
        if line.startswith('    '):
            key, value = line[4:].split(': ', 1)
            info[key] = value
        else:
            if job_id is not None:
                yield job_id, info
            job_id = line
            info = {}
    if job_id is not None:
        yield job_id, info
Lists the jobs on the server.
entailment
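`list` is a generator yielding `(job_id, info)` pairs, so a caller iterates it directly; a sketch (queue construction assumed, the 'status' key is hypothetical):

# Hypothetical listing loop over the generator returned by list().
for job_id, info in queue.list():
    print(job_id, info.get('status'))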
def multi_substitution(*substitutions):
    """
    Take a sequence of pairs specifying substitutions, and create
    a function that performs those substitutions.

    >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
    'baz'
    """
    substitutions = itertools.starmap(substitution, substitutions)
    # compose function applies last function first, so reverse the
    # substitutions to get the expected order.
    substitutions = reversed(tuple(substitutions))
    return compose(*substitutions)
Take a sequence of pairs specifying substitutions, and create a function that performs those substitutions. >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') 'baz'
entailment
def simple_html_strip(s):
    r"""
    Remove HTML from the string `s`.

    >>> str(simple_html_strip(''))
    ''

    >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
    A stormy day in paradise

    >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
    Somebody  tell the truth.

    >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
    What about
    multiple lines?
    """
    html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
    texts = (
        match.group(3) or ''
        for match in html_stripper.finditer(s)
    )
    return ''.join(texts)
r""" Remove HTML from the string `s`. >>> str(simple_html_strip('')) '' >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise')) A stormy day in paradise >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.')) Somebody tell the truth. >>> print(simple_html_strip('What about<br/>\nmultiple lines?')) What about multiple lines?
entailment
def remove_prefix(text, prefix):
    """
    Remove the prefix from the text if it exists.

    >>> remove_prefix('underwhelming performance', 'underwhelming ')
    'performance'

    >>> remove_prefix('something special', 'sample')
    'something special'
    """
    null, prefix, rest = text.rpartition(prefix)
    return rest
Remove the prefix from the text if it exists. >>> remove_prefix('underwhelming performance', 'underwhelming ') 'performance' >>> remove_prefix('something special', 'sample') 'something special'
entailment
def remove_suffix(text, suffix):
    """
    Remove the suffix from the text if it exists.

    >>> remove_suffix('name.git', '.git')
    'name'

    >>> remove_suffix('something special', 'sample')
    'something special'
    """
    rest, suffix, null = text.partition(suffix)
    return rest
Remove the suffix from the text if it exists. >>> remove_suffix('name.git', '.git') 'name' >>> remove_suffix('something special', 'sample') 'something special'
entailment
def common_prefix(s1, s2):
    """
    Return the common prefix of two lines.
    """
    index = min(len(s1), len(s2))
    while s1[:index] != s2[:index]:
        index -= 1
    return s1[:index]
Return the common prefix of two lines.
entailment
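A quick illustration of `common_prefix` (it walks the index back until the two prefixes agree):

assert common_prefix('interstellar', 'interstate') == 'interst'
assert common_prefix('abc', 'xyz') == ''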
def _get_graph(self, ctx, bundle, extensions, caller=None):
    """ Run a graph and render the tag contents for each output """
    request = ctx.get('request')
    if request is None:
        request = get_current_request()
    if ':' in bundle:
        config_name, bundle = bundle.split(':')
    else:
        config_name = 'DEFAULT'
    webpack = request.webpack(config_name)
    assets = (caller(a) for a in webpack.get_bundle(bundle, extensions))
    return ''.join(assets)
Run a graph and render the tag contents for each output
entailment
def activate(lancet, method, project):
    """Switch to this project."""
    with taskstatus("Looking up project") as ts:
        if method == "key":
            func = get_project_keys
        elif method == "dir":
            func = get_project_keys

        for key, project_path in func(lancet):
            if key.lower() == project.lower():
                break
        else:
            ts.abort(
                'Project "{}" not found (using {}-based lookup)',
                project,
                method,
            )

    # Load the configuration
    config = load_config(os.path.join(project_path, LOCAL_CONFIG))

    # cd to the project directory
    lancet.defer_to_shell("cd", project_path)

    # Activate virtualenv
    venv = config.get("lancet", "virtualenv", fallback=None)
    if venv:
        venv_path = os.path.join(project_path, os.path.expanduser(venv))
        activate_script = os.path.join(venv_path, "bin", "activate")
        lancet.defer_to_shell("source", activate_script)
    else:
        if "VIRTUAL_ENV" in os.environ:
            lancet.defer_to_shell("deactivate")
Switch to this project.
entailment
def workon(ctx, issue_id, new, base_branch):
    """
    Start work on a given issue.

    This command retrieves the issue from the issue tracker, creates and
    checks out a new aptly-named branch, puts the issue in the configured
    active status, assigns it to you and starts a correctly linked Harvest
    timer.

    If a branch with the same name as the one to be created already exists,
    it is checked out instead. Variations in the branch name occurring after
    the issue ID are accounted for and the branch renamed to match the new
    issue summary.

    If the `default_project` directive is correctly configured, it is enough
    to give the issue ID (instead of the full project prefix + issue ID).
    """
    lancet = ctx.obj

    if not issue_id and not new:
        raise click.UsageError("Provide either an issue ID or the --new flag.")
    elif issue_id and new:
        raise click.UsageError(
            "Provide either an issue ID or the --new flag, but not both."
        )

    if new:
        # Create a new issue
        summary = click.prompt("Issue summary")
        issue = create_issue(
            lancet, summary=summary, add_to_active_sprint=True
        )
    else:
        issue = get_issue(lancet, issue_id)

    username = lancet.tracker.whoami()
    active_status = lancet.config.get("tracker", "active_status")
    if not base_branch:
        base_branch = lancet.config.get("repository", "base_branch")

    # Get the working branch
    branch = get_branch(lancet, issue, base_branch)

    # Make sure the issue is in a correct status
    transition = get_transition(ctx, lancet, issue, active_status)

    # Make sure the issue is assigned to us
    assign_issue(lancet, issue, username, active_status)

    # Activate environment
    set_issue_status(lancet, issue, active_status, transition)

    with taskstatus("Checking out working branch") as ts:
        lancet.repo.checkout(branch.name)
        ts.ok('Checked out working branch based on "{}"'.format(base_branch))

    with taskstatus("Starting harvest timer") as ts:
        lancet.timer.start(issue)
        ts.ok("Started harvest timer")
Start work on a given issue. This command retrieves the issue from the issue tracker, creates and checks out a new aptly-named branch, puts the issue in the configured active status, assigns it to you and starts a correctly linked Harvest timer. If a branch with the same name as the one to be created already exists, it is checked out instead. Variations in the branch name occurring after the issue ID are accounted for and the branch renamed to match the new issue summary. If the `default_project` directive is correctly configured, it is enough to give the issue ID (instead of the full project prefix + issue ID).
entailment
def time(lancet, issue):
    """
    Start a Harvest timer for the given issue.

    This command takes care of linking the timer with the issue tracker page
    for the given issue. If the issue is not passed to the command, it is
    taken from the currently active branch.
    """
    issue = get_issue(lancet, issue)

    with taskstatus("Starting harvest timer") as ts:
        lancet.timer.start(issue)
        ts.ok("Started harvest timer")
Start a Harvest timer for the given issue. This command takes care of linking the timer with the issue tracker page for the given issue. If the issue is not passed to the command, it is taken from the currently active branch.
entailment
def pause(ctx):
    """
    Pause work on the current issue.

    This command puts the issue in the configured paused status and stops the
    current Harvest timer.
    """
    lancet = ctx.obj
    paused_status = lancet.config.get("tracker", "paused_status")

    # Get the issue
    issue = get_issue(lancet)

    # Make sure the issue is in a correct status
    transition = get_transition(ctx, lancet, issue, paused_status)

    # Activate environment
    set_issue_status(lancet, issue, paused_status, transition)

    with taskstatus("Pausing harvest timer") as ts:
        lancet.timer.pause()
        ts.ok("Harvest timer paused")
Pause work on the current issue. This command puts the issue in the configured paused status and stops the current Harvest timer.
entailment
def resume(ctx):
    """
    Resume work on the currently active issue.

    The issue is retrieved from the currently active branch name.
    """
    lancet = ctx.obj
    username = lancet.tracker.whoami()
    active_status = lancet.config.get("tracker", "active_status")

    # Get the issue
    issue = get_issue(lancet)

    # Make sure the issue is in a correct status
    transition = get_transition(ctx, lancet, issue, active_status)

    # Make sure the issue is assigned to us
    assign_issue(lancet, issue, username, active_status)

    # Activate environment
    set_issue_status(lancet, issue, active_status, transition)

    with taskstatus("Resuming harvest timer") as ts:
        lancet.timer.start(issue)
        ts.ok("Resumed harvest timer")
Resume work on the currently active issue. The issue is retrieved from the currently active branch name.
entailment
def ssh(lancet, print_cmd, environment):
    """
    SSH into the given environment, based on the dploi configuration.
    """
    namespace = {}
    with open(lancet.config.get('dploi', 'deployment_spec')) as fh:
        code = compile(fh.read(), 'deployment.py', 'exec')
        exec(code, {}, namespace)

    config = namespace['settings'][environment]
    host = '{}@{}'.format(config['user'], config['hosts'][0])
    cmd = ['ssh', '-p', str(config.get('port', 22)), host]

    if print_cmd:
        click.echo(' '.join(quote(s) for s in cmd))
    else:
        lancet.defer_to_shell(*cmd)
SSH into the given environment, based on the dploi configuration.
entailment
def _setup_helper():
    """Print the shell integration code."""
    base = os.path.abspath(os.path.dirname(__file__))
    helper = os.path.join(base, "helper.sh")
    with open(helper) as fh:
        click.echo(fh.read())
Print the shell integration code.
entailment
def _commands(ctx):
    """Prints a list of commands for shell completion hooks."""
    ctx = ctx.parent
    ctx.show_hidden_subcommands = False
    main = ctx.command

    for subcommand in main.list_commands(ctx):
        cmd = main.get_command(ctx, subcommand)
        if cmd is None:
            continue
        help = cmd.short_help or ""
        click.echo("{}:{}".format(subcommand, help))
Prints a list of commands for shell completion hooks.
entailment
def _arguments(ctx, command_name=None):
    """
    Prints a list of arguments for shell completion hooks.

    If a command name is given, returns the arguments for that subcommand.
    The command name has to refer to a command; aliases are not supported.
    """
    ctx = ctx.parent
    main = ctx.command

    if command_name:
        command = main.get_command(ctx, command_name)
        if not command:
            return
    else:
        command = main

    types = ["option", "argument"]
    all_params = sorted(
        command.get_params(ctx),
        key=lambda p: types.index(p.param_type_name)
    )

    def get_name(param):
        return max(param.opts, key=len)

    for param in all_params:
        if param.param_type_name == "option":
            option = get_name(param)
            same_dest = [
                get_name(p) for p in all_params if p.name == param.name
            ]
            if same_dest:
                option = "({})".format(" ".join(same_dest)) + option
            if param.help:
                option += "[{}]".format(param.help or "")
            if not param.is_flag:
                option += "=:( )"
            click.echo(option)
        elif param.param_type_name == "argument":
            option = get_name(param)
            click.echo(":{}".format(option))
Prints a list of arguments for shell completion hooks. If a command name is given, returns the arguments for that subcommand. The command name has to refer to a command; aliases are not supported.
entailment
def _autocomplete(ctx, shell):
    """Print the shell autocompletion code."""
    if not shell:
        shell = os.environ.get("SHELL", "")
        shell = os.path.basename(shell).lower()
    if not shell:
        click.secho(
            "Your shell could not be detected, please pass its name "
            "as the argument.",
            fg="red",
        )
        ctx.exit(-1)

    base = os.path.abspath(os.path.dirname(__file__))
    autocomplete = os.path.join(base, "autocomplete", "{}.sh".format(shell))

    if not os.path.exists(autocomplete):
        click.secho(
            "Autocompletion for your shell ({}) is currently not "
            "supported.",
            fg="red",
        )
        ctx.exit(-1)

    with open(autocomplete) as fh:
        click.echo(fh.read())
Print the shell autocompletion code.
entailment
def raisefrom(exc_type, message, exc):
    # type: (Any, str, BaseException) -> None
    """Call Python 3 raise from or emulate it for Python 2

    Args:
        exc_type (Any): Type of Exception
        message (str): Error message to display
        exc (BaseException): original exception

    Returns:
        None
    """
    if sys.version_info[:2] >= (3, 2):
        six.raise_from(exc_type(message), exc)
    else:
        six.reraise(exc_type, '%s - %s' % (message, exc), sys.exc_info()[2])
Call Python 3 raise from or emulate it for Python 2 Args: exc_type (Any): Type of Exception message (str): Error message to display exc (BaseException): original exception Returns: None
entailment
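A hedged usage sketch, chaining a lower-level exception into a domain error in a way that works on both Python 2 and 3 (the path and error text are hypothetical):

# Hypothetical call site: wrap an IOError in a higher-level error while
# preserving the original exception (via `raise from` on Python 3).
try:
    open('/nonexistent/config.yml')
except IOError as exc:
    raisefrom(RuntimeError, 'Failed to load configuration', exc)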
def init_runner(self, parser, tracers, projinfo):
    ''' Initialize some instances in preparation for running a test case.
    @note: should not override
    @param parser: instance of TestCaseParser
    @param tracers: dict type for the instance of Tracer. Such as
        {"": tracer_obj} or
        {"192.168.0.1:5555": tracer_obj1, "192.168.0.2:5555": tracer_obj2}
    @param projinfo: dict type of test case. use like:
        self.proj_info["module"], self.proj_info["name"]
        yaml case like:
            - project:
                name: xxx
                module: xxxx
        dict case like:
            {"project": {"name": xxx, "module": xxxx}}
    '''
    self.parser = parser
    self.tracers = tracers
    self.proj_info = projinfo
Initialize instances in preparation for running test cases. @note: should not be overridden @param parser: instance of TestCaseParser @param tracers: dict mapping device addresses to Tracer instances, e.g. {"":tracer_obj} or {"192.168.0.1:5555":tracer_obj1, "192.168.0.2:5555":tracer_obj2} @param projinfo: dict of project info. Use like: self.proj_info["module"], self.proj_info["name"] yaml case like: - project: name: xxx module: xxxx dict case like: {"project": {"name": xxx, "module": xxxx}}
entailment
def _run_grid_multiprocess(self, func, iterables):
    '''Run cases with multiple processes to support selenium grid mode
    (multiple browsers) and appium grid mode (multiple devices).
    @param func: function object
    @param iterables: iterable objects
    '''
    multiprocessing.freeze_support()
    pool = multiprocessing.Pool()
    pool_tracers = pool.map(func, iterables)
    pool.close()
    pool.join()
    # The tracer instances passed to pool.map are pickled into the worker
    # processes, so their addresses change; after the run, rebuild
    # self.tracers from the tracers returned by the workers.
    self.tracers = dict(zip(self._default_devices, pool_tracers))
Run cases with multiple processes to support selenium grid mode (multiple browsers) and appium grid mode (multiple devices). @param func: function object @param iterables: iterable objects
entailment
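A standalone sketch of the pattern used above: objects sent through pool.map are pickled into worker processes, so mutation happens on copies and the caller must rebuild its state from the returned values (the worker function and data are illustrative):

import multiprocessing

def run_case(tracer):
    # Runs in a separate process on a pickled copy of `tracer`.
    tracer["passed"] = True
    return tracer

if __name__ == "__main__":
    devices = ["192.168.0.1:5555", "192.168.0.2:5555"]
    tracers = [{"device": d} for d in devices]
    pool = multiprocessing.Pool()
    results = pool.map(run_case, tracers)
    pool.close()
    pool.join()
    # Rebuild the device -> tracer mapping from the returned copies.
    tracers_by_device = dict(zip(devices, results))
    print(tracers_by_device)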
def _run_grid_multithread(self, func, iterables):
    '''Run cases with multiple threads to support selenium grid mode
    (multiple browsers) and appium grid mode (multiple devices).
    @param func: function object
    @param iterables: iterable objects
    '''
    threads = [threading.Thread(target=func, args=(x,)) for x in iterables]
    for thread in threads:
        thread.setDaemon(True)
        thread.start()
    # Join only after all threads have started; joining inside the start
    # loop would run the threads one after another instead of concurrently.
    for thread in threads:
        thread.join()
Run cases with multiple threads to support selenium grid mode (multiple browsers) and appium grid mode (multiple devices). @param func: function object @param iterables: iterable objects
entailment
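The start-all-then-join-all fix above matters; a minimal illustration (worker function is hypothetical):

import threading
import time

def worker(n):
    time.sleep(0.1)
    print("case %s done" % n)

threads = [threading.Thread(target=worker, args=(n,)) for n in range(3)]
for t in threads:
    t.daemon = True
    t.start()
for t in threads:  # join after all threads are running
    t.join()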
def init_project_env(subject='Automation', proj_path=None, sysencoding="utf-8", debug=False):
    '''Set the environment for pyrunner'''
    # if sysencoding:
    #     set_sys_encode(sysencoding)
    if not proj_path:
        try:
            executable_file_path = os.path.dirname(os.path.abspath(inspect.stack()[-1][1]))
        except:
            executable_file_path = os.path.dirname(sys.path[0])
        finally:
            proj_path = executable_file_path
    p = os.path.join(proj_path, subject)
    proj_conf = {
        "sys_coding": sysencoding,
        "debug": debug,
        "module_name": os.path.splitext(os.path.basename(subject))[0],
        "cfg_file": os.path.join(p, "config.ini"),
        "path": {
            "root": p,
            "case": os.path.join(p, "testcase"),
            "data": os.path.join(p, "data"),
            "buffer": os.path.join(p, "buffer"),
            "resource": os.path.join(p, "resource"),
            "tools": os.path.join(p, "tools"),
            "rst": os.path.join(p, "result"),
            "rst_log": os.path.join(p, "result", "testcase"),
            "rst_shot": os.path.join(p, "result", "screenshots"),
        },
    }
    for v in proj_conf["path"].values():
        FileSystemUtils.mkdirs(v)
    if os.path.isdir(p):
        sys.path.append(p)
    return proj_conf
Set the environment for pyrunner
entailment
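A usage sketch, assuming the function above is importable; the subject name is illustrative. Note that calling it creates the project directory tree as a side effect:

conf = init_project_env(subject="MyProject", debug=True)
print(conf["path"]["case"])   # e.g. <proj_path>/MyProject/testcase
print(conf["cfg_file"])       # e.g. <proj_path>/MyProject/config.ini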
def seqfy(strs):
    '''Serialize a string: prefix each non-empty line with a line number
    and return the numbered string.
    Sample usage:
        strs = ["", None, u"First-line\nSecond-line\nThird-line", u"No newline"]
        for s in strs:
            print "---"
            result = seqfy(s)
            print result
            print unseqfy(result)
    '''
    if not strs:
        return
    result = ""
    seq = 1
    ss = strs.split("\n")
    for i in ss:
        if i:
            result = "".join([result, str(seq), ".", i, "\n"])
            seq = seq + 1
    return result
Serialize a string: prefix each non-empty line with a line number and return the numbered string. Sample usage: strs = ["", None, u"First-line\nSecond-line\nThird-line", u"No newline"] for s in strs: print "---" result = seqfy(s) print result print unseqfy(result)
entailment
def stepfy(strs):
    '''Convert a string into steps: based on the numbered string, build a
    dict keyed by "Step_%s_info" and return it.
    Sample usage:
        test_strs = [
            "",
            None,
            u"First-line\nSecond-line\nThird-line",
            u'1.First-line\n2.Second-line\n3.Third-line\n',
            u'3.No newline',
            u'3.With newline\n',
            "asdfasdfsdf",
            "1.asdfasdfsdf\n2.sodfi",
            "1.1.dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n",
            "dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n",
        ]
        for i in test_strs:
            steps = stepfy(i)
            un = unstepfy(steps)
            print "string: %r" % i
            print "stepfy: %s" % steps
            print "unstepfy: %r\n" % un
    '''
    result = {}
    prog_step = re.compile("^\d+\.")
    if not strs:
        return result
    raws = strs.split("\n")
    # enumerate instead of raws.index(raw): index() returns the first
    # occurrence, which yields wrong step numbers for duplicate lines
    for step_num, raw in enumerate(raws, 1):
        raw = prog_step.sub("", raw)
        if raw:
            result["Step_%s_info" % step_num] = raw
    return result
Convert a string into steps: based on the numbered string, build a dict keyed by "Step_%s_info" and return it. Sample usage: test_strs = [ "", None, u"First-line\nSecond-line\nThird-line", u'1.First-line\n2.Second-line\n3.Third-line\n', u'3.No newline', u'3.With newline\n', "asdfasdfsdf", "1.asdfasdfsdf\n2.sodfi", "1.1.dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", "dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", ] for i in test_strs: steps = stepfy(i) un = unstepfy(steps) print "string: %r" %i print "stepfy: %s" %steps print "unstepfy: %r\n" %un
entailment
def map_function(func_str, fw_action_addtion=None, bw_action_addtion=None, alias_func=None):
    '''Sample usage:
        print map_function('set', alias_func="ini_items")  # -> ini_items
        print map_function('set', fw_action_addtion="action_steps_", bw_action_addtion="_for_upd", alias_func="ini_items")  # -> action_steps_ini_items_for_upd
        print map_function('set(a=1,b=2,c=Test())', "action_steps_", "_for_upd", "ini_items")  # -> action_steps_ini_items_for_upd(a=1,b=2,c=Test())
        print map_function('set("login",a="good",b=Test())', "action_steps_", "_for_upd")  # -> action_steps_set_for_upd("login",a="good",b=Test())
    '''
    split_action_value = re.compile("^(\w+)(\((.*)\)$)?")
    matched = split_action_value.match(func_str)
    if matched:
        action = matched.group(1).lower()
        value = matched.group(2)
        #params = matched.group(3)
        if alias_func:
            action = alias_func
        if fw_action_addtion:
            action = fw_action_addtion + action
        if bw_action_addtion:  # was testing fw_action_addtion, the wrong flag
            action = action + bw_action_addtion
        if value:
            return action + value
        else:
            return action
Sample usage: print map_function('set',alias_func = "ini_items");# -> ini_items print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> action_steps_ini_items_for_upd print map_function('set(a=1,b=2,c=Test())',"action_steps_","_for_upd","ini_items");# -> action_steps_ini_items_for_upd(a=1,b=2,c=Test()) print map_function('set("login",a="good",b=Test())',"action_steps_","_for_upd");# -> action_steps_set_for_upd("login",a="good",b=Test())
entailment
def until_cmd(listcmd, end_expects=None, save2logfile=None, coding=encoding):
    '''Run a system command and wait for it to finish.
    @param listcmd: the command to run, as a list
    @param end_expects: regex searched against each output line; the flag
        set by the last non-empty line of output becomes the result
    @param save2logfile: log file that the execution output is appended to
    @param coding: encoding used to decode the output
    '''
    if end_expects and not isinstance(end_expects, p_compat.str):
        raise Exception("invalid unicode string: '%s'" % end_expects)
    lines = []
    # Initialize the flag so it is defined even when the command
    # produces no output at all.
    result = False
    subp = subprocess.Popen(listcmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    while subp.poll() is None:
        next_line = subp.stdout.readline().decode(coding)
        if next_line:
            lines.append(next_line)
            if end_expects and re.search(end_expects, next_line):
                result = True
            else:
                result = False
    subp.stdout.close()
    if subp.returncode:
        result = False
        lines.append("sub command error code: %s" % subp.returncode)
    if save2logfile:
        with open(save2logfile, 'a') as f:
            f.writelines(lines)
    return result
Run a system command and wait for it to finish. @param listcmd: the command to run, as a list @param end_expects: regex searched against each output line; the flag set by the last non-empty line of output becomes the result @param save2logfile: log file that the execution output is appended to @param coding: encoding used to decode the output
entailment
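A usage sketch, assuming until_cmd is in scope; the command and expected pattern are illustrative and Unix-specific (the last line of ping's output contains "min/avg/max"):

ok = until_cmd(["ping", "-c", "1", "127.0.0.1"],
               end_expects=u"min/avg/max",
               save2logfile="ping.log")
print("reachable" if ok else "unreachable")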
def until(method, timeout=30, message=''):
    """Calls the method until the return value is not False."""
    end_time = time.time() + timeout
    while True:
        try:
            value = method()
            if value:
                return value
        except:
            pass
        time.sleep(1)
        if time.time() > end_time:
            break
    raise Exception(message)
Calls the method until the return value is not False.
entailment
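A usage sketch: poll until a file appears, raising after the timeout (the path is illustrative):

import os

flag_file = "/tmp/build.done"
until(lambda: os.path.exists(flag_file),
      timeout=10,
      message="timed out waiting for %s" % flag_file)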
def _check_format(file_path, content):
    """Check that the testcase file content is in a valid format."""
    if not content:
        # testcase file content is empty
        err_msg = u"Testcase file content is empty: {}".format(file_path)
        raise p_exception.FileFormatError(err_msg)
    elif not isinstance(content, (list, dict)):
        # testcase file content does not match testcase format
        err_msg = u"Testcase file content format invalid: {}".format(file_path)
        raise p_exception.FileFormatError(err_msg)
Check that the testcase file content is in a valid format.
entailment
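For illustration, assuming the enclosing class is FileUtils and p_exception is importable from this package:

FileUtils._check_format("ok.yml", [{"test": {"name": "login"}}])  # passes
try:
    FileUtils._check_format("empty.yml", None)
except p_exception.FileFormatError as e:
    print(e)  # Testcase file content is empty: empty.yml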
def _load_yaml_file(yaml_file):
    """ load yaml file and check file content format
    """
    with io.open(yaml_file, 'r', encoding='utf-8') as stream:
        # safe_load avoids arbitrary object construction; calling
        # yaml.load without an explicit Loader is deprecated in
        # PyYAML >= 5.1
        yaml_content = yaml.safe_load(stream)
        FileUtils._check_format(yaml_file, yaml_content)
        return yaml_content
load yaml file and check file content format
entailment
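A usage sketch; the file name and content are illustrative:

# demo.yml:
#   - test:
#       name: login
#       url: http://127.0.0.1/login
content = FileUtils._load_yaml_file("demo.yml")
print(content[0]["test"]["name"])  # -> login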
def _load_json_file(json_file):
    """ load json file and check file content format
    """
    with io.open(json_file, encoding='utf-8') as data_file:
        try:
            json_content = json.load(data_file)
        except p_exception.JSONDecodeError:
            err_msg = u"JSONDecodeError: JSON file format error: {}".format(json_file)
            raise p_exception.FileFormatError(err_msg)
        FileUtils._check_format(json_file, json_content)
        return json_content
load json file and check file content format
entailment
def _load_csv_file(csv_file):
    """ load csv file and check file content format
    @param csv_file: csv file path
        e.g. csv file content:
            username,password
            test1,111111
            test2,222222
            test3,333333
    @return list of parameters, each parameter is in dict format
        e.g.
        [
            {'username': 'test1', 'password': '111111'},
            {'username': 'test2', 'password': '222222'},
            {'username': 'test3', 'password': '333333'}
        ]
    """
    csv_content_list = []
    with io.open(csv_file, encoding='utf-8') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            csv_content_list.append(row)
    return csv_content_list
load csv file and check file content format @param csv_file: csv file path e.g. csv file content: username,password test1,111111 test2,222222 test3,333333 @return list of parameter, each parameter is in dict format e.g. [ {'username': 'test1', 'password': '111111'}, {'username': 'test2', 'password': '222222'}, {'username': 'test3', 'password': '333333'} ]
entailment
def force_delete_file(file_path):
    ''' force delete a file
    '''
    if os.path.isfile(file_path):
        try:
            os.remove(file_path)
            return file_path
        except:
            return FileSystemUtils.add_unique_postfix(file_path)
    else:
        return file_path
force delete a file
entailment
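A usage sketch; the path is illustrative. When the file is locked and cannot be removed, the helper falls back to a fresh unique-postfix path instead of failing:

path = force_delete_file("result/report.html")
# `path` is "result/report.html" when the delete succeeded (or the file
# did not exist), otherwise a unique-postfix variant that is safe to write.
with open(path, "w") as f:
    f.write("<html></html>")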
def mkzip(source_dir, output_filename):
    '''Usage:
        p = r'D:\auto\env\ttest\ins\build\lib\rock4\softtest\support'
        mkzip(os.path.join(p, "appiumroot"), os.path.join(p, "appiumroot.zip"))
        unzip(os.path.join(p, "appiumroot.zip"), os.path.join(p, "appiumroot2"))
    '''
    # zipfile.ZIP_DEFLATED is the documented compression constant
    # (the original zipfile.zlib.DEFLATED only worked by coincidence)
    zipf = zipfile.ZipFile(output_filename, 'w', zipfile.ZIP_DEFLATED)
    pre_len = len(os.path.dirname(source_dir))
    for parent, dirnames, filenames in os.walk(source_dir):
        for filename in filenames:
            pathfile = os.path.join(parent, filename)
            arcname = pathfile[pre_len:].strip(os.path.sep)  # relative path
            zipf.write(pathfile, arcname)
    zipf.close()
Usage: p = r'D:\auto\env\ttest\ins\build\lib\rock4\softtest\support' mkzip(os.path.join(p, "appiumroot"),os.path.join(p, "appiumroot.zip")) unzip(os.path.join(p, "appiumroot.zip"),os.path.join(p, "appiumroot2"))
entailment
def get_imported_module_from_file(file_path):
    """ import module from python file path and return imported module
    """
    if p_compat.is_py3:
        imported_module = importlib.machinery.SourceFileLoader(
            'module_name', file_path).load_module()
    elif p_compat.is_py2:
        imported_module = imp.load_source('module_name', file_path)
    else:
        raise RuntimeError("Neither Python 3 nor Python 2.")
    return imported_module
import module from python file path and return imported module
entailment
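A usage sketch; the file path and attribute name are illustrative:

module = get_imported_module_from_file("/path/to/custom_functions.py")
# Call a function defined in that file (name assumed for illustration).
setup = getattr(module, "setup", None)
if callable(setup):
    setup()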