Dataset schema (six fields per row, as reported by the dataset viewer):

  Field             Type    Values / lengths
  _id               string  2-7 characters
  title             string  1-88 characters
  partition         string  3 distinct values
  text              string  31-13.1k characters
  language          string  1 distinct value
  meta_information  dict
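Each row below lists these six fields in order: _id, title, partition, text (a Python snippet, apparently truncated by the viewer), language, and meta_information. As a minimal sketch of how such rows could be consumed, assuming the dump has been exported to a JSON-lines file (the path records.jsonl and the export step itself are hypothetical, not part of the dataset):

    import json

    def iter_records(path):
        """Yield one row dict per line of a JSON-lines export of this dataset."""
        with open(path, encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line:
                    yield json.loads(line)

    # Example: gather the ElasticStore snippets from the 'test' partition,
    # using only the field names shown in the schema above.
    rows = [r for r in iter_records('records.jsonl')
            if r['partition'] == 'test' and r['title'].startswith('ElasticStore.')]
    for r in rows:
        print(r['_id'], r['title'], len(r['text']), 'characters of code')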
q279100
Worker.work
test
def work(self, socket, call, args, kwargs, topics=()):
    """Calls a function and send results to the collector. It supports all
    of function actions. A function could return, yield, raise any packable
    objects.
    """
    task_id = uuid4_bytes()
    reply_socket, topics = self.replier(socket, topics, call.reply_to)
    if reply_socket:
        channel = (call.call_id, task_id, topics)
    else:
        channel = (None, None, None)
    f, rpc_spec = self.find_call_target(call)
    if rpc_spec.reject_if.__get__(self.app)(call, topics):
        reply_socket and self.reject(reply_socket, call.call_id, topics)
        return
    reply_socket and self.accept(reply_socket, channel)
    success = False
    with self.catch_exceptions():
        try:
python
{ "resource": "" }
q279101
Worker.accept
test
def accept(self, reply_socket, channel): """Sends ACCEPT reply.""" info = self.info or b''
python
{ "resource": "" }
q279102
Worker.reject
test
def reject(self, reply_socket, call_id, topics=()): """Sends REJECT reply.""" info = self.info or b''
python
{ "resource": "" }
q279103
Worker.raise_
test
def raise_(self, reply_socket, channel, exc_info=None):
    """Sends RAISE reply."""
    if not reply_socket:
        return
    if exc_info is None:
        exc_info = sys.exc_info()
    exc_type, exc, tb = exc_info
    while tb.tb_next is not None:
        tb = tb.tb_next
    if issubclass(exc_type, RemoteException):
        exc_type = exc_type.exc_type
python
{ "resource": "" }
q279104
_Caller._call_wait
test
def _call_wait(self, hints, name, args, kwargs, topics=(), raw=False,
               limit=None, retry=False, max_retries=None):
    """Allocates a call id and emit."""
    col = self.collector
    if not col.is_running():
        col.start()
    call_id = uuid4_bytes()
    reply_to = (DUPLEX if self.socket is col.socket else col.topic)
    # Normal tuple is faster than namedtuple.
    header = self._make_header(name, call_id, reply_to, hints)
    payload = self._pack(args, kwargs, raw)
    # Use short names.
    def send_call():
        try:
python
{ "resource": "" }
q279105
Collector.establish
test
def establish(self, call_id, timeout, limit=None,
              retry=None, max_retries=None):
    """Waits for the call is accepted by workers and starts to collect
    the results.
    """
    rejected = 0
    retried = 0
    results = []
    result_queue = self.result_queues[call_id]
    try:
        with Timeout(timeout, False):
            while True:
                result = result_queue.get()
                if result is None:
                    rejected += 1
python
{ "resource": "" }
q279106
Collector.dispatch_reply
test
def dispatch_reply(self, reply, value):
    """Dispatches the reply to the proper queue."""
    method = reply.method
    call_id = reply.call_id
    task_id = reply.task_id
    if method & ACK:
        try:
            result_queue = self.result_queues[call_id]
        except KeyError:
            raise KeyError('already established or unprepared call')
        if method == ACCEPT:
            worker_info = value
            result = RemoteResult(self, call_id, task_id, worker_info)
python
{ "resource": "" }
q279107
guess_type_name
test
def guess_type_name(value):
    '''
    Guess the type name of a serialized value.
    '''
    value = str(value)
    if value.upper() in ['TRUE', 'FALSE']:
        return 'BOOLEAN'
    elif re.match(r'(-)?(\d+)(\.\d+)', value):
        return 'REAL'
    elif re.match(r'(-)?(\d+)', value):
        return 'INTEGER'
python
{ "resource": "" }
q279108
deserialize_value
test
def deserialize_value(ty, value):
    '''
    Deserialize a value of some type
    '''
    uty = ty.upper()
    if uty == 'BOOLEAN':
        if value.isdigit():
            return bool(int(value))
        elif value.upper() == 'FALSE':
            return False
        elif value.upper() == 'TRUE':
            return True
        else:
            return None
    elif uty == 'INTEGER':
        if '"' in value:
            return uuid.UUID(value[1:-1]).int
        else:
python
{ "resource": "" }
q279109
ModelLoader.t_LPAREN
test
def t_LPAREN(self, t): r'\(' t.endlexpos = t.lexpos +
python
{ "resource": "" }
q279110
ModelLoader.t_RPAREN
test
def t_RPAREN(self, t): r'\)' t.endlexpos = t.lexpos +
python
{ "resource": "" }
q279111
ElasticStore.get
test
def get(self, content_id, feature_names=None):
    '''Retrieve a feature collection.

    If a feature collection with the given id does not exist, then
    ``None`` is returned.

    :param str content_id: Content identifier.
    :param [str] feature_names:
        A list of feature names to retrieve. When ``None``, all
        features are retrieved. Wildcards are allowed.
    :rtype: :class:`dossier.fc.FeatureCollection` or ``None``
    '''
    try:
        resp = self.conn.get(index=self.index, doc_type=self.type,
python
{ "resource": "" }
q279112
ElasticStore.get_many
test
def get_many(self, content_ids, feature_names=None): '''Returns an iterable of feature collections. This efficiently retrieves multiple FCs corresponding to the list of ids given. Tuples of identifier and feature collection are yielded. If the feature collection for a given id does not exist, then ``None`` is returned as the second element of the tuple. :param [str] content_ids: List of content ids.
python
{ "resource": "" }
q279113
ElasticStore.put
test
def put(self, items, indexes=True):
    '''Adds feature collections to the store.

    This efficiently adds multiple FCs to the store. The iterable
    of ``items`` given should yield tuples of ``(content_id, FC)``.

    :param items: Iterable of ``(content_id, FC)``.
    :param [str] feature_names:
        A list of feature names to retrieve. When ``None``, all
        features are retrieved. Wildcards are allowed.
    '''
    actions = []
    for cid, fc in items:
        # TODO: If we store features in a columnar order, then we
        # could tell ES to index the feature values directly. ---AG
        # (But is problematic because we want to preserve the ability
        # to selectively index FCs. So we'd probably need two distinct
        # doc types.)
        idxs = defaultdict(list)
        if indexes:
            for fname in self.indexed_features:
                if fname in fc:
                    idxs[fname_to_idx_name(fname)].extend(fc[fname])
python
{ "resource": "" }
q279114
ElasticStore.delete
test
def delete(self, content_id): '''Deletes the corresponding feature collection. If the FC does not exist, then this is a no-op.
python
{ "resource": "" }
q279115
ElasticStore.delete_all
test
def delete_all(self): '''Deletes all feature collections. This does not destroy the ES index, but instead only deletes all FCs with the configured document type (defaults to ``fc``). ''' try: self.conn.indices.delete_mapping(
python
{ "resource": "" }
q279116
ElasticStore.delete_index
test
def delete_index(self): '''Deletes the underlying ES index. Only use this if you know what you're doing. This destroys the entire underlying ES index, which could be shared by multiple distinct
python
{ "resource": "" }
q279117
ElasticStore.scan
test
def scan(self, *key_ranges, **kwargs): '''Scan for FCs in the given id ranges. :param key_ranges: ``key_ranges`` should be a list of pairs of ranges. The first value is the lower bound id and the second value is the upper bound id. Use ``()`` in either position to leave it unbounded. If no ``key_ranges`` are given, then all FCs in the store are returned. :param [str] feature_names: A list of feature names to retrieve. When ``None``, all
python
{ "resource": "" }
q279118
ElasticStore.scan_ids
test
def scan_ids(self, *key_ranges, **kwargs): '''Scan for ids only in the given id ranges. :param key_ranges: ``key_ranges`` should be a list of pairs of ranges. The first value is the lower bound id and the second value is the upper bound id. Use ``()`` in either position to leave it unbounded. If no ``key_ranges`` are given, then all FCs in the store are returned. :param [str] feature_names: A list of feature names to retrieve. When ``None``, all
python
{ "resource": "" }
q279119
ElasticStore.scan_prefix
test
def scan_prefix(self, prefix, feature_names=None): '''Scan for FCs with a given prefix. :param str prefix: Identifier prefix. :param [str] feature_names: A list of feature names to retrieve. When ``None``, all features are retrieved. Wildcards are allowed. :rtype: Iterable of ``(content_id, FC)``
python
{ "resource": "" }
q279120
ElasticStore.scan_prefix_ids
test
def scan_prefix_ids(self, prefix): '''Scan for ids with a given prefix. :param str prefix: Identifier prefix. :param [str] feature_names: A list of feature names to
python
{ "resource": "" }
q279121
ElasticStore.fulltext_scan
test
def fulltext_scan(self, query_id=None, query_fc=None, feature_names=None,
                  preserve_order=True, indexes=None):
    '''Fulltext search.

    Yields an iterable of triples (score, identifier, FC)
    corresponding to the search results of the fulltext search
    in ``query``. This will only search text indexed under the
    given feature named ``fname``.

    Note that, unless ``preserve_order`` is set to True, the
    ``score`` will always be 0.0, and the results will be
    unordered. ``preserve_order`` set to True will cause the
python
{ "resource": "" }
q279122
ElasticStore.fulltext_scan_ids
test
def fulltext_scan_ids(self, query_id=None, query_fc=None, preserve_order=True, indexes=None): '''Fulltext search for identifiers. Yields an iterable of triples (score, identifier) corresponding to the search results of the fulltext search in ``query``. This will only search text indexed under the given feature named ``fname``. Note that, unless ``preserve_order`` is set to True, the ``score`` will always be 0.0, and the results will be unordered. ``preserve_order`` set to True will cause the results to be scored and be ordered by score, but you should expect to see a decrease in performance.
python
{ "resource": "" }
q279123
ElasticStore.keyword_scan
test
def keyword_scan(self, query_id=None, query_fc=None, feature_names=None):
    '''Keyword scan for feature collections.

    This performs a keyword scan using the query given. A keyword
    scan searches for FCs with terms in each of the query's indexed
    fields.

    At least one of ``query_id`` or ``query_fc`` must be provided.
    If ``query_fc`` is ``None``, then the query is retrieved
    automatically corresponding to ``query_id``.

    :param str query_id: Optional query id.
    :param query_fc: Optional query feature collection.
    :type query_fc: :class:`dossier.fc.FeatureCollection`
    :param [str] feature_names:
        A list of feature names to retrieve. When ``None``,
python
{ "resource": "" }
q279124
ElasticStore.keyword_scan_ids
test
def keyword_scan_ids(self, query_id=None, query_fc=None): '''Keyword scan for ids. This performs a keyword scan using the query given. A keyword scan searches for FCs with terms in each of the query's indexed fields. At least one of ``query_id`` or ``query_fc`` must be provided.
python
{ "resource": "" }
q279125
ElasticStore.index_scan_ids
test
def index_scan_ids(self, fname, val):
    '''Low-level keyword index scan for ids.

    Retrieves identifiers of FCs that have a feature value ``val``
    in the feature named ``fname``. Note that ``fname`` must be
    indexed.

    :param str fname: Feature name.
    :param str val: Feature value.
    :rtype: Iterable of ``content_id``
    '''
    disj = []
    for fname2 in self.indexes[fname]['feature_names']:
        disj.append({'term': {fname_to_idx_name(fname2): val}})
    query = {
python
{ "resource": "" }
q279126
ElasticStore._source
test
def _source(self, feature_names): '''Maps feature names to ES's "_source" field.''' if feature_names is None: return True elif isinstance(feature_names, bool):
python
{ "resource": "" }
q279127
ElasticStore._range_filters
test
def _range_filters(self, *key_ranges):
    'Creates ES filters for key ranges used in scanning.'
    filters = []
    for s, e in key_ranges:
        if isinstance(s, basestring):
            s = eid(s)
        if isinstance(e, basestring):
            # Make the range inclusive.
            # We need a valid codepoint, so use the max.
python
{ "resource": "" }
q279128
ElasticStore._create_index
test
def _create_index(self):
    'Create the index'
    try:
        self.conn.indices.create(
            index=self.index, timeout=60, request_timeout=60, body={
                'settings': {
                    'number_of_shards': self.shards,
                    'number_of_replicas': self.replicas,
                },
python
{ "resource": "" }
q279129
ElasticStore._create_mappings
test
def _create_mappings(self):
    'Create the field type mapping.'
    self.conn.indices.put_mapping(
        index=self.index, doc_type=self.type,
        timeout=60, request_timeout=60,
        body={
            self.type: {
                'dynamic_templates': [{
                    'default_no_analyze_fc': {
                        'match': 'fc.*',
                        'mapping': {'index': 'no'},
                    },
                }],
                '_all': {
                    'enabled': False,
                },
                '_id': {
                    'index': 'not_analyzed',  # allows range queries
                },
                'properties': self._get_index_mappings(),
python
{ "resource": "" }
q279130
ElasticStore._get_index_mappings
test
def _get_index_mappings(self):
    'Retrieve the field mappings. Useful for debugging.'
    maps = {}
    for fname in self.indexed_features:
        config = self.indexes.get(fname, {})
        print(fname, config)
        maps[fname_to_idx_name(fname)] = {
            'type': config.get('es_index_type', 'integer'),
            'store': False,
            'index': 'not_analyzed',
        }
python
{ "resource": "" }
q279131
ElasticStore._get_field_types
test
def _get_field_types(self): 'Retrieve the field types. Useful for debugging.' mapping = self.conn.indices.get_mapping(
python
{ "resource": "" }
q279132
ElasticStore._fc_index_disjunction_from_query
test
def _fc_index_disjunction_from_query(self, query_fc, fname): 'Creates a disjunction for keyword scan queries.' if len(query_fc.get(fname, [])) == 0: return [] terms = query_fc[fname].keys()
python
{ "resource": "" }
q279133
ElasticStore.fc_bytes
test
def fc_bytes(self, fc_dict): '''Take a feature collection in dict form and count its size in bytes.
python
{ "resource": "" }
q279134
ElasticStore.count_bytes
test
def count_bytes(self, filter_preds): '''Count bytes of all feature collections whose key satisfies one of the predicates in ``filter_preds``. The byte counts are binned by filter predicate.
python
{ "resource": "" }
q279135
pretty_string
test
def pretty_string(fc): '''construct a nice looking string for an FC ''' s = [] for fname, feature in sorted(fc.items()): if isinstance(feature, StringCounter): feature = [u'%s: %d' % (k, v)
python
{ "resource": "" }
q279136
process_docopts
test
def process_docopts():
    # type: ()->None
    """
    Take care of command line options
    """
    arguments = docopt(__doc__,
                       version="Find Known Secrets {0}".format(__version__))
    logger.debug(arguments)
    # print(arguments)
    if arguments["here"]:
        # all default
        go()
python
{ "resource": "" }
q279137
default_formatter
test
def default_formatter(error): """Escape the error, and wrap it in a span with class ``error-message``"""
python
{ "resource": "" }
q279138
pretty_to_link
test
def pretty_to_link(inst, link):
    '''
    Create a human-readable representation of a link on the 'TO'-side
    '''
    values = ''
    prefix = ''

    metaclass = xtuml.get_metaclass(inst)
    for name, ty in metaclass.attributes:
        if name in link.key_map:
            value = getattr(inst, name)
python
{ "resource": "" }
q279139
pretty_unique_identifier
test
def pretty_unique_identifier(inst, identifier):
    '''
    Create a human-readable representation a unique identifier.
    '''
    values = ''
    prefix = ''

    metaclass = xtuml.get_metaclass(inst)
    for name, ty in metaclass.attributes:
        if name in metaclass.identifying_attributes:
            value = getattr(inst, name)
python
{ "resource": "" }
q279140
check_uniqueness_constraint
test
def check_uniqueness_constraint(m, kind=None):
    '''
    Check the model for uniqueness constraint violations.
    '''
    if kind is None:
        metaclasses = m.metaclasses.values()
    else:
        metaclasses = [m.find_metaclass(kind)]

    res = 0
    for metaclass in metaclasses:
        id_map = dict()
        for identifier in metaclass.indices:
            id_map[identifier] = dict()

        for inst in metaclass.select_many():
            # Check for null-values
            for name, ty in metaclass.attributes:
                if name not in metaclass.identifying_attributes:
                    continue
                value = getattr(inst, name)
                isnull = value is None
                isnull |= (ty == 'UNIQUE_ID' and not value)
                if isnull:
                    res += 1
                    logger.warning('%s.%s is part of an identifier and is null'
python
{ "resource": "" }
q279141
check_link_integrity
test
def check_link_integrity(m, link):
    '''
    Check the model for integrity violations on an association in a
    particular direction.
    '''
    res = 0
    for inst in link.from_metaclass.select_many():
        q_set = list(link.navigate(inst))
        if (len(q_set) < 1 and not link.conditional) or (
                (len(q_set) > 1 and not link.many)):
            res += 1
            logger.warning('integrity violation in '
python
{ "resource": "" }
q279142
check_subtype_integrity
test
def check_subtype_integrity(m, super_kind, rel_id):
    '''
    Check the model for integrity violations across a subtype association.
    '''
    if isinstance(rel_id, int):
        rel_id = 'R%d' % rel_id

    res = 0
    for inst in m.select_many(super_kind):
        if not xtuml.navigate_subtype(inst, rel_id):
python
{ "resource": "" }
q279143
feature_index
test
def feature_index(*feature_names):
    '''Returns a index creation function.

    Returns a valid index ``create`` function for the feature names
    given. This can be used with the :meth:`Store.define_index` method
    to create indexes on any combination of features in a feature
    collection.

    :type feature_names: list(unicode)
    :rtype: ``(val -> index val)
            -> (content_id, FeatureCollection)
            -> generator of [index val]``
    '''
    def _(trans, (cid, fc)):
        for fname in feature_names:
python
{ "resource": "" }
q279144
basic_transform
test
def basic_transform(val): '''A basic transform for strings and integers.'''
python
{ "resource": "" }
q279145
Store.put
test
def put(self, items, indexes=True):
    '''Add feature collections to the store.

    Given an iterable of tuples of the form
    ``(content_id, feature collection)``, add each to the store and
    overwrite any that already exist.

    This method optionally accepts a keyword argument `indexes`,
    which by default is set to ``True``. When it is ``True``, it
    will *create* new indexes for each content object for all
    indexes defined on this store.

    Note that this will not update existing indexes. (There is
    currently no way to do this without running some sort of
    garbage collection process.)

    :param iterable items:
        iterable of ``(content_id, FeatureCollection)``.
    :type fc: :class:`dossier.fc.FeatureCollection`
    '''
    # So why accept an iterable? Ideally, some day, `kvlayer.put` would
    # accept an iterable, so we should too.
    #
    # But we have to transform it to a list in order to update indexes
    # anyway. Well, if we don't have to update indexes, then we can avoid
    # loading everything into memory, which seems like an optimization
    # worth having even if it's only some of the time.
    #
    # N.B.
python
{ "resource": "" }
q279146
Store.delete_all
test
def delete_all(self): '''Deletes all storage. This includes every content object and all index data. '''
python
{ "resource": "" }
q279147
Store.scan
test
def scan(self, *key_ranges): '''Retrieve feature collections in a range of ids. Returns a generator of content objects corresponding to the content identifier ranges given. `key_ranges` can be a possibly empty list of 2-tuples, where the first element of the tuple is the beginning of a range and the second element is the end of a range. To specify the beginning or end of the table, use an empty tuple `()`. If the list is empty, then this yields all content objects in the storage. :param key_ranges: as described in
python
{ "resource": "" }
q279148
Store.scan_ids
test
def scan_ids(self, *key_ranges): '''Retrieve content ids in a range of ids. Returns a generator of ``content_id`` corresponding to the content identifier ranges given. `key_ranges` can be a possibly empty list of 2-tuples, where the first element of the tuple is the beginning of a range and the second element is the end of a range. To specify the beginning or end of the table, use an empty tuple `()`. If the list is empty, then this yields all content ids in the storage. :param key_ranges: as described in
python
{ "resource": "" }
q279149
Store.index_scan
test
def index_scan(self, idx_name, val): '''Returns ids that match an indexed value. Returns a generator of content identifiers that have an entry in the index ``idx_name`` with value ``val`` (after index transforms are applied). If the index named by ``idx_name`` is not registered, then a :exc:`~exceptions.KeyError` is raised.
python
{ "resource": "" }
q279150
Store.index_scan_prefix
test
def index_scan_prefix(self, idx_name, val_prefix): '''Returns ids that match a prefix of an indexed value. Returns a generator of content identifiers that have an entry in the index ``idx_name`` with prefix ``val_prefix`` (after index transforms are applied). If the index named by ``idx_name`` is not registered, then a :exc:`~exceptions.KeyError` is raised. :param unicode idx_name: name of index
python
{ "resource": "" }
q279151
Store.index_scan_prefix_and_return_key
test
def index_scan_prefix_and_return_key(self, idx_name, val_prefix): '''Returns ids that match a prefix of an indexed value, and the specific key that matched the search prefix. Returns a generator of (index key, content identifier) that have an entry in the index ``idx_name`` with prefix ``val_prefix`` (after index transforms are applied). If the index named by ``idx_name`` is not registered, then a :exc:`~exceptions.KeyError` is raised. :param unicode idx_name: name of index :param val_prefix: the value to use to search the index
python
{ "resource": "" }
q279152
Store._index_scan_prefix_impl
test
def _index_scan_prefix_impl(self, idx_name, val_prefix, retfunc): '''Implementation for index_scan_prefix and index_scan_prefix_and_return_key, parameterized on return value function. retfunc gets passed a key tuple from the index: (index name, index value, content_id) ''' idx = self._index(idx_name)['transform']
python
{ "resource": "" }
q279153
Store.define_index
test
def define_index(self, idx_name, create, transform):
    '''Add an index to this store instance.

    Adds an index transform to the current FC store. Once an index
    with name ``idx_name`` is added, it will be available in all
    ``index_*`` methods. Additionally, the index will be
    automatically updated on calls to
    :meth:`~dossier.fc.store.Store.put`.

    If an index with name ``idx_name`` already exists, then it is
    overwritten.

    Note that indexes do *not* persist. They must be re-defined for
    each instance of :class:`Store`.

    For example, to add an index on the ``boNAME`` feature, you can
    use the ``feature_index`` helper function:

    .. code-block:: python

        store.define_index('boNAME',
                           feature_index('boNAME'),
                           lambda s: s.encode('utf-8'))

    Another example for creating an index on names:

    .. code-block:: python

        store.define_index('NAME',
                           feature_index('canonical_name', 'NAME'),
python
{ "resource": "" }
q279154
Store._index_put
test
def _index_put(self, idx_name, *ids_and_fcs): '''Add new index values. Adds new index values for index ``idx_name`` for the pairs given. Each pair should be a content identifier and a :class:`dossier.fc.FeatureCollection`. :type idx_name: unicode :type ids_and_fcs: ``[(content_id, FeatureCollection)]`` '''
python
{ "resource": "" }
q279155
Store._index_put_raw
test
def _index_put_raw(self, idx_name, content_id, val): '''Add new raw index values. Adds a new index key corresponding to ``(idx_name, transform(val), content_id)``. This method bypasses the *creation* of indexes from content objects, but values are still transformed. :type idx_name: unicode :type
python
{ "resource": "" }
q279156
Store._index_keys_for
test
def _index_keys_for(self, idx_name, *ids_and_fcs):
    '''Returns a generator of index triples.

    Returns a generator of index keys for the ``ids_and_fcs`` pairs
    given. The index keys have the form
    ``(idx_name, idx_val, content_id)``.

    :type idx_name: unicode
    :type ids_and_fcs: ``[(content_id, FeatureCollection)]``
    :rtype: generator of ``(str, str, str)``
    '''
    idx = self._index(idx_name)
    icreate, itrans = idx['create'], idx['transform']
    if isinstance(idx_name, unicode):
        idx_name = idx_name.encode('utf-8')
    for cid_fc in ids_and_fcs:
        content_id
python
{ "resource": "" }
q279157
Store._index
test
def _index(self, name): '''Returns index transforms for ``name``. :type name: unicode :rtype: ``{ create |--> function, transform |--> function }`` ''' name = name.decode('utf-8') try:
python
{ "resource": "" }
q279158
check_pypi_name
test
def check_pypi_name(pypi_package_name, pypi_registry_host=None):
    """
    Check if a package name exists on pypi.

    TODO: Document the Registry URL construction.
    It may not be obvious how pypi_package_name and pypi_registry_host are used
    I'm appending the simple HTTP API parts of the registry standard specification.

    It will return True if the package name, or any equivalent variation as defined by PEP 503
    normalisation rules (https://www.python.org/dev/peps/pep-0503/#normalized-names) is registered
    in the PyPI registry.

    >>> check_pypi_name('pip')
    True
    >>> check_pypi_name('Pip')
    True

    It will return False if the package name, or any equivalent variation as defined by PEP 503
    normalisation rules (https://www.python.org/dev/peps/pep-0503/#normalized-names) is not registered
    in the PyPI registry.

    >>> check_pypi_name('testy_mc-test_case-has.a.cousin_who_should_never_write_a_package')
    False

    :param pypi_package_name:
    :param pypi_registry_host:
    :return:
    """
    if pypi_registry_host is None:
        pypi_registry_host = 'pypi.python.org'

    # Just a helpful reminder why this bytearray size was chosen.
    # HTTP/1.1 200 OK
    # HTTP/1.1 404 Not Found
    receive_buffer = bytearray(b'------------')

    context = ssl.create_default_context()
    ssl_http_socket = context.wrap_socket(socket.socket(socket.AF_INET),
                                          server_hostname=pypi_registry_host)
    ssl_http_socket.connect((pypi_registry_host, 443))
    ssl_http_socket.send(b''.join([
        b"HEAD /simple/", pypi_package_name.encode('ascii'), b"/ HTTP/1.0", b"\r\n",
        b"Host: ", pypi_registry_host.encode('ascii'), b"\r\n",
        b"\r\n\r\n"
    ]))
    ssl_http_socket.recv_into(receive_buffer)

    # Early return when possible.
    if b'HTTP/1.1 200' in receive_buffer:
        ssl_http_socket.shutdown(1)
        ssl_http_socket.close()
        return True
    elif b'HTTP/1.1 404' in receive_buffer:
        ssl_http_socket.shutdown(1)
python
{ "resource": "" }
q279159
add_direction
test
def add_direction(value, arg=u"rtl_only"):
    """Adds direction to the element

    :arguments:
        arg
            * rtl_only: Add the direction only in case of a
              right-to-left language (default)
            * both: add the direction in both case
            * ltr_only: Add the direction only in case of a
              left-to-right language

    {{image_name|add_direction}} when image_name is 'start_arrow.png'
    results in 'start_arrow_rtl.png' in case of RTL language, and
    'start_arrow.png' or 'start_arrow_ltr.png' depends on `arg` value.
    """
    if arg == u'rtl_only':
        directions = (u'', u'_rtl')
    elif arg == u'both':
        directions = (u'_ltr', u'_rtl')
    elif arg == u'ltr_only':
python
{ "resource": "" }
q279160
get_type_name
test
def get_type_name(s_dt): ''' get the xsd name of a S_DT ''' s_cdt = nav_one(s_dt).S_CDT[17]() if s_cdt and s_cdt.Core_Typ in range(1,
python
{ "resource": "" }
q279161
get_refered_attribute
test
def get_refered_attribute(o_attr): ''' Get the the referred attribute. ''' o_attr_ref = nav_one(o_attr).O_RATTR[106].O_BATTR[113].O_ATTR[106]() if o_attr_ref:
python
{ "resource": "" }
q279162
build_core_type
test
def build_core_type(s_cdt):
    '''
    Build an xsd simpleType out of a S_CDT.
    '''
    s_dt = nav_one(s_cdt).S_DT[17]()
    if s_dt.name == 'void':
        type_name = None
    elif s_dt.name == 'boolean':
        type_name = 'xs:boolean'
    elif s_dt.name == 'integer':
        type_name = 'xs:integer'
    elif s_dt.name == 'real':
        type_name = 'xs:decimal'
    elif s_dt.name == 'string':
        type_name = 'xs:string'
    elif s_dt.name == 'unique_id':
        type_name =
python
{ "resource": "" }
q279163
build_enum_type
test
def build_enum_type(s_edt):
    '''
    Build an xsd simpleType out of a S_EDT.
    '''
    s_dt = nav_one(s_edt).S_DT[17]()
    enum = ET.Element('xs:simpleType', name=s_dt.name)
    enum_list = ET.SubElement(enum, 'xs:restriction', base='xs:string')

    first_filter = lambda selected: not nav_one(selected).S_ENUM[56, 'succeeds']()
python
{ "resource": "" }
q279164
build_struct_type
test
def build_struct_type(s_sdt):
    '''
    Build an xsd complexType out of a S_SDT.
    '''
    s_dt = nav_one(s_sdt).S_DT[17]()
    struct = ET.Element('xs:complexType', name=s_dt.name)

    first_filter = lambda selected: not nav_one(selected).S_MBR[46, 'succeeds']()
    s_mbr = nav_any(s_sdt).S_MBR[44](first_filter)
    while s_mbr:
        s_dt = nav_one(s_mbr).S_DT[45]()
python
{ "resource": "" }
q279165
build_user_type
test
def build_user_type(s_udt): ''' Build an xsd simpleType out of a S_UDT. ''' s_dt_user = nav_one(s_udt).S_DT[17]() s_dt_base = nav_one(s_udt).S_DT[18]() base_name = get_type_name(s_dt_base) if base_name:
python
{ "resource": "" }
q279166
build_type
test
def build_type(s_dt):
    '''
    Build a partial xsd tree out of a S_DT and its sub types S_CDT,
    S_EDT, S_SDT and S_UDT.
    '''
    s_cdt = nav_one(s_dt).S_CDT[17]()
    if s_cdt:
        return build_core_type(s_cdt)

    s_edt = nav_one(s_dt).S_EDT[17]()
    if s_edt:
python
{ "resource": "" }
q279167
build_class
test
def build_class(o_obj):
    '''
    Build an xsd complex element out of a O_OBJ, including its O_ATTR.
    '''
    cls = ET.Element('xs:element', name=o_obj.key_lett,
                     minOccurs='0', maxOccurs='unbounded')
    attributes = ET.SubElement(cls, 'xs:complexType')
    for o_attr in nav_many(o_obj).O_ATTR[102]():
        o_attr_ref = get_refered_attribute(o_attr)
python
{ "resource": "" }
q279168
build_component
test
def build_component(m, c_c):
    '''
    Build an xsd complex element out of a C_C, including its packaged
    S_DT and O_OBJ.
    '''
    component = ET.Element('xs:element', name=c_c.name)

    classes = ET.SubElement(component, 'xs:complexType')
    classes = ET.SubElement(classes, 'xs:sequence')
python
{ "resource": "" }
q279169
build_schema
test
def build_schema(m, c_c):
    '''
    Build an xsd schema from a bridgepoint component.
    '''
    schema = ET.Element('xs:schema')
    schema.set('xmlns:xs', 'http://www.w3.org/2001/XMLSchema')

    global_filter = lambda selected: ooaofooa.is_global(selected)
    for s_dt in m.select_many('S_DT', global_filter):
        datatype = build_type(s_dt)
        if datatype is not None:
python
{ "resource": "" }
q279170
prettify
test
def prettify(xml_string): ''' Indent an xml string with four spaces, and add an additional line
python
{ "resource": "" }
q279171
fetch_bikes
test
async def fetch_bikes() -> List[dict]:
    """
    Gets the full list of bikes from the bikeregister site.

    The data is hidden behind a form post request and so we need to
    extract an xsrf and session token with bs4.

    todo add pytest tests

    :return: All the currently registered bikes.
    :raise ApiError: When there was an error connecting to the API.
    """
    async with ClientSession() as session:
        try:
            async with session.get('https://www.bikeregister.com/stolen-bikes') as request:
                document = document_fromstring(await request.text())
        except ClientConnectionError as con_err:
            logger.debug(f"Could not connect to {con_err.host}")
            raise ApiError(f"Could not connect to {con_err.host}")

        token = document.xpath("//input[@name='_token']")
        if len(token) != 1:
            raise ApiError(f"Couldn't extract token from page.")
        else:
            token = token[0].value

        xsrf_token = request.cookies["XSRF-TOKEN"]
        laravel_session = request.cookies["laravel_session"]

        # get the bike data
        headers = {
            'cookie': f'XSRF-TOKEN={xsrf_token}; laravel_session={laravel_session}',
            'origin': 'https://www.bikeregister.com',
            'accept-encoding': 'gzip, deflate, br',
            'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0',
            'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'accept': '*/*',
python
{ "resource": "" }
q279172
set_positional_info
test
def set_positional_info(node, p):
    '''
    set positional information on a node
    '''
    node.position = Position()
    node.position.label = p.lexer.label
    node.position.start_stream = p.lexpos(1)
    node.position.start_line = p.lineno(1)
    node.position.start_column = find_column(p.lexer.lexdata,
                                             node.position.start_stream)

    _, node.position.end_stream = p.lexspan(len(p) - 1)
    _, node.position.end_line = p.linespan(len(p) - 1)
python
{ "resource": "" }
q279173
track_production
test
def track_production(f): ''' decorator for adding positional information to returning nodes ''' @wraps(f) def wrapper(self, p): r = f(self, p) node = p[0] if isinstance(node, Node)
python
{ "resource": "" }
q279174
OALParser.t_DOUBLEEQUAL
test
def t_DOUBLEEQUAL(self, t): r"\=\=" t.endlexpos
python
{ "resource": "" }
q279175
OALParser.t_NOTEQUAL
test
def t_NOTEQUAL(self, t): r"!\=" t.endlexpos =
python
{ "resource": "" }
q279176
OALParser.t_ARROW
test
def t_ARROW(self, t): r"\-\>" t.endlexpos = t.lexpos +
python
{ "resource": "" }
q279177
OALParser.t_LE
test
def t_LE(self, t): r"\<\="
python
{ "resource": "" }
q279178
OALParser.t_GE
test
def t_GE(self, t): r"\>\="
python
{ "resource": "" }
q279179
OALParser.t_EQUAL
test
def t_EQUAL(self, t): r"\="
python
{ "resource": "" }
q279180
OALParser.t_DOT
test
def t_DOT(self, t): r"\."
python
{ "resource": "" }
q279181
OALParser.t_LSQBR
test
def t_LSQBR(self, t): r"\["
python
{ "resource": "" }
q279182
OALParser.t_RSQBR
test
def t_RSQBR(self, t): r"\]"
python
{ "resource": "" }
q279183
OALParser.t_QMARK
test
def t_QMARK(self, t): r"\?"
python
{ "resource": "" }
q279184
OALParser.t_LESSTHAN
test
def t_LESSTHAN(self, t): r"\<" t.endlexpos = t.lexpos
python
{ "resource": "" }
q279185
OALParser.t_GT
test
def t_GT(self, t): r"\>" t.endlexpos =
python
{ "resource": "" }
q279186
OALParser.t_PLUS
test
def t_PLUS(self, t): r"\+"
python
{ "resource": "" }
q279187
RequestCmd.create_queue
test
def create_queue(self, name, strict=True, auto_delete=False,
                 auto_delete_timeout=0):
    """Create message content and properties to create queue with QMFv2

    :param name: Name of queue to create
    :type name: str
    :param strict: Whether command should fail when unrecognized
                   properties are provided
                   Not used by QMFv2
                   Default: True
    :type strict: bool
    :param auto_delete: Whether queue should be auto deleted
                        Default: False
    :type auto_delete: bool
    :param auto_delete_timeout: Timeout in seconds for auto deleting queue
                                Default: 10
    :type auto_delete_timeout: int
    :returns: Tuple containing content and method properties
    """
python
{ "resource": "" }
q279188
RequestCmd.delete_queue
test
def delete_queue(self, name): """Create message content and properties to delete queue with QMFv2 :param name: Name of queue to delete :type name: str :returns: Tuple containing content and method properties """ content = {"_object_id": {"_object_name": self.object_name}, "_method_name": "delete", "_arguments": {"type": "queue",
python
{ "resource": "" }
q279189
RequestCmd.list_queues
test
def list_queues(self): """Create message content and properties to list all queues with QMFv2 :returns: Tuple containing content and query properties """ content = {"_what": "OBJECT",
python
{ "resource": "" }
q279190
RequestCmd.list_exchanges
test
def list_exchanges(self): """Create message content and properties to list all exchanges with QMFv2 :returns: Tuple containing content and query properties """ content = {"_what": "OBJECT",
python
{ "resource": "" }
q279191
RequestCmd.purge_queue
test
def purge_queue(self, name): """Create message content and properties to purge queue with QMFv2 :param name: Name of queue to purge :type name: str :returns: Tuple containing content and method properties """ content = {"_object_id": {"_object_name": "org.apache.qpid.broker:queue:{0}".format(name)}, "_method_name": "purge",
python
{ "resource": "" }
q279192
Gmailer._create_msg
test
def _create_msg(self, to, subject, msgHtml, msgPlain, attachments=None):
    '''
    attachments should be a list of paths
    '''
    sender = self.sender
    if attachments and isinstance(attachments, str):
        attachments = [attachments]
    else:
        attachments = list(attachments or [])
    msg = MIMEMultipart('alternative')
    msg['Subject'] = subject
    msg['From'] = sender
    msg['To'] = to
    msg.attach(MIMEText(msgPlain, 'plain'))
    msg.attach(MIMEText(msgHtml, 'html'))
    # append attachments if any
python
{ "resource": "" }
q279193
OCR.read
test
def read(self): """ Returns the text from an image at a given url. """ # Only download the image if it has changed if self.connection.has_changed(): image_path = self.connection.download_image()
python
{ "resource": "" }
q279194
OCR.text_visible
test
def text_visible(self):
    """
    Returns true or false based on if the OCR process has read actual
    words. This is needed to prevent non-words from being added to the
    queue since the ocr process can sometimes return values that are
    not meaningfull.
    """
    # Split the input string at points with any amount of whitespace
    words = self.read().split()

    # Light weight check to see if a word exists
    for word in words:
        # If the word is a numeric value
python
{ "resource": "" }
q279195
main
test
def main():
    '''
    Parse command line options and launch the interpreter
    '''
    parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path..]",
                                   version=xtuml.version.complete_string,
                                   formatter=optparse.TitledHelpFormatter())

    parser.add_option("-v", "--verbosity", dest='verbosity', action="count",
                      default=1, help="increase debug logging level")
    parser.add_option("-f", "--function", dest='function', action="store",
                      help="invoke function named NAME", metavar='NAME')
    parser.add_option("-c", "--component", dest='component', action="store",
                      help="look for the function in a component named NAME",
                      metavar='NAME', default=None)

    (opts, args) = parser.parse_args()
    if len(args) == 0 or not opts.function:
        parser.print_help()
        sys.exit(1)

    levels = {
        0: logging.ERROR,
python
{ "resource": "" }
q279196
serialize_value
test
def serialize_value(value, ty):
    '''
    Serialize a value from an xtuml metamodel instance.
    '''
    ty = ty.upper()

    null_value = {
        'BOOLEAN'   : False,
        'INTEGER'   : 0,
        'REAL'      : 0.0,
        'STRING'    : '',
        'UNIQUE_ID' : 0
    }

    transfer_fn = {
        'BOOLEAN'   : lambda v: '%d' % int(v),
        'INTEGER'   : lambda v: '%d' % v,
        'REAL'
python
{ "resource": "" }
q279197
serialize_association
test
def serialize_association(ass):
    '''
    Serialize an xtuml metamodel association.
    '''
    s1 = '%s %s (%s)' % (ass.source_link.cardinality,
                         ass.source_link.to_metaclass.kind,
                         ', '.join(ass.source_keys))

    if ass.target_link.phrase:
        s1 += " PHRASE '%s'" % ass.target_link.phrase

    s2 = '%s %s (%s)' % (ass.target_link.cardinality,
                         ass.target_link.to_metaclass.kind,
                         ', '.join(ass.target_keys))
python
{ "resource": "" }
q279198
serialize_class
test
def serialize_class(Cls): ''' Serialize an xtUML metamodel class. ''' metaclass = xtuml.get_metaclass(Cls) attributes = ['%s %s' % (name, ty.upper()) for name, ty in metaclass.attributes]
python
{ "resource": "" }
q279199
main
test
def main():
    """Function for command line execution"""
    parser = ArgumentParser(description="search files using n-grams")
    parser.add_argument('--path', dest='path', help="where to search",
                        nargs=1, action="store", default=getcwd())
    parser.add_argument('--update', dest='update', help="update the index",
                        action='store_true', default=True)
    parser.add_argument('--filetype', dest='filetype',
                        help="any, images, documents, code, audio, video",
                        nargs=1, action="store", default=["any"])
    parser.add_argument('--verbose', dest='verbose', help="extended output",
                        action='store_true', default=False)
    parser.add_argument('--results', dest='results',
                        help="number of results to display",
                        action="store", default=10)
    parser.add_argument('query', nargs='+', help="what to search",
                        action="store")
    args = parser.parse_args()

    if args.verbose:
        verbose = 2
        pprint(args)
    else:
python
{ "resource": "" }