code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class ParseError(Exception):
    """Exception to signal the parser is stuck.

    Carries the offending token's type, value and start position in
    addition to a human-readable message.
    """

    def __init__(self, msg, type, value, start_pos):
        detail = "%s: type=%r, value=%r, start_pos=%r" % (
            msg, tokenize.tok_name[type], value, start_pos)
        super(ParseError, self).__init__(detail)
        self.msg = msg
        self.type = type
        self.value = value
        self.start_pos = start_pos
Exception to signal the parser is stuck.
62599037287bf620b6272d63
class Instance(object):
    """Encapsulates an ISerializable instance for TreeSerializer.

    Implements L{IInstance} and supports equality comparison on the
    (type_name, snapshot) pair.
    """

    implements(IInstance)

    __slots__ = ("type_name", "snapshot")

    @classmethod
    def _build(cls, data):
        # Inverse of serialization: data is a (type_name, snapshot) pair.
        type_name, snapshot = data
        return cls(type_name, snapshot)

    def __init__(self, type_name, snapshot):
        self.type_name = type_name
        self.snapshot = snapshot

    def __repr__(self):
        return "<Instance %s: %r>" % (self.type_name, self.snapshot)

    def __eq__(self, other):
        if not isinstance(other, Instance):
            return NotImplemented
        return (self.type_name == other.type_name
                and self.snapshot == other.snapshot)

    def __ne__(self, other):
        # Mirror __eq__.  The original used an exact type(...) check here,
        # which disagreed with __eq__'s isinstance check for subclasses.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
Used by TreeSerializer to encapsulate ISerializable instances. Implements L{IInstance} and can be compared for equality.
625990378da39b475be04369
class DownloadErrorCounter(object):
    """Class for tracking download errors in a thread-safe way."""

    def __init__(self):
        self.error_count = 0
        self.lock = threading.Lock()

    def update_counter(self):
        """Increment the error count atomically."""
        with self.lock:
            self.error_count += 1

    def get_counter(self):
        """Return the current error count.

        Reads under the lock (the original read without it) so a caller
        never observes a value while another thread is mid-update.
        """
        with self.lock:
            return self.error_count
Class for tracking download errors in a thread-safe way
6259903715baa72349463115
class replacefile(TextOp):
    """Send input to a file, consuming input generators first.

    Works like :class:`textops.tofile` except the whole input is
    rendered to a string in memory before the file is opened, which is
    mandatory for in-file textops (reading and writing the same path).

    Args:
        filename (str): the file to send output to
        mode (str): file open mode (default: 'w')
        newline (str): newline string added for each line (default: '\\n')
    """

    @classmethod
    def op(cls, text, filename, mode='w', newline='\n', *args, **kwargs):
        # Materialize the input before touching the target file so that
        # in-place operations on the same file work.
        rendered = TextOp.make_string(text, newline)
        with open(filename, mode) as out:
            out.write(rendered)
send input to file Works like :class:`textops.tofile` except it takes care to consume input text generators before writing the file. This is mandatory when doing some in-file textops. The drawback is that the data to write to file is stored temporarily in memory. This does not work:: cat('myfile').sed('from_pattern','to_pattern').tofile('myfile').n This works:: cat('myfile').sed('from_pattern','to_pattern').replacefile('myfile').n Args: filename (str): The file to send output to mode (str): File open mode (Default : 'w') newline (str): The newline string to add for each line (default: '\n') Examples: >>> cat('myfile').sed('from_pattern','to_pattern').replacefile('myfile').n
62599037d53ae8145f9195de
class ContentSourceIDs(Base):
    """Maps an internal post to its identity in an external source.

    A table that keeps track of the external identities that an
    internal post can be exported to; a stepping-stone to having Sinks.
    """

    __tablename__ = 'content_source_ids'

    id = Column(Integer, primary_key=True)

    # The external content source the post was pushed to.
    source_id = Column(
        Integer,
        ForeignKey('content_source.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False, index=True)
    source = relationship('ContentSource', backref=backref(
        'pushed_messages', cascade='all, delete-orphan'))

    # The internal post being exported.
    post_id = Column(
        Integer,
        ForeignKey('content.id', onupdate='CASCADE', ondelete='CASCADE'),
        nullable=False, index=True)
    post = relationship('Content', backref=backref('post_sink_associations',
                                                   cascade='all, delete-orphan'))

    # Identifier of the message inside the external source.
    message_id_in_source = Column(String(256), nullable=False, index=True)
A table that keeps track of the number of external identities that an internal post can be exported to. A stepping-stone to having Sinks
625990373eb6a72ae038b7e3
class Spectrum(object):
    """A class for retrieving and caching details about a Spectrum."""

    def __init__(self, cs, spectrum_id):
        self._cs = cs
        self._spectrum_id = int(spectrum_id)

    def __eq__(self, other):
        return isinstance(other, Spectrum) and self.spectrum_id == other.spectrum_id

    def __repr__(self):
        return 'Spectrum(%r)' % self.spectrum_id

    @classmethod
    def from_info_dict(cls, cs, info):
        """Build a Spectrum from an info dict, pre-seeding the cache."""
        s = cls(cs, info['spectrum_id'])
        s._info = info
        return s

    @property
    def _spectrum_info(self):
        # Lazily fetch and cache the info dict on first access.
        if not hasattr(self, '_info'):
            self._info = self._cs.get_spectrum_info(self._spectrum_id)
        return self._info

    @property
    def spectrum_id(self):
        return self._spectrum_id

    @property
    def csid(self):
        return self._spectrum_info['csid']

    @property
    def spectrum_type(self):
        return self._spectrum_info['spectrum_type']

    @property
    def file_name(self):
        return self._spectrum_info['file_name']

    @property
    def comments(self):
        return self._spectrum_info.get('comments')

    @property
    def url(self):
        # Direct download URL for the spectrum blob.
        return 'http://www.chemspider.com/FilesHandler.ashx?type=blob&disp=1&id=%s' % self.spectrum_id

    @memoized_property
    def data(self):
        # Downloaded spectrum file contents (cached after first fetch).
        r = self._cs.http.get(self.url)
        return r.text

    @property
    def original_url(self):
        return self._spectrum_info.get('original_url')

    @property
    def submitted_date(self):
        return timestamp(self._spectrum_info['submitted_date'])
A class for retrieving and caching details about a Spectrum.
6259903726068e7796d4dac2
class Container(object):
    """A class to represent a container backed by a Python list."""

    def __init__(self):
        self._items = []

    def _set_list(self, new_list):
        # Replace the backing list wholesale.
        self._items = new_list

    def _get_list(self):
        return self._items

    def _put(self, item):
        self._items.append(item)

    def _peek(self, index_peek=FIRST_ELEMENT_INDEX):
        """Return (without removing) the item at index_peek."""
        if self.is_empty():
            raise EmptyContainerError('%s is empty' % self.__class__.__name__)
        return self._items[index_peek]

    def _get(self, index_pop=FIRST_ELEMENT_INDEX):
        """Remove and return the item at index_pop."""
        if self.is_empty():
            raise EmptyContainerError('%s is empty' % self.__class__.__name__)
        return self._items.pop(index_pop)

    def clear(self):
        self._items = []

    def size(self):
        return len(self._items)

    def is_empty(self):
        return not self._items
A class to represent a container.
6259903707d97122c4217e17
class ReactionEnergyBarrier(ScalarProperty):
    """Reaction energy barrier.

    The value is the reaction energy with the largest absolute
    magnitude reported by the parser (None entries count as 0).
    """

    def __init__(self, name, parser, *args, **kwargs):
        super(ReactionEnergyBarrier, self).__init__(name, parser, *args, **kwargs)
        energies = self.parser.reaction_energies()
        # max() with a key is O(n) versus the original full reverse
        # sort, and like the stable sort it picks the earliest entry
        # among ties.
        self.value = max(energies, key=lambda e: abs(e or 0))
Reaction energy barrier.
62599037b830903b9686ed36
class PyOpencensus(PythonPackage):
    """A stats collection and distributed tracing framework."""

    homepage = "https://github.com/census-instrumentation/opencensus-python"
    pypi = "opencensus/opencensus-0.7.10.tar.gz"

    version('0.7.10', sha256='2921e3e570cfadfd123cd8e3636a405031367fddff74c55d3fe627a4cf8b981c')

    depends_on('py-setuptools', type='build')
    depends_on('[email protected]', type=('build', 'run'))
    depends_on('[email protected]:1.999', type=('build', 'run'))
A stats collection and distributed tracing framework.
62599037a4f1c619b294f744
class IronicHostManager(host_manager.HostManager):
    """Ironic HostManager class.

    Treats compute nodes whose hypervisor type is Ironic specially:
    they get IronicNodeState host states, empty instance info, and
    (when configured) a baremetal-specific filter list.
    """

    @staticmethod
    def _is_ironic_compute(compute):
        # ComputeNode objects may lack the hypervisor_type field.
        hypervisor = compute.hypervisor_type if 'hypervisor_type' in compute else None
        return hypervisor == obj_fields.HVType.IRONIC

    def _load_filters(self):
        if CONF.filter_scheduler.use_baremetal_filters:
            return CONF.filter_scheduler.baremetal_enabled_filters
        return super(IronicHostManager, self)._load_filters()

    def host_state_cls(self, host, node, **kwargs):
        compute = kwargs.get('compute')
        if compute and self._is_ironic_compute(compute):
            return IronicNodeState(host, node)
        return host_manager.HostState(host, node)

    def _init_instance_info(self, compute_nodes=None):
        """Seed instance info, excluding Ironic nodes (they carry none)."""
        context = context_module.RequestContext()
        if not compute_nodes:
            compute_nodes = objects.ComputeNodeList.get_all(context).objects
        non_ironic = [node for node in compute_nodes
                      if not self._is_ironic_compute(node)]
        super(IronicHostManager, self)._init_instance_info(non_ironic)

    def _get_instance_info(self, context, compute):
        if compute and self._is_ironic_compute(compute):
            return {}
        return super(IronicHostManager, self)._get_instance_info(context, compute)
Ironic HostManager class.
6259903726238365f5fadcd0
class Game:
    """All the data we store for a game, persisted in memcache."""

    # Fields stored for every game, mirrored in put()/get_by_key_name().
    _FIELDS = ('userX', 'userO', 'board', 'moveX', 'winner', 'winning_board')

    def __init__(self, key_name, **args):
        self.key_name = key_name
        for field in self._FIELDS:
            setattr(self, field, args.get(field))

    @classmethod
    def get_by_key_name(cls, key_name):
        """Load a game from memcache; returns None when absent."""
        # NOTE(review): `unicode` implies Python 2; keys are stored
        # UTF-8 encoded.
        if isinstance(key_name, unicode):
            key_name = key_name.encode('utf8')
        data = mc.get(key_name)
        if data:
            return cls(key_name, **data)

    def put(self):
        """Write this game's state back to memcache."""
        mc.set(self.key_name, {
            'userX': self.userX,
            'userO': self.userO,
            'board': self.board,
            'moveX': self.moveX,
            'winner': self.winner,
            'winning_board': self.winning_board,
        })
All the data we store for a game
6259903730c21e258be99989
class NotEmptyError(enum.IntEnum):
    """Enum describing possible not empty errors.

    Attributes:
        UNSPECIFIED (int): Enum unspecified.
        UNKNOWN (int): The received error code is not known in this version.
        EMPTY_LIST (int): Empty list.
    """

    UNSPECIFIED = 0
    UNKNOWN = 1
    EMPTY_LIST = 2
Enum describing possible not empty errors. Attributes: UNSPECIFIED (int): Enum unspecified. UNKNOWN (int): The received error code is not known in this version. EMPTY_LIST (int): Empty list.
625990378da39b475be0436b
class DeltaFeature(Layer):
    """Layer for calculating time-wise deltas.

    Subtracts a shifted copy of the input from the input itself.

    NOTE(review): the shifted sequence concatenates zeros_like(x[:, 1:])
    followed by x[:, :1] (zeros everywhere except the first frame at the
    last position); a conventional delta would shift by one time step
    instead — confirm this ordering is intentional.
    """

    def build(self, input_shape):
        # Require (batch, time, features)-style rank-3 input.
        if len(input_shape) != 3:
            raise ValueError('DeltaFeature input should have three '
                             'dimensions. Got %d.' % len(input_shape))
        super(DeltaFeature, self).build(input_shape)

    def call(self, x, mask=None):
        zeros_part = K.zeros_like(x[:, 1:])
        first_frame = x[:, :1]
        shifted = K.concatenate([zeros_part, first_frame], axis=1)
        return x - shifted

    def get_output_shape_for(self, input_shape):
        # Deltas preserve the input shape.
        return input_shape
Layer for calculating time-wise deltas.
6259903715baa72349463117
class ReferencesField(JsonMixin, Field):
    """A references form field.

    Defaults its widget to ReferencesFieldWidget and unserializes the
    submitted value into reference objects.
    """

    def __init__(self, **params):
        params.setdefault('widget', ReferencesFieldWidget)
        super(ReferencesField, self).__init__(**params)

    def to_python(self, value):
        cleaned = super(ReferencesField, self).to_python(value)
        return utils.unserialize_references(cleaned)
A references form field.
625990373eb6a72ae038b7e5
class QueueHandler(BaseHandler):
    """Opaquely serializes Queue objects.

    Queues contain mutex and condition variables which cannot be
    serialized, so flattening stores nothing queue-specific and
    restoring constructs a fresh, empty Queue.
    """

    def flatten(self, obj, data):
        # Nothing from the queue itself is serializable; pass through.
        return data

    def restore(self, data):
        return queue.Queue()
Opaquely serializes Queue objects Queues contains mutex and condition variables which cannot be serialized. Construct a new Queue instance when restoring.
625990370a366e3fb87ddb61
class Grid(Drawable):
    """Grid used for player pathfinding.

    size: pixel dimensions as a (width, height) tuple, e.g. (800, 600).
    The grid samples one node every GRID_SPACING pixels; shortestPath
    runs a breadth-first search between two nodes.
    """

    # Sentinel for cells that are not part of the walkable grid.
    NIL = Node(-1, -1, None, None, None, None)

    def __init__(self, size, walls=None):
        super(Grid, self).__init__(Vector2D(0, 0))
        if walls is None:  # avoid the shared mutable default argument
            walls = []
        self.nodes = []
        self.size = size
        self.width = int(self.size[0] / GRID_SPACING) + 1
        self.height = int(self.size[1] / GRID_SPACING) + 1
        for y in range(self.height):
            for x in range(self.width):
                # Neighbor offsets: north, east, south, west.
                north, east, south, west = (0, -1), (1, 0), (0, 1), (-1, 0)
                # Drop neighbors that fall off the grid.  (The original
                # reused `w` for both the grid width and the west offset
                # and swapped the x/y boundary checks, so edge pruning
                # never worked.)
                if x == 0:
                    west = None
                if y == 0:
                    north = None
                if x == self.width - 1:
                    east = None
                if y == self.height - 1:
                    south = None
                if x == 40 and (y < 40 or y > 45):
                    # Hard-coded wall column with a gap at rows 40-45.
                    self.nodes.append(Grid.NIL)
                else:
                    self.nodes.append(Node(x, y, north, east, south, west))

    def reset(self):
        for node in self.nodes:
            node.reset()

    def draw(self, g):
        for node in self.nodes:
            node.draw(g)

    def getClosestNodePosition(self, position):
        """Snap a pixel position to the nearest grid node position."""
        x = math.floor(position.x / GRID_SPACING) * GRID_SPACING
        y = math.floor(position.y / GRID_SPACING) * GRID_SPACING
        return Vector2D(x, y)

    def getNodeAt(self, position):
        p = self.getClosestNodePosition(position)
        i = self.getNodeIndex(p.x / GRID_SPACING, p.y / GRID_SPACING)
        return self.nodes[i]

    def shortestPath(self, source, dest):
        """Breadth-first search from source to dest.

        Returns the destination node (whose previousNode chain encodes
        the path back to source), or None if dest is unreachable.
        """
        self.reset()
        nodeQueue = queue.Queue()
        nodeQueue.put_nowait(source)
        source.previousNode = Grid.NIL
        while not nodeQueue.empty():
            currentNode = nodeQueue.get_nowait()
            if currentNode == dest:
                # Drain the queue before returning.
                while not nodeQueue.empty():
                    nodeQueue.get_nowait()
                return currentNode
            currentNode.color = R
            for offset in currentNode.neighbors:
                if offset:
                    index = self.getNodeIndex(currentNode.x + offset[0],
                                              currentNode.y + offset[1])
                    # The original tested `if index:` which wrongly
                    # rejected index 0 (node at the origin).
                    if index is not None:
                        neighbor = self.nodes[index]
                        if neighbor and neighbor.previousNode is None:
                            neighbor.previousNode = currentNode
                            nodeQueue.put(neighbor)
        return None

    def getNodeIndex(self, x, y):
        """Return the flat index for grid cell (x, y), or None if out
        of bounds."""
        if 0 <= x < self.width and 0 <= y < self.height:
            return int((y * self.width) + x)
        return None
Grid class used for player pathfinding. size: the pixel dimensions of the grid as a tuple, e.g. (800, 600).
6259903726068e7796d4dac4
class TypeContext(object):
    """Contextual information for a message/field.

    Provides information around namespaces and enclosing types for
    fields and nested messages/enums.
    """

    def __init__(self, source_code_info, name):
        self.source_code_info = source_code_info
        # Path into the proto SourceCodeInfo location table.
        self.path = []
        self.name = name
        self.map_typenames = {}
        self.oneof_fields = {}
        self.oneof_names = {}
        self.oneof_required = {}
        self.type_name = 'file'

    def _Extend(self, path, type_name, name):
        """Return a child context with `path` appended and `name`
        qualified by this context's name."""
        extended_name = name if not self.name else '%s.%s' % (self.name, name)
        child = TypeContext(self.source_code_info, extended_name)
        child.path = self.path + path
        child.type_name = type_name
        child.map_typenames = self.map_typenames.copy()
        child.oneof_fields = self.oneof_fields.copy()
        child.oneof_names = self.oneof_names.copy()
        child.oneof_required = self.oneof_required.copy()
        return child

    def ExtendMessage(self, index, name):
        return self._Extend([4, index], 'message', name)

    def ExtendNestedMessage(self, index, name):
        return self._Extend([3, index], 'message', name)

    def ExtendField(self, index, name):
        return self._Extend([2, index], 'field', name)

    def ExtendEnum(self, index, name):
        return self._Extend([5, index], 'enum', name)

    def ExtendNestedEnum(self, index, name):
        return self._Extend([4, index], 'enum', name)

    def ExtendEnumValue(self, index, name):
        return self._Extend([2, index], 'enum_value', name)

    def ExtendOneof(self, index, name):
        return self._Extend([8, index], "oneof", name)

    def LeadingCommentPathLookup(self):
        return self.source_code_info.LeadingCommentPathLookup(
            self.path, self.type_name)

    def GithubUrl(self):
        return self.source_code_info.GithubUrl(self.path)
Contextual information for a message/field. Provides information around namespaces and enclosing types for fields and nested messages/enums.
62599038711fe17d825e155a
class Action:
    """A single transition in the planning problem.

    Consists of the action name, the action cost, the starting state,
    and the state that would result if this action were taken.
    """

    def __init__(self, actionName, actionCost, startState, resultingState):
        self.actionName = actionName
        self.actionCost = actionCost
        self.startState = startState
        self.resultingState = resultingState

    def getActionName(self):
        return self.actionName

    def getActionCost(self):
        return self.actionCost

    def getStartState(self):
        return self.startState

    def getResultingState(self):
        return self.resultingState

    def __str__(self):
        return "Action(%s $%s %s->%s)" % (
            self.actionName, self.actionCost,
            self.startState, self.resultingState)

    def __repr__(self):
        return self.__str__()
Each action consists of the ACTION string, the action cost, the starting state, and the state that would result if this action were taken.
625990384e696a045264e6e0
class PostResultSet(ResultSet):
    """A ResultSet with methods tailored to the values returned by the
    Post Choreo.

    The ResultSet object is used to retrieve the results of a Choreo
    execution.
    """

    def getJSONFromString(self, str):
        # Parse a JSON string into Python objects.
        return json.loads(str)

    def get_HTTPLog(self):
        """Full HTTP log of the request, or None if absent."""
        return self._output.get('HTTPLog', None)

    def get_ResponseStatusCode(self):
        """HTTP status code of the response, or None if absent."""
        return self._output.get('ResponseStatusCode', None)

    def get_Response(self):
        """Raw response body, or None if absent."""
        return self._output.get('Response', None)
A ResultSet with methods tailored to the values returned by the Post Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
625990388c3a8732951f76d4
class _VmGroupSpecDecoder(option_decoders.TypeVerifier):
    """Validates a single VmGroupSpec dictionary."""

    def __init__(self, **kwargs):
        super(_VmGroupSpecDecoder, self).__init__(valid_types=(dict,), **kwargs)

    def Decode(self, value, component_full_name, flag_values):
        """Verify the dict value and build a _VmGroupSpec from it."""
        config = super(_VmGroupSpecDecoder, self).Decode(
            value, component_full_name, flag_values)
        full_name = self._GetOptionFullName(component_full_name)
        return _VmGroupSpec(full_name, flag_values=flag_values, **config)
Validates a single VmGroupSpec dictionary.
62599038c432627299fa4175
class FormularioAdminRegPerfil(ModelForm):
    """Form used by the administrator to update a user profile.

    Configures the document-type selector and the identity-document
    field with form-control styling, labels and required flags.
    """

    class Meta:
        model = UserProfile
        fields = ['fk_tipo_documento', 'id_perfil']

    def __init__(self, *args, **kwargs):
        super(FormularioAdminRegPerfil, self).__init__(*args, **kwargs)
        doc_type = self.fields['fk_tipo_documento']
        doc_type.empty_label = 'Seleccione el Tipo de Documento'
        doc_type.widget.attrs.update({'class': 'form-control'})
        doc_type.label = 'Tipo de Documento'
        doc_type.required = True
        doc_id = self.fields['id_perfil']
        doc_id.widget.attrs.update({'class': 'form-control',
                                    'placeholder': 'Documento de identidad'})
        doc_id.label = 'Documento de Identidad'
        doc_id.required = True
! Clase que permite crear el formulario para actualizar usuario por el administrador @author Ing. Leonel P. Hernandez M. (lhernandez at cenditel.gob.ve) @copyright <a href='http://www.gnu.org/licenses/gpl-2.0.html'>GNU Public License versión 2 (GPLv2)</a> @date 09-01-2017 @version 1.0.0
62599038b5575c28eb713588
class TestAppWithStaticFiles(TestCase):
    """We are using :class:`.Base` on a Flask app configured with a
    custom static URL path."""

    def setUp(self):
        self.static_url_path = '/a/static/path'
        self.host = 'foo.host'
        self.version = '0.3.2'
        self.app = Flask(__name__, static_url_path=self.static_url_path)
        self.app.config['APP_VERSION'] = self.version
        self.app.config['SERVER_NAME'] = self.host

    def test_base_alters_static_url(self):
        """Applying Base rewrites static URLs to the versioned path."""
        # Before Base: the app's own static_url_path is used.
        with self.app.app_context():
            target_url = url_for('static', filename='foo.txt')
        self.assertEqual(
            target_url, f'http://{self.host}/a/static/path/foo.txt'
        )
        Base(self.app)
        # After Base: URLs follow /static/<app name>/<version>/.
        with self.app.app_context():
            target_url = url_for('static', filename='foo.txt')
        self.assertEqual(
            target_url,
            f'http://{self.host}/static/{self.app.name}/{self.version}/foo.txt'
        )
We are using :class:`.Base` on a Flask app.
6259903894891a1f408b9fb6
class Corpus(SentenceCollection):
    """Class for source documents.

    Contains utilities for loading a document set from a directory and
    optionally simplifying and/or translating the extracted sentences.

    NOTE(review): uses Python 2 constructs (``.next()`` on the os.walk
    generator, ``str.decode``) — not Python 3 compatible as written.
    """

    def __init__(self, dirname):
        super(Corpus, self).__init__()
        self._dirname = dirname
        self._prepareSentenceSplitter()
        self._documents = []

    def _prepareSentenceSplitter(self):
        # Split a document into paragraphs, sentence-split each
        # paragraph, and flatten the per-paragraph lists into one list.
        self._sentenceSplitter = lambda doc: sum(
            map(lambda p: nlp.getSentenceSplitter()(p), doc.split("\n")), []
        )

    def load(self, params, translate=False, replaceWithTranslation=False,
             simplify=False, replaceWithSimplified=False):
        """Load every file in the corpus directory as sentences.

        Returns self for chaining.
        """
        self.setSourceLang(params['sourceLang'])
        self.setTargetLang(params['targetLang'])
        # Only the files of the top-level directory are read.
        files = map(lambda f: os.path.join(self._dirname, f),
                    os.walk(self._dirname).next()[2])
        sentences = []
        for filename in files:
            with open(filename) as f:
                document = f.read().decode('utf-8')
                self._documents.append(document)
                sentences.extend(self._sentenceSplitter(document))
        sentences = map(lambda s: s.strip(), sentences)
        # De-duplicate before wrapping in Sentence objects.
        self.addSentences(map(Sentence, set(sentences)))
        if simplify:
            logger.info("Simplifying sentences")
            self.simplify(self.sourceLang, replaceOriginal=replaceWithSimplified)
        if translate:
            if self.sourceLang != self.targetLang:
                logger.info("Translating sentences")
                self.translate(self.sourceLang, self.targetLang,
                               replaceOriginal=replaceWithTranslation)
            if replaceWithTranslation:
                self.setSourceLang(self.targetLang)
            self.generateTranslationSentenceVectors()
        self.generateSentenceVectors()
        return self
Class for source documents. Contains utilities for loading document set.
625990386fece00bbacccb29
class CephInt(CephArgtype):
    """range-limited integers, [+|-][0-9]+ or 0x[0-9a-f]+

    range: list of 1 or 2 ints, [min] or [min,max]

    NOTE(review): valid() parses with int(s), which rejects the 0x hex
    form advertised above — confirm which behavior is intended.
    """

    def __init__(self, range=''):
        if range == '':
            self.range = list()
        else:
            # '|'-separated bounds, e.g. '0|100' or '0'.
            self.range = [int(bound) for bound in range.split('|')]

    def valid(self, s, partial=False):
        try:
            val = int(s)
        except ValueError:
            raise ArgumentValid("{0} doesn't represent an int".format(s))
        if len(self.range) == 2:
            if val < self.range[0] or val > self.range[1]:
                raise ArgumentValid("{0} not in range {1}".format(val, self.range))
        elif len(self.range) == 1:
            if val < self.range[0]:
                raise ArgumentValid("{0} not in range {1}".format(val, self.range))
        self.val = val

    def __str__(self):
        bounds = ''
        if len(self.range) == 1:
            bounds = '[{0}-]'.format(self.range[0])
        if len(self.range) == 2:
            bounds = '[{0}-{1}]'.format(self.range[0], self.range[1])
        return '<int{0}>'.format(bounds)
range-limited integers, [+|-][0-9]+ or 0x[0-9a-f]+ range: list of 1 or 2 ints, [min] or [min,max]
6259903830c21e258be9998b
class Record(object):
    """A class that represents the CODA record type in Python.

    When a record is read from a product file, a Record instance is
    created and populated with fields using the _registerField()
    method. Each field appears as an instance attribute: the field name
    is the attribute name, and its value is read from the product file.
    """

    # Maps numpy scalar types to CODA type names used by __str__.
    _typeToString = {
        numpy.int8: "int8",
        numpy.uint8: "uint8",
        numpy.int16: "int16",
        numpy.uint16: "uint16",
        numpy.int32: "int32",
        numpy.uint32: "uint32",
        numpy.int64: "int64",
        numpy.float32: "float",
        numpy.float64: "double",
        numpy.complex128: "complex",
        numpy.object_: "object",
    }

    def __init__(self):
        self._registeredFields = []

    def _registerField(self, name, data):
        """Register `name` as a field and expose it as an attribute."""
        self._registeredFields.append(name)
        self.__setattr__(name, data)

    def __len__(self):
        return len(self._registeredFields)

    def __getitem__(self, key):
        # Fields are addressable by (possibly negative) integer index,
        # in registration order.
        if not isinstance(key, int):
            raise TypeError("index should be an integer")
        if key < 0:
            key += len(self._registeredFields)
        if key < 0 or key >= len(self._registeredFields):
            raise IndexError
        return self.__dict__[self._registeredFields[key]]

    def __repr__(self):
        return "<coda record>"

    def __str__(self):
        buf = io.StringIO()
        for field in self._registeredFields:
            data = self.__dict__[field]
            buf.write(u"%32s:" % (field))
            if isinstance(data, Record):
                buf.write(u"record (%i fields)\n" % (len(data),))
            elif isinstance(data, numpy.ndarray):
                # e.g. "[2x3 int32]" for a 2x3 int32 array.
                dims = "x".join("%i" % (d,) for d in data.shape)
                buf.write(u"[%s %s]\n" % (dims, self._typeToString[data.dtype.type]))
            elif isinstance(data, str):
                buf.write(u"\"%s\"\n" % (data,))
            else:
                buf.write(u"%s\n" % (data,))
        return buf.getvalue()
A class that represents the CODA record type in Python. When a record is read from a product file, a Record instance is created and populated with fields using the _registerField() method. Each field will appear as an instance attribute. The field name is used as the name of the attribute, and its value is read from the product file.
625990383eb6a72ae038b7e7
class DeleteUpdateCommentAPIView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a comment on an article.

    Only the authenticated author of a comment may update or delete it.
    """

    queryset = Comment.objects.all()
    serializer_class = CommentSerializer
    permission_classes = (IsAuthenticated,)

    def delete(self, request, *args, **kwargs):
        comment = get_object_or_404(Comment, pk=self.kwargs.get('pk'))
        if comment.author_id != request.user.id:
            return Response(
                data={'message': 'You can only delete your comment'},
                status=status.HTTP_403_FORBIDDEN
            )
        # The original re-fetched the comment from self.queryset here;
        # the instance we already hold is the same row, so delete it
        # directly and save a query.
        comment.delete()
        return Response(
            data={"message": "Comment has been successfully deleted"},
            status=status.HTTP_200_OK)

    def put(self, request, *args, **kwargs):
        comment = get_object_or_404(Comment, pk=self.kwargs.get('pk'))
        data = request.data["comment"]
        if comment.author_id != request.user.id:
            return Response(
                data={'message': 'You can only update your comment'},
                status=status.HTTP_403_FORBIDDEN
            )
        serializer = self.serializer_class(comment, data, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)
class to delete a comment on an article
6259903826068e7796d4dac6
class CephEntityAddr(CephIPAddr):
    """EntityAddress, that is, IP address/nonce."""

    def valid(self, s, partial=False):
        # The original raised a bare ValueError when '/' was missing or
        # appeared more than once; surface that as a validation error.
        try:
            ip, nonce = s.split('/')
        except ValueError:
            raise ArgumentValid("{0} must be of the form 'IP/nonce'".format(s))
        # The original called super(self.__class__, self), which recurses
        # forever if this class is ever subclassed; name the class
        # explicitly instead.
        super(CephEntityAddr, self).valid(ip)
        self.nonce = nonce
        self.val = s

    def __str__(self):
        return '<EntityAddr>'
EntityAddress, that is, IP address/nonce
625990388e05c05ec3f6f71a
@dataclass
class Program:
    """Represent a statically analysed program.

    A set of references, with a memory containing already initialized
    values, and a dict containing the ASTs of the different compiled
    files.
    """

    asts: Dict[str, AST] = field(default_factory=dict)
    memory: Memory = field(default_factory=Memory)
    environment: Environment = field(default_factory=Environment)

    def to_dict(self) -> Dict[str, Any]:
        """Summarize the program as a plain dictionary."""
        return {
            "memory": {
                "functions": sorted(list(self.memory._functions.keys())),
                "classes": sorted(list(self.memory._classes.keys())),
            },
            "environment": self.environment.to_dict(),
        }
Represent a statically analysed program : a set of references, with a memory containing already initialized values, and a dict containing the ASTs of the different compiled files.
62599038d10714528d69ef4a
class SingletonModel(models.Model):
    """Singleton Django Model.

    Ensures there's always only one entry in the database, and can fix
    the table (by deleting extra entries) even if rows were added via
    another mechanism. load() always returns the object - from the
    database if possible, or a new empty (default) instance if the
    database is still empty. Useful for system-wide user-editable
    settings.
    """

    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        # Delete every other row so this instance remains the only one.
        self.__class__.objects.exclude(id=self.id).delete()
        super(SingletonModel, self).save(*args, **kwargs)

    @classmethod
    def load(cls):
        """Return the single stored instance, or a fresh unsaved one."""
        try:
            return cls.objects.get()
        except cls.DoesNotExist:
            return cls()
Singleton Django Model Ensures there's always only one entry in the database, and can fix the table (by deleting extra entries) even if added via another mechanism. Also has a static load() method which always returns the object - from the database if possible, or a new empty (default) instance if the database is still empty. If your instance has sane defaults (recommended), you can use it immediately without worrying if it was saved to the database or not. Useful for things like system-wide user-editable settings.
62599038b830903b9686ed38
class _NXdataBaseDataView(DataView):
    """Base class for NXdata DataViews."""

    def __init__(self, *args, **kwargs):
        DataView.__init__(self, *args, **kwargs)

    def _updateColormap(self, nxdata):
        """Sync the default colormap normalization with the NXdata signal
        scale type: 'log' maps to log normalization, anything else to
        'linear'.  A None scale type leaves the colormap untouched."""
        cmap_norm = nxdata.plot_style.signal_scale_type
        if cmap_norm is not None:
            self.defaultColormap().setNormalization(
                'log' if cmap_norm == 'log' else 'linear')
Base class for NXdata DataView
6259903896565a6dacd2d84b
class WarningWidg(QtGui.QDialog):
    """Modal dialog that shows a warning message and records the user's
    yes/no answer in ``self.ans`` before closing."""

    def __init__(self, message, parent=None):
        super(WarningWidg, self).__init__(parent)
        warning_label = QtGui.QLabel('Warning: {:s}'.format(message))
        no_button = QtGui.QPushButton('No', self)
        no_button.clicked.connect(self.touch_no)
        yes_button = QtGui.QPushButton('Yes', self)
        yes_button.clicked.connect(self.touch_yes)
        layout = QtGui.QVBoxLayout()
        # Same stacking order as before: message, then No, then Yes.
        for widget in (warning_label, no_button, yes_button):
            layout.addWidget(widget)
        self.setLayout(layout)

    def touch_yes(self):
        """Record an affirmative answer and close the dialog."""
        self.ans = True
        self.done(0)

    def touch_no(self):
        """Record a negative answer and close the dialog."""
        self.ans = False
        self.done(0)
GUI to warn user about coming action and solicit response
62599038a8ecb0332587239e
class APIError(Exception):
    """Indicates an exception happened on the bot side of the RPC
    connection."""
Indicates an exception happened on the bot side of the RPC connection
6259903873bcbd0ca4bcb408
class SimilarUsersFollowingListView(SimilarUsersListView):
    """Listing of similar users restricted by the 'following' relation.

    NOTE(review): presumably selects users the given user follows that
    are similar to him -- confirm against SimilarUsersListView's handling
    of ``filmaster_type``.
    """

    # Selects the 'following' filter mode in the parent list view.
    filmaster_type = 'following'
Users related to a given user through the 'following' relation that are similar to that user
6259903882261d6c52730784
class Condition0(Condition):
    """Random event: true with the percentage chance supplied by
    parameter 0 (an expression evaluated at check time)."""

    def check(self, instance):
        """Roll 0-99 on the object's RNG and compare against the percent
        threshold (roll first, then evaluate, as before)."""
        roll = instance.objectPlayer.random.randrange(100)
        return roll < self.evaluate_index(0)
Random Event Parameters: 0: Percent (EXPRESSION, ExpressionParameter)
625990388a349b6b436873c1
class QLearning():
    """Tabular Q-learning with an epsilon-greedy behaviour policy.

    References:
        https://github.com/udacity/rl-cheatsheet/blob/master/cheatsheet.pdf
    """

    def __init__(self, env, epsilon = .9, alpha = .1, gamma = .9):
        self.env = env
        # One row per discrete state, one column per discrete action.
        self.Q = np.zeros(shape=(self.env.observation_space.n,
                                 self.env.action_space.n))
        self.epsilon = epsilon  # exploration probability
        self.alpha = alpha      # learning rate
        self.gamma = gamma      # discount factor

    def epsilon_greedy(self, S):
        """Return a random action with probability epsilon, otherwise the
        greedy action for state ``S``."""
        explore = np.random.uniform(0, 1) < self.epsilon
        if explore:
            return self.env.action_space.sample()
        return np.argmax(self.Q[S, :])

    def train(self, num_episodes = 10000, verbose = True):
        """Run ``num_episodes`` training episodes (capped at 1000 steps
        each) and return the learned Q-table."""
        start_time = datetime.now().replace(microsecond=0)
        for episode in range(num_episodes):
            state = self.env.reset()
            steps = 0
            while steps < 1000:
                steps += 1
                action = self.epsilon_greedy(state)
                next_state, reward, done, info = self.env.step(action)
                # Standard one-step TD update towards the bootstrapped target.
                td_target = reward + self.gamma * np.max(self.Q[next_state, :])
                self.Q[state, action] += self.alpha * (td_target - self.Q[state, action])
                if done:
                    break
                state = next_state
            if verbose:
                clear_output(wait=True)
                now_time = datetime.now().replace(microsecond=0)
                print("Epoch: {}/{} - Steps: {:4} - Duration: {}".format(
                    episode + 1, num_episodes, steps, now_time - start_time))
        return self.Q

    def run(self, verbose=False, sleep_time=.1):
        """Greedily execute one episode (up to 100 steps); return the last
        reward and the number of steps taken."""
        state = self.env.reset()
        max_steps = 100
        for step in range(max_steps):
            if verbose:
                time.sleep(sleep_time)
                clear_output(wait=True)
                print("Step: {}".format(step))
                self.env.render()
            action = np.argmax(self.Q[state, :])
            next_state, reward, done, info = self.env.step(action)
            if verbose:
                print(state, next_state, reward, done)
            if done:
                if verbose:
                    time.sleep(sleep_time)
                    clear_output(wait=True)
                    print("Final Steps: {}".format(step))
                    self.env.render()
                break
            state = next_state
        return reward, step + 1

    def save(self, path="qtable.npy"):
        """Persist the Q-table to ``path``."""
        np.save(path, self.Q)

    def load(self, path="qtable.npy"):
        """Restore the Q-table from ``path``."""
        self.Q = np.load(path)
Q-learning algorithms with epsilon greedy policy. References: https://github.com/udacity/rl-cheatsheet/blob/master/cheatsheet.pdf
6259903876d4e153a661db32
class Package(object):
    """Handle to a python package created by the "python package" builder.

    Holds useful paths and can be used as a context manager that adds the
    package temporarily to the python path so it can be imported.  On
    exit the package is removed from sys.path and its modules are purged
    from sys.modules.
    """

    def __init__(self, name, root_path, package_path, profiles=None):
        self.name = name
        self.root_path = root_path
        self.package_path = package_path
        self.profiles = profiles or {}

    def __enter__(self):
        # Prepend so this package shadows any same-named installed one.
        sys.path.insert(0, self.root_path)
        # Rebuild the pkg_resources working set so the new path entry is
        # visible to entry-point / distribution lookups; keep the old one
        # for restoration on exit.
        self._original_working_set = pkg_resources.working_set
        pkg_resources.working_set = pkg_resources.WorkingSet._build_master()
        return self

    def __exit__(self, type, value, tb):
        pkg_resources.working_set = self._original_working_set
        sys.path.remove(self.root_path)
        # Drop the package's modules and any parent namespace packages so
        # a later import starts from a clean slate.
        modules_to_remove = [
            name for name in sys.modules
            if name.startswith(self.name)]
        modules_to_remove += (set(sys.modules.keys())
                              & set(parent_namespaces(self.name)))
        for name in modules_to_remove:
            del sys.modules[name]

    def import_package(self):
        """Import and return the package's top-level module."""
        return import_module(self.name, package=self.name)

    @contextmanager
    def imported(self):
        """Context manager yielding the imported package module."""
        with self:
            yield self.import_package()

    def load_zcml(self, configuration_context):
        """Load the package's configure.zcml into the given context."""
        module = self.import_package()
        xmlconfig.file('configure.zcml', module, context=configuration_context)

    @contextmanager
    def zcml_loaded(self, configuration_context):
        """Context manager: package importable AND its ZCML loaded."""
        with self:
            self.load_zcml(configuration_context)
            yield self
A ``Package`` object is created when creating a python package using the "python package" builder. It contains infos about the package, such as useful paths and can be used for importing the package and doing things such as loading the ZCML. The object can be used as context manager to add the package temporarily to the python package so that it can be imported. When the context manager is exited, the package is removed from the python path and the currently loaded modules (sys.modules) is cleaned up.
62599038d4950a0f3b1116ff
class Downloader(object):
    """Downloader with pretty stdout progress output.

    Call :meth:`run` with a list of uris to fetch.  Runs up to ``nb_dl``
    DownloadGenerator instances concurrently by stepping each generator
    in turn (cooperative, not threaded).

    NOTE(review): this is Python 2 code (``dl.next()``, ``itervalues``).
    """

    def __init__(self, nb_dl=2):
        self._nb_dl = nb_dl          # max simultaneous downloads
        self._last_display = time.time()  # throttle for progress redraws

    def run(self, uri_list):
        downloaders = []   # active DownloadGenerator instances
        in_queue = []      # uris deferred while all slots are busy
        _download_infos = dict(count=0, start_ts = time.time())
        # Weak keys: progress entries vanish when a generator is collected.
        percent_memory = WeakKeyDictionary()
        write_out = sys.stdout.write
        def _download():
            # Advance every active generator by one step; a StopIteration
            # means that download finished.
            # NOTE(review): removes from `downloaders` while iterating it,
            # which can skip the next element -- confirm this is tolerable
            # (the outer while loop retries until the list drains).
            for dl in downloaders:
                try:
                    ret = dl.next()
                except StopIteration:
                    downloaders.remove(dl)
                    _download_infos['count'] += 1
                else:
                    # Generators yield ints to report percent progress.
                    if isinstance(ret, int):
                        percent_memory[dl] = ret
            t = time.time()
            # Redraw at most every 100 ms.
            if self._last_display + 0.1 < t:
                self._last_display = t
                sumup = ', '.join('%3d%%'%(val if int(val)<=100 else 0)
                                  for val in percent_memory.itervalues())
                write_out(' [ %s ] %d \r'%(sumup, _download_infos['count']))
                sys.stdout.flush()
        # `in_queue` is extended while being chained over, so deferred uris
        # are revisited after a free slot appears.
        for uri in chain( uri_list, in_queue ):
            if len(downloaders) < self._nb_dl:
                try:
                    dg = DownloadGenerator(uri)
                    dg.next()
                    downloaders.append( dg )
                except StopIteration:
                    # Generator finished immediately (nothing to download).
                    pass
            else:
                in_queue.append(uri)
                _download()
        # Drain the remaining active downloads.
        while downloaders:
            _download()
        t = time.time() - _download_infos['start_ts']
        write_out(" \nGot %d files in %s. Enjoy ;)\n"%(
            _download_infos['count'], duration_tidy(t)))
A nice downloader class with pretty user output (stdout) just call :meth:`run` with a list of uris you want to fetch
625990386e29344779b017d1
class IntentService():
    """Extracts the top-n intent categories from input text by comparing
    the mean GloVe vector of its noun phrases against each category."""

    def __init__(self, top_n):
        self.top_n = top_n  # number of categories to return
        self.__noun_phrase_tokens = []
        self.__default_categories = CATEGORIES
        self.__glove_model = GloveService()

    def __generate_text_vector(self):
        """Mean of the GloVe vectors of the extracted noun-phrase tokens;
        tokens without an embedding are silently skipped."""
        token_vectors = {}
        for token in self.__noun_phrase_tokens:
            try:
                token_vectors[token] = self.__glove_model.get_vector(token)
            except Exception:
                pass  # out-of-vocabulary token: ignore it
        return np.mean(list(token_vectors.values()), axis=0)

    def __get_text_category_affinity(self, text):
        """Build {'aspects': noun phrases, 'affinity': {category: cosine
        similarity or None}} for *text*."""
        try:
            self.__noun_phrase_tokens = NounPhraseExtraction().get_noun_phrases(text)
        except Exception:
            traceback.print_exc()
        aspects = self.__noun_phrase_tokens
        text_vector = self.__generate_text_vector()
        similarities = {}
        for category in self.__default_categories:
            try:
                category_vector = self.__glove_model.get_vector(category)
                similarities[category] = cosine_similarity(
                    text_vector.reshape(1, -1),
                    category_vector.reshape(1, -1)).item(0)
            except Exception:
                similarities[category] = None  # no embedding / bad vector
        return {"aspects": aspects, "affinity": similarities}

    def get_default_category(self, text):
        """Return the ``top_n`` categories with the highest affinity to
        *text*, best first."""
        affinity = self.__get_text_category_affinity(text).get('affinity')
        return nlargest(self.top_n, affinity, key=affinity.get)
Extracts the top n intent categories from the input text.
6259903816aa5153ce40166c
class SnippetViewSet(viewsets.ModelViewSet):
    """This viewset automatically provides 'list', 'create', 'retrieve',
    'update', and 'destroy' actions, plus a 'highlight' detail route."""

    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
    # Writes require authentication AND ownership; others are read-only.
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly)

    @detail_route(renderer_classes=[renderers.StaticHTMLRenderer])
    def highlight(self, request, *args, **kwargs):
        """Return the pre-rendered highlighted HTML for one snippet."""
        snippet = self.get_object()
        return Response(snippet.highlighted)

    def perform_create(self, serializer):
        # Attach the requesting user as the snippet owner on creation.
        serializer.save(owner=self.request.user)
This viewset automatically provides 'list', 'create', 'retrieve', 'update', and 'destroy' actions.
625990383eb6a72ae038b7e9
class AuthTokenSerializer(serializers.Serializer):
    """Serializer that validates email/password credentials and resolves
    them to a user for token authentication."""

    email = serializers.CharField()
    password = serializers.CharField(
        style={'input_type': 'password'},
        trim_whitespace=False
    )

    def validate(self, attrs):
        """Authenticate with the supplied credentials; attach the user to
        ``attrs`` on success, raise a ValidationError otherwise."""
        user = authenticate(
            request=self.context.get('request'),
            username=attrs.get('email'),
            password=attrs.get('password')
        )
        if not user:
            raise serializers.ValidationError(
                _('Unable to authenticate with provided credentials'),
                code='authentication'
            )
        attrs['user'] = user
        return attrs
Serializer for User Authentication Object
62599038ac7a0e7691f73668
class Robot():
    """Robot that receives a freshly generated random name -- two capital
    letters followed by three digits -- on creation and on reset()."""

    name = ''

    def __init__(self):
        self.name = self.generate_name()

    @staticmethod
    def generate_name():
        """Build a name like 'AB123' from a freshly (re)seeded RNG."""
        random.seed()
        number = random.randint(100, 999)
        letters = ''.join(random.choice(string.ascii_uppercase) for _ in range(2))
        return '{}{}'.format(letters, number)

    def reset(self):
        """Discard the current name and generate a new one."""
        self.name = self.generate_name()
Robot that is assigned a new randomly generated name at each instantiation and reset
62599038ec188e330fdf9a18
class _ScalarAccessIndexer(NDFrameIndexerBase):
    """Access scalars quickly (shared base for .at / .iat style indexers)."""

    def _convert_key(self, key, is_setter: bool = False):
        # Subclasses define how a raw key is validated / converted.
        raise AbstractMethodError(self)

    def __getitem__(self, key):
        if not isinstance(key, tuple):
            # A scalar (non-list-like) key addresses the first axis only;
            # wrap it into a 1-tuple.  List-like keys are invalid here.
            if not is_list_like_indexer(key):
                key = (key,)
            else:
                raise ValueError("Invalid call for scalar access (getting)!")
        key = self._convert_key(key)
        return self.obj._get_value(*key, takeable=self._takeable)

    def __setitem__(self, key, value):
        # Resolve callable keys against the object first.
        if isinstance(key, tuple):
            key = tuple(com.apply_if_callable(x, self.obj) for x in key)
        else:
            key = com.apply_if_callable(key, self.obj)
        # A callable may have returned a non-tuple; pad to ndim.
        if not isinstance(key, tuple):
            key = _tuplify(self.ndim, key)
        key = list(self._convert_key(key, is_setter=True))
        if len(key) != self.ndim:
            raise ValueError("Not enough indexers for scalar access (setting)!")
        self.obj._set_value(*key, value=value, takeable=self._takeable)
Access scalars quickly.
6259903866673b3332c31575
class DigitalReceiptSurvey(SurveyStatus):
    """Likert-scale survey about the MealTime digital-receipt experience.

    Every question field defaults to STRONGLY_AGREE (0); ``comments`` is
    free text.
    """

    # Likert scale: 0 = strongly agree ... 4 = strongly disagree.
    STRONGLY_AGREE = 0
    AGREE = 1
    NEITHER = 2
    DISAGREE = 3
    STRONGLY_DISAGREE = 4
    LIKERT_CHOICES = (
        ( STRONGLY_AGREE, 'Strongly agree'),
        ( AGREE, 'Agree'),
        ( NEITHER, 'Neither agree nor disagree'),
        ( DISAGREE, 'Disagree'),
        ( STRONGLY_DISAGREE, 'Strongly disagree'),
    )
    helpful = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="It was helpful to see my transaction history on the mobile app.")
    sharing = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="I was comfortable sharing the locations I purchased from.")
    stats = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="It was helpful to see basic statistics on where I frequently visit.")
    stats_change = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="Viewing statistics made me try different places that I visit less often.")
    popularity = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="It was helpful to know which places are popular.")
    friends = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="It was helpful to know where my friends frequently visit.")
    talk = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="MealTime allowed me to talk with friends about places to eat together.")
    new = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="MealTime allowed me to discover new places where I could eat.")
    reviews = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="MealTime reviews of locations were helpful.")
    reputation = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="I was aware that MealTime reviews of locations could only be made by people who transacted there.")
    spending = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="I was more aware of my TechCASH expenses through MealTime.")
    changed = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="I think MealTime changed my eating and spending behavior.")
    general = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="I believe a MealTime-like application linked to my credit or debit card could be helpful.")
    cheap = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="If I had a goal of saving on costs, I would like MealTime to suggest cheap places to eat.")
    healthy = models.IntegerField(default=0, choices=LIKERT_CHOICES, verbose_name="If I had a goal of eating healthy, I would like MealTime to suggest healthy places to eat.")
    comments = models.TextField(verbose_name="Any other comments about your experiences?")
Likert-scale design survey about the digital-receipt (MealTime) experience
6259903873bcbd0ca4bcb409
class ResourceObject(object):
    """Resource object where the job is applied on.

    Abstract base: the ``type`` JSON key selects the concrete subclass
    (InteractionModel, Catalog or SlotTypeReference) via
    ``discriminator_value_class_map``.
    """

    # Attribute name -> serialized type.
    deserialized_types = {
        'object_type': 'str'
    }
    # Attribute name -> JSON key.
    attribute_map = {
        'object_type': 'type'
    }
    supports_multiple_types = False
    # Discriminator value -> dotted path of the concrete model class.
    discriminator_value_class_map = {
        'InteractionModel': 'ask_smapi_model.v1.skill.interaction_model.jobs.interaction_model.InteractionModel',
        'Catalog': 'ask_smapi_model.v1.skill.interaction_model.jobs.catalog.Catalog',
        'SlotTypeReference': 'ask_smapi_model.v1.skill.interaction_model.jobs.slot_type_reference.SlotTypeReference'
    }
    json_discriminator_key = "type"
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, object_type=None):
        self.__discriminator_value = None
        self.object_type = object_type

    @classmethod
    def get_real_child_model(cls, data):
        """Return the concrete model class path for the payload's type."""
        discriminator_value = data[cls.json_discriminator_key]
        return cls.discriminator_value_class_map.get(discriminator_value)

    def to_dict(self):
        """Return the model's attributes as a dict, recursing into nested
        models, lists, dicts and enums."""
        result = {}
        for attr, _ in six.iteritems(self.deserialized_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else
                    x.value if isinstance(x, Enum) else x,
                    value
                ))
            elif isinstance(value, Enum):
                result[attr] = value.value
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else
                    (item[0], item[1].value)
                    if isinstance(item[1], Enum) else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        # Equal iff the other object is a ResourceObject with the same state.
        if not isinstance(other, ResourceObject):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
Resource object where the job is applied on. :param object_type: Polymorphic type of the ResourceObject. :type object_type: (optional) str .. note:: This is an abstract class. Use the following mapping, to figure out the model class to be instantiated, that sets ``type`` variable. | InteractionModel: :py:class:`ask_smapi_model.v1.skill.interaction_model.jobs.interaction_model.InteractionModel`, | | Catalog: :py:class:`ask_smapi_model.v1.skill.interaction_model.jobs.catalog.Catalog`, | | SlotTypeReference: :py:class:`ask_smapi_model.v1.skill.interaction_model.jobs.slot_type_reference.SlotTypeReference`
62599038a4f1c619b294f747
class PageQuerySet(models.QuerySet, TranslatableModelManager):
    """Queryset/manager helpers for help pages."""

    def active(self):
        """Restrict to pages flagged as active."""
        return self.filter(active=True)

    def get_by_uuid(self, uuid):
        """Fetch the single page with the given uuid."""
        return self.get(uuid=uuid)
Queryset/manager for the help pages
62599038be383301e0254997
class BayerShader(ImageShader):
    """Shader which performs bayer demosaic filtering.

    NOTE: see LICENSE for source of the original GLSL shader code.
    This has been modified for usage with GLSL 4.10 by @klauer
    """

    # GLSL sources shipped next to this module.
    vertex_source = Path(__file__).parent / 'bayer.vs'
    fragment_source = Path(__file__).parent / 'bayer.fs'
    # Bayer pattern name -> position of the first red pixel in the tile.
    patterns = {
        'RGGB': QtGui.QVector2D(0, 0),
        'GBRG': QtGui.QVector2D(0, 1),
        'GRBG': QtGui.QVector2D(1, 0),
        'BGGR': QtGui.QVector2D(1, 1),
    }

    def __init__(self, opengl_widget, *, fragment_main, definitions=None,
                 default_pattern='RGGB'):
        super().__init__(opengl_widget, fragment_main=fragment_main,
                         definitions=definitions)
        self.default_pattern = default_pattern
        # Uniforms may only be set while the shader program is bound.
        with bind(self.shader):
            self.shader.setUniformValue('firstRed',
                                        self.patterns[default_pattern])

    def _update(self, image_type):
        """Push the image size (and bayer pattern, if any) as uniforms."""
        width, height = image_type.width, image_type.height
        bayer_pattern = image_type.bayer_pattern
        self.opengl_widget.makeCurrent()
        with bind(self.shader):
            # (w, h, 1/w, 1/h) -- reciprocals precomputed for the shader.
            size_vector = QtGui.QVector4D(width, height,
                                          1. / width, 1. / height)
            self.shader.setUniformValue('sourceSize', size_vector)
            if bayer_pattern:
                self.shader.setUniformValue('firstRed',
                                            self.patterns[bayer_pattern])
Shader which performs bayer demosaic filtering NOTE: see LICENSE for source of the original GLSL shader code. This has been modified for usage with GLSL 4.10 by @klauer
6259903826238365f5fadcd6
class GameTurn(object):
    """Collects the actions that take place during one turn and, when it
    finishes, returns a summarized trace the engine can record."""

    def __init__(self, arena, turn_number):
        self.arena = arena
        self.trace = []    # summarized entries, built at end of turn
        self.history = []  # raw bot responses accepted during the turn
        self.turn_number = turn_number

    def evaluate_bot_action(self, bot_response):
        """Record a bot response if it is error-free and carries a type.

        Fix: ``dict.has_key`` is Python-2-only; the ``in`` operator is
        semantically identical and also works on Python 3.
        """
        if not bot_response.get('error') and 'action_type' in bot_response:
            self.history.append(bot_response)

    def summarize_moves(self, actions):
        """Collapse individual MOVE actions sharing (player, from, to)
        into one MOVE_UNITS entry carrying the moved unit count."""
        key_func = lambda x: (x['player'], x['from'], x['to'])
        for (player, origin, end), movements in groupby(sorted(actions, key=key_func), key_func):
            movements = list(movements)
            summary = {
                'action': 'MOVE_UNITS',
                'player': player,
                'from': {
                    "tile": {"x": origin.latitude, "y": origin.longitude},
                },
                'to': {
                    "tile": {"x": end.latitude, "y": end.longitude},
                    "units": len(movements)
                },
                'turn_number': self.turn_number,
            }
            self.trace.append(summary)

    def summarize_attacks(self, actions):
        """Emit one ATTACK trace entry per recorded attack."""
        for attack in actions:
            att_cord = attack['attacker_coord']
            def_cord = attack['defender_coord']
            self.trace.append({
                "action": "ATTACK",
                "player": attack['attacker_player'],
                "from": {
                    "tile": {"x": att_cord.latitude, "y": att_cord.longitude},
                    "dice": attack['attacker_dice'],
                    "remaining_units": attack['attacker_units'],
                    "lost_units": attack['attacker_loses']
                },
                "to": {
                    "player": attack['defender_player'],
                    "tile": {"x": def_cord.latitude, "y": def_cord.longitude},
                    "dice": attack['defender_dice'],
                    "remaining_units": attack['defender_units'],
                    "lost_units": attack['defender_loses']
                },
                'turn_number': self.turn_number,
            })

    def summarize_actions(self):
        """Group history by action type and dispatch to the summarizers.

        NOTE(review): groupby without a pre-sort only groups *consecutive*
        actions of the same type -- presumably intended so chronological
        runs stay separate; confirm.
        """
        for key, actions in groupby(self.history, key=lambda x: x['action_type']):
            actions = list(actions)
            if key == 'MOVE':
                self.summarize_moves(actions)
            elif key == 'ATTACK':
                self.summarize_attacks(actions)

    def end_turn_status(self):
        """Summarize the recorded actions and return the trace."""
        self.summarize_actions()
        return self.trace
Abstract the actions that take place during a turn, and when it finishes, return a summarized status of what happened so the engine can trace it.
62599038b5575c28eb71358a
class AccountList(msrest.serialization.Model):
    """List of Accounts.

    :param next_link: The link used to get the next page of Accounts list.
    :type next_link: str
    :param value: List of Accounts.
    :type value: list[~device_update.models.Account]
    """

    # msrest attribute -> wire-format key/type mapping.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[Account]'},
    }

    def __init__(
        self,
        *,
        next_link: Optional[str] = None,
        value: Optional[List["Account"]] = None,
        **kwargs
    ):
        super(AccountList, self).__init__(**kwargs)
        self.next_link = next_link
        self.value = value
List of Accounts. :param next_link: The link used to get the next page of Accounts list. :type next_link: str :param value: List of Accounts. :type value: list[~device_update.models.Account]
62599038287bf620b6272d6b
class Servlet(object):
    """A service addressable with a URL.

    Subclasses override the lifecycle hooks below; every hook defaults
    to a no-op.
    """

    def onServletInit(self, url, runtime):
        """Called when the servlet at *url* is initialised."""
        pass

    def onServletError(self, url, error):
        """Called when the servlet at *url* raises *error*."""
        pass

    def onServletEnd(self, url):
        """Called when the servlet at *url* is shut down."""
        pass
A service addressable with a URL
625990386fece00bbacccb2d
class UploadInfoAPI(BaseDetailView):
    """Retrieve info needed to upload a file: presigned URLs, the object
    key, and (for multipart uploads) an upload id."""

    schema = UploadInfoSchema()
    permission_classes = [ProjectTransferPermission]
    http_method_names = ['get']

    def _get(self, params):
        expiration = params['expiration']
        num_parts = params['num_parts']
        project = params['project']
        media_id = params.get('media_id')
        file_id = params.get('file_id')
        filename = params.get('filename')
        if os.getenv('REQUIRE_HTTPS') == 'TRUE':
            PROTO = 'https'
        else:
            PROTO = 'http'
        project_obj = Project.objects.get(pk=project)
        organization = project_obj.organization.pk
        # Default object name is a fresh uuid; a caller-supplied filename
        # gets a random suffix to avoid collisions.
        name = str(uuid1())
        if filename:
            name = filename
            rand_str = ''.join(random.SystemRandom().choice(string.ascii_letters) for _ in range(10))
            components = os.path.splitext(name)
            name = f"{components[0]}_{rand_str}{components[1]}"
        if media_id is None and file_id is None:
            # Plain upload: date/org/project/user-scoped staging key in the
            # upload bucket.
            today = datetime.datetime.now().strftime('%Y-%m-%d')
            user = self.request.user.pk
            key = f"_uploads/{today}/{organization}/{project}/{user}/{name}"
            tator_store = get_tator_store(project_obj.get_bucket(upload=True), upload=True)
        elif media_id is not None and file_id is not None:
            # The two destinations are mutually exclusive.
            raise ValueError(f"Both a file_id and media_id was provided!")
        elif media_id is not None:
            # Upload attached to an existing media object.
            qs = Media.objects.filter(project=project, pk=media_id)
            if qs.exists():
                key = f"{organization}/{project}/{media_id}/{name}"
            else:
                raise ValueError(f"Media ID {media_id} does not exist in project {project}!")
            tator_store = get_tator_store(project_obj.bucket)
        elif file_id is not None:
            # Upload attached to an existing generic file object.
            qs = File.objects.filter(project=project, pk=file_id)
            if qs.exists():
                key = f"{organization}/{project}/files/{file_id}/{name}"
            else:
                raise ValueError(f"File ID {file_id} does not exist in project {project}!")
            tator_store = get_tator_store(project_obj.bucket)
        urls, upload_id = tator_store.get_upload_urls(
            key, expiration, num_parts, self.request.build_absolute_uri("/")[:-1]
        )
        # Rewrite presigned URLs to the externally visible host when the
        # default (non project-specific) bucket is fronted by one.
        if tator_store.external_host and project_obj.bucket is None:
            urls = [
                urlunsplit(urlsplit(url)._replace(netloc=tator_store.external_host, scheme=PROTO))
                for url in urls
            ]
        return {'urls': urls, 'key': key, 'upload_id': upload_id}
Retrieve info needed to upload a file.
6259903891af0d3eaad3afb4
class Spider(Worker):
    """Fetches webpages from the 'spider' queue and stores a representation
    for the search engine in the database."""

    queue = 'spider'
    # How long until a successfully crawled URL is crawled again.
    repeat_delta = timedelta(days=7)
    headers = {
        'user-agent': 'PyBot/1.0'
    }

    def work(self):
        """Reserve one job, crawl its URL, store the document, and
        reschedule the URL for a future re-crawl.

        Returns False when no job is available or the URL must be skipped.

        Fix: the job reservation used to sit inside the try block, so an
        exception raised by ``reserve_job`` made the handler reference the
        unbound name ``job`` (UnboundLocalError masking the real error).
        Reserving before the try removes that failure mode.
        """
        job = self.jobs.reserve_job(self.queue)
        if job is False:
            return False
        try:
            url = Url.find(job.payload['url_id'])
            if not url or not can_crawl_url(url):
                # Unknown or disallowed URL: drop the job for good.
                self.jobs.clear_job(job)
                return False
            response = self.fetch(url)
            doc = Document.from_response(response, url)
            # Replace any previous documents stored for this URL.
            doc.purge_docs_for_url(url)
            doc.insert()
            if doc.can_index:
                doc.discover_urls()
                doc.discover_excerpts()
                doc.discover_images()
            recrawl_at = datetime.now() + self.repeat_delta
            self.jobs.reschedule_job(job, recrawl_at)
        except Exception:
            # Release (not clear) so the job can be retried later; bare
            # raise preserves the original traceback.
            self.jobs.release_job(job)
            print("Releasing job %d because an exception occurred" % job.id)
            raise

    def fetch(self, url):
        """GET the url; wrap connection failures in a generic Exception."""
        try:
            return requests.get(url.geturl(), headers=self.headers)
        except requests.exceptions.ConnectionError as e:
            print(e)
            raise Exception("HTTP Connection error, bailing out.")
Fetches webpages from the 'spider' queue, and stores a representation for our search engine in the database.
62599038287bf620b6272d6c
class City(Base):
    """SQLAlchemy declarative model for the ``cities`` table; each city
    references its owning state through ``state_id``."""

    __tablename__ = 'cities'
    id = Column(Integer, primary_key=True, nullable=False)
    name = Column(String(128), nullable=False)
    # Foreign key to the owning state (states.id).
    state_id = Column(Integer, ForeignKey('states.id'), nullable=False)
City model on the declarative Base, with a foreign key to the states table
6259903807d97122c4217e1f
class HelloAPIView(APIView):
    """Test API View demonstrating the basic HTTP handler methods."""

    serializer_class = serializers.HelloSerializer

    def get(self, request, format=None):
        """Return a list of APIView features."""
        an_apiview = [
            'Uses HTTP methods as functions (get, post, patch, put, delete)',
            'Is similar to a traditional Django View',
            'Gives you the most control over your logic',
            'Is mapped manually to URLs',
        ]
        return Response({'message': 'Hello', 'an_apiview': an_apiview})

    def post(self, request):
        """Create a hello message greeting the posted name."""
        serializer = serializers.HelloSerializer(data=request.data)
        if serializer.is_valid():
            name = serializer.data.get('name')
            message = 'Hello {}'.format(name)
            return Response({'message': message})
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    # Fix for put/patch/delete: {'method: put'} was a *set* literal
    # containing one string (a typo), not a mapping; return proper
    # key/value payloads instead.
    def put(self, request, pk=None):
        """Handle updating an object."""
        return Response({'method': 'put'})

    def patch(self, request, pk=None):
        """Handle a partial update of an object."""
        return Response({'method': 'patch'})

    def delete(self, request, pk=None):
        """Handle deleting an object."""
        return Response({'method': 'delete'})
Test API View
625990388e05c05ec3f6f71c
class TestEnums(unittest.TestCase): <NEW_LINE> <INDENT> def test_core_types(self): <NEW_LINE> <INDENT> self.assertEqual(len(CoreTypes), 6) <NEW_LINE> self.assertEqual(CoreTypes.ENUM_PAIR_SPEC.sym, 'EnumPairSpec') <NEW_LINE> self.assertEqual(CoreTypes.PROTO_SPEC.sym, 'ProtoSpec') <NEW_LINE> self.assertEqual(len(CoreTypes), 6) <NEW_LINE> for _ in CoreTypes: <NEW_LINE> <INDENT> self.assertEqual(CoreTypes.from_sym(_.sym), _) <NEW_LINE> <DEDENT> <DEDENT> def test_quants(self): <NEW_LINE> <INDENT> self.assertEqual(len(Quants), 4) <NEW_LINE> self.assertEqual(Quants.REQUIRED.sym, '') <NEW_LINE> self.assertEqual(Quants.OPTIONAL.sym, '?') <NEW_LINE> self.assertEqual(Quants.STAR.sym, '*') <NEW_LINE> self.assertEqual(Quants.PLUS.sym, '+') <NEW_LINE> self.assertEqual(len(Quants), 4)
Test enumerations defined in fieldz package.
6259903873bcbd0ca4bcb40b
class TwitterJSHelpers(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def context(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> c = toolkit.c.pylons.__dict__ <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> c = dict(toolkit.c.pylons) <NEW_LINE> <DEDENT> return c <NEW_LINE> <DEDENT> def _get_package(self, package_name_or_id): <NEW_LINE> <INDENT> return get_action('package_show')(self.context, { 'id': package_name_or_id }) <NEW_LINE> <DEDENT> def _is_new(self, package_id): <NEW_LINE> <INDENT> revisions = get_action('package_activity_list')(self.context, { 'id': package_id }) <NEW_LINE> return len(revisions) <= 3 <NEW_LINE> <DEDENT> def tweet_ready(self, package_id): <NEW_LINE> <INDENT> in_session = session.pop('twitter_is_suitable', '') == package_id <NEW_LINE> return in_session <NEW_LINE> <DEDENT> def get_tweet(self, package_id): <NEW_LINE> <INDENT> return twitter_parsers.generate_tweet(self.context, package_id, self._is_new(package_id))
A class defining various methods to pass into the templates as helpers.
62599038e76e3b2f99fd9b8f
class AutoEstimator(EntropyEstimator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.estimator = None <NEW_LINE> self.k = None <NEW_LINE> <DEDENT> def guess(self, nk, k=None, zk=None): <NEW_LINE> <INDENT> if k is not None: <NEW_LINE> <INDENT> self.k = k <NEW_LINE> self.estimator = Nsb() <NEW_LINE> return <NEW_LINE> <DEDENT> if zk is None: <NEW_LINE> <INDENT> counts = CountsDistribution().fit(nk) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> counts = CountsDistribution(nk=nk, zk=zk) <NEW_LINE> <DEDENT> if not counts.coincidences: <NEW_LINE> <INDENT> logging.warning( 'Insufficient data (no coincidences found in counts). ' 'Return plugin estimate.') <NEW_LINE> self.k = None <NEW_LINE> self.estimator = Plugin() <NEW_LINE> return <NEW_LINE> <DEDENT> if counts.sampling_ratio < 0.1: <NEW_LINE> <INDENT> self.k = None <NEW_LINE> self.estimator = AsymptoticNsb() <NEW_LINE> return <NEW_LINE> <DEDENT> self.k = guess_alphabet_size(nk=nk, zk=zk) <NEW_LINE> self.estimator = Nsb() <NEW_LINE> <DEDENT> @fit_function <NEW_LINE> def fit(self, nk, k=None, zk=None): <NEW_LINE> <INDENT> self.guess(nk=nk, k=k, zk=zk) <NEW_LINE> self.estimator.fit(nk=nk, k=self.k, zk=zk) <NEW_LINE> self.estimate_ = self.estimator.estimate_ <NEW_LINE> self.err_ = self.estimator.err_ <NEW_LINE> return self
Select the best estimator for the input data.
62599038796e427e5384f8ff
class TestWantedBuilder(unittest.TestCase): <NEW_LINE> <INDENT> @patch('mozci.platforms.fetch_allthethings_data') <NEW_LINE> def test_pgo(self, fetch_allthethings_data): <NEW_LINE> <INDENT> fetch_allthethings_data.return_value = MOCK_ALLTHETHINGS <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-central pgo test mochitest-1'), False) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-aurora pgo test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-inbound pgo test mochitest-1'), False) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-beta pgo test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-release pgo test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-esr38 pgo test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-esr45 pgo test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 repo pgo test mochitest-1'), False) <NEW_LINE> with pytest.raises(MozciError): <NEW_LINE> <INDENT> _wanted_builder('Platform1 non-existent-repo1 pgo test mochitest-1') <NEW_LINE> <DEDENT> <DEDENT> @patch('mozci.platforms.fetch_allthethings_data') <NEW_LINE> def test_opt(self, fetch_allthethings_data): <NEW_LINE> <INDENT> fetch_allthethings_data.return_value = MOCK_ALLTHETHINGS <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-central opt test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-aurora opt test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-inbound opt test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-beta opt test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-release opt test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-esr38 opt test 
mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 mozilla-esr45 opt test mochitest-1'), True) <NEW_LINE> self.assertEquals( _wanted_builder('Platform1 repo opt test mochitest-1'), True) <NEW_LINE> with pytest.raises(MozciError): <NEW_LINE> <INDENT> _wanted_builder('Platform1 non-existent-repo2 opt test mochitest-1')
Test _wanted_builder with mock data.
62599038baa26c4b54d5042b
class MSDClassifier(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, num_classes): <NEW_LINE> <INDENT> super(MSDClassifier, self).__init__() <NEW_LINE> self.features = nn.Sequential() <NEW_LINE> self.features.add_module("conv1", conv3x3_block( in_channels=in_channels, out_channels=in_channels, stride=2)) <NEW_LINE> self.features.add_module("conv2", conv3x3_block( in_channels=in_channels, out_channels=in_channels, stride=2)) <NEW_LINE> self.features.add_module("pool", nn.AvgPool2d( kernel_size=2, stride=2)) <NEW_LINE> self.output = nn.Linear( in_features=in_channels, out_features=num_classes) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.features(x) <NEW_LINE> x = x.view(x.size(0), -1) <NEW_LINE> x = self.output(x) <NEW_LINE> return x
MSDNet classifier. Parameters: ---------- in_channels : int Number of input channels. num_classes : int Number of classification classes.
625990381d351010ab8f4c9d
class ItemAssigned(TimestampedVersionedEntity.Event): <NEW_LINE> <INDENT> def __init__(self, item, index, *args, **kwargs): <NEW_LINE> <INDENT> kwargs['item'] = item <NEW_LINE> super(ItemAssigned, self).__init__(originator_version=index, *args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def item(self): <NEW_LINE> <INDENT> return self.__dict__['item'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def index(self): <NEW_LINE> <INDENT> return self.originator_version
Occurs when an item is set at a position in an array.
62599038be383301e0254999
class OpenInvoice(Wizard): <NEW_LINE> <INDENT> __name__ = 'project.open_invoice' <NEW_LINE> start_state = 'open_' <NEW_LINE> open_ = StateAction('account_invoice.act_invoice_form') <NEW_LINE> def do_open_(self, action): <NEW_LINE> <INDENT> pool = Pool() <NEW_LINE> Work = pool.get('project.work') <NEW_LINE> works = Work.search([ ('parent', 'child_of', Transaction().context['active_ids']), ]) <NEW_LINE> invoice_ids = set() <NEW_LINE> for work in works: <NEW_LINE> <INDENT> if work.invoice_line and work.invoice_line.invoice: <NEW_LINE> <INDENT> invoice_ids.add(work.invoice_line.invoice.id) <NEW_LINE> <DEDENT> for twork in work.timesheet_works: <NEW_LINE> <INDENT> for timesheet_line in twork.timesheet_lines: <NEW_LINE> <INDENT> if (timesheet_line.invoice_line and timesheet_line.invoice_line.invoice): <NEW_LINE> <INDENT> invoice_ids.add(timesheet_line.invoice_line.invoice.id) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if work.invoiced_progress: <NEW_LINE> <INDENT> for progress in work.invoiced_progress: <NEW_LINE> <INDENT> invoice_ids.add(progress.invoice_line.invoice.id) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> encoder = PYSONEncoder() <NEW_LINE> action['pyson_domain'] = encoder.encode( [('id', 'in', list(invoice_ids))]) <NEW_LINE> action['pyson_search_value'] = encoder.encode([]) <NEW_LINE> return action, {}
Open Invoice
6259903826238365f5fadcd8
class SizedParent: <NEW_LINE> <INDENT> def AddChild(self, child): <NEW_LINE> <INDENT> sizer = self.GetSizer() <NEW_LINE> nolog = wx.LogNull() <NEW_LINE> item = sizer.Add(child) <NEW_LINE> del nolog <NEW_LINE> item.SetUserData({"HGrow":0, "VGrow":0}) <NEW_LINE> child.SetDefaultSizerProps() <NEW_LINE> <DEDENT> def GetSizerType(self): <NEW_LINE> <INDENT> return self.sizerType <NEW_LINE> <DEDENT> def SetSizerType(self, type, options={}): <NEW_LINE> <INDENT> sizer = None <NEW_LINE> self.sizerType = type <NEW_LINE> if type == "horizontal": <NEW_LINE> <INDENT> sizer = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> <DEDENT> elif type == "vertical": <NEW_LINE> <INDENT> sizer = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> <DEDENT> elif type == "form": <NEW_LINE> <INDENT> sizer = wx.FlexGridSizer(0, 2, 0, 0) <NEW_LINE> <DEDENT> elif type == "table": <NEW_LINE> <INDENT> rows = cols = 0 <NEW_LINE> if 'rows' in options: <NEW_LINE> <INDENT> rows = int(options['rows']) <NEW_LINE> <DEDENT> if 'cols' in options: <NEW_LINE> <INDENT> cols = int(options['cols']) <NEW_LINE> <DEDENT> sizer = TableSizer(rows, cols) <NEW_LINE> <DEDENT> elif type == "grid": <NEW_LINE> <INDENT> sizer = wx.FlexGridSizer(0, 0, 0, 0) <NEW_LINE> if 'rows' in options: <NEW_LINE> <INDENT> sizer.SetRows(int(options['rows'])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sizer.SetRows(0) <NEW_LINE> <DEDENT> if 'cols' in options: <NEW_LINE> <INDENT> sizer.SetCols(int(options['cols'])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sizer.SetCols(0) <NEW_LINE> <DEDENT> if 'growable_row' in options: <NEW_LINE> <INDENT> row, proportion = options['growable_row'] <NEW_LINE> sizer.SetGrowableRow(row, proportion) <NEW_LINE> <DEDENT> if 'growable_col' in options: <NEW_LINE> <INDENT> col, proportion = options['growable_col'] <NEW_LINE> sizer.SetGrowableCol(col, proportion) <NEW_LINE> <DEDENT> if 'hgap' in options: <NEW_LINE> <INDENT> sizer.SetHGap(options['hgap']) <NEW_LINE> <DEDENT> if 'vgap' in options: <NEW_LINE> <INDENT> 
sizer.SetVGap(options['vgap']) <NEW_LINE> <DEDENT> <DEDENT> if sizer: <NEW_LINE> <INDENT> self._SetNewSizer(sizer) <NEW_LINE> <DEDENT> <DEDENT> def _DetachFromSizer(self, sizer): <NEW_LINE> <INDENT> props = {} <NEW_LINE> for child in self.GetChildren(): <NEW_LINE> <INDENT> csp = child.GetSizerProps() <NEW_LINE> if csp is not None: <NEW_LINE> <INDENT> props[child.GetId()] = csp <NEW_LINE> self.GetSizer().Detach(child) <NEW_LINE> <DEDENT> <DEDENT> return props <NEW_LINE> <DEDENT> def _AddToNewSizer(self, sizer, props): <NEW_LINE> <INDENT> for child in self.GetChildren(): <NEW_LINE> <INDENT> csp = props.get(child.GetId(), None) <NEW_LINE> if csp is not None: <NEW_LINE> <INDENT> self.GetSizer().Add(child) <NEW_LINE> child.SetSizerProps(csp)
Mixin class for some methods used by the ``Sized*`` classes.
625990388a349b6b436873c5
class UserViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all().order_by('-date_joined') <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> permission_classes = [permissions.IsAuthenticatedOrReadOnly]
API endpoint that allows users to be viewed or edited.
6259903894891a1f408b9fb9
class PersonDetailView(LinksMixin, SeriesMixin, DetailView): <NEW_LINE> <INDENT> model = Person <NEW_LINE> def get_links(self): <NEW_LINE> <INDENT> links = super().get_links() <NEW_LINE> if self.series: <NEW_LINE> <INDENT> links.append(Link(href=self.series.get_absolute_url(), rel="feed")) <NEW_LINE> <DEDENT> return links
Information about a person (only allowed if that person has a slug).
6259903850485f2cf55dc103
class Proxy(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ocupied = 'No' <NEW_LINE> self.worker = None <NEW_LINE> <DEDENT> def work(self): <NEW_LINE> <INDENT> print('Checking if worker is available') <NEW_LINE> if self.ocupied == 'No': <NEW_LINE> <INDENT> self.worker = Worker() <NEW_LINE> time.sleep(2) <NEW_LINE> self.ocupied = 'Yes' <NEW_LINE> self.worker.newWork() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(2) <NEW_LINE> print("Worker is Busy")
docstring for Proxy
62599038cad5886f8bdc593e
class WaveWriteCallback(SpeechCallback): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.file = None <NEW_LINE> self.filename = 'test.wav' <NEW_LINE> <DEDENT> def set(self, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.close() <NEW_LINE> <DEDENT> def _open(self): <NEW_LINE> <INDENT> if self.file: <NEW_LINE> <INDENT> self.file.close() <NEW_LINE> <DEDENT> self.file = wave.open(self.filename, 'wb') <NEW_LINE> self.file.setnchannels(1) <NEW_LINE> self.file.setsampwidth(self.sample_size) <NEW_LINE> self.file.setframerate(self._sample_rate) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.file: <NEW_LINE> <INDENT> self.file.close() <NEW_LINE> self.file = None <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, samples, count, user_data): <NEW_LINE> <INDENT> if not self.file: <NEW_LINE> <INDENT> self._open() <NEW_LINE> <DEDENT> self.file.writeframes(string_at(samples, count * self.sample_size)) <NEW_LINE> return True
Callback that writes sound to wave file.
625990389b70327d1c57ff0a
class PredictionSetBase: <NEW_LINE> <INDENT> def __init__(self, definition, path, cutoff_point, prediction_start_day, prediction_interval): <NEW_LINE> <INDENT> self.definition = definition <NEW_LINE> self.path = path <NEW_LINE> self.cutoff_point = cutoff_point <NEW_LINE> self.prediction_start_day = prediction_start_day <NEW_LINE> self.prediction_interval = prediction_interval <NEW_LINE> self.data_manager = DataManager(self.path) <NEW_LINE> <DEDENT> def get_labels(self): <NEW_LINE> <INDENT> temp_labels = self.data_manager.get_data_for_variable(self.definition) <NEW_LINE> labels = np.zeros((temp_labels.shape[0], 1)) <NEW_LINE> for i in range(len(temp_labels)): <NEW_LINE> <INDENT> labels[i, 0] = int(np.any( temp_labels[ i, self.cutoff_point + self.prediction_start_day: self.cutoff_point + self.prediction_start_day + self.prediction_interval ])) <NEW_LINE> <DEDENT> return labels <NEW_LINE> <DEDENT> def get_features(self): <NEW_LINE> <INDENT> raise NotImplementedError("Use a subclass")
A class that receives as input the processed variables and the definition that you want classification for and returns the data either in the correct format for prediction (either using a fixed cut-off point or a sliding window approach. It also sets a prediction interval in the future where you will do classification for
6259903891af0d3eaad3afb6
@admin.register(models.NamesCd) <NEW_LINE> class NamesCdAdmin(BaseAdmin): <NEW_LINE> <INDENT> list_display = ("namid", "namf", "naml",)
Custom admin for the NamesCd model.
6259903807d97122c4217e21
class GtkSettingPresenterContainer(settings.SettingPresenterContainer): <NEW_LINE> <INDENT> def _gui_on_element_value_change(self, widget, presenter, *args): <NEW_LINE> <INDENT> self._on_element_value_change(presenter) <NEW_LINE> <DEDENT> def _gui_on_element_value_change_streamline(self, widget, presenter, *args): <NEW_LINE> <INDENT> self._on_element_value_change_streamline(presenter)
This class is used to group `SettingPresenter` objects in a GTK environment.
625990381f5feb6acb163d76
class CreateUserView(generics.CreateAPIView): <NEW_LINE> <INDENT> model = get_user_model() <NEW_LINE> permission_classes = [permissions.AllowAny, ] <NEW_LINE> serializer_class = UserSerializer
Create a user.
6259903866673b3332c31579
class Status(Enum): <NEW_LINE> <INDENT> enabled = 'E' <NEW_LINE> disabled = 'D'
目前考虑两个状态, 默认为启用状态。
6259903873bcbd0ca4bcb40d
@register('Jupyter.HTML') <NEW_LINE> class HTML(_String): <NEW_LINE> <INDENT> _view_name = Unicode('HTMLView').tag(sync=True) <NEW_LINE> _model_name = Unicode('HTMLModel').tag(sync=True)
Renders the string `value` as HTML.
62599038711fe17d825e155e
class TestMeasureDiskUtil: <NEW_LINE> <INDENT> def test_measure_diskutil(self, mock_logger, mock_global_config): <NEW_LINE> <INDENT> MetadataExtractor.measure_diskutil() <NEW_LINE> MetadataExtractor.logging.info.assert_called_once()
Tests for MetadataExtractor.measure_diskutil
62599038a4f1c619b294f749
class Console(Immutable): <NEW_LINE> <INDENT> def print(self, msg: str = '') -> Success[None]: <NEW_LINE> <INDENT> return purify_io_bound(print)(msg) <NEW_LINE> <DEDENT> def input(self, prompt: str = '') -> Success[str]: <NEW_LINE> <INDENT> return purify_io_bound(input)(prompt)
Module that enables printing to stdout and reading from stdin
6259903830dc7b76659a09b7
class AccountError(Exception): <NEW_LINE> <INDENT> pass
Raised when the API can't locate any accounts for the user
6259903873bcbd0ca4bcb40e
class ConvModule(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, bias=False, norm_layer=None, activation='relu', inplace=True): <NEW_LINE> <INDENT> super(ConvModule, self).__init__() <NEW_LINE> assert norm_layer is None or norm_layer == 'bn_2d' or norm_layer == 'sync_bn' <NEW_LINE> self.activation = activation <NEW_LINE> conv = nn.Conv2d( in_channels, out_channels, kernel_size, stride, padding, bias=bias) <NEW_LINE> self.add_module('0', conv) <NEW_LINE> norm = BatchNorm(out_channels) <NEW_LINE> self.add_module('1', norm) <NEW_LINE> self.with_activation = activation is not None <NEW_LINE> if self.with_activation: <NEW_LINE> <INDENT> if self.activation == 'relu': <NEW_LINE> <INDENT> activate = nn.ReLU(inplace=inplace) <NEW_LINE> self.add_module('2', activate) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self._modules['0'](x) <NEW_LINE> x = self._modules['1'](x) <NEW_LINE> if self.with_activation: <NEW_LINE> <INDENT> x = self._modules['2'](x) <NEW_LINE> <DEDENT> return x
A conv block that contains conv/norm/activation layers.
62599038287bf620b6272d6f
class GF: <NEW_LINE> <INDENT> def __init__(self, v): <NEW_LINE> <INDENT> if isinstance(v, GF): <NEW_LINE> <INDENT> self.v = v.v <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.v = int(v) <NEW_LINE> <DEDENT> assert 0 <= int(self.v) < 256 <NEW_LINE> <DEDENT> def __add__(self, o): <NEW_LINE> <INDENT> o = GF(o).v <NEW_LINE> return GF(self.v ^ o) <NEW_LINE> <DEDENT> def __sub__(self, o): <NEW_LINE> <INDENT> return self + o <NEW_LINE> <DEDENT> def __eq__(self, o): <NEW_LINE> <INDENT> return self.v == GF(o).v <NEW_LINE> <DEDENT> def __pow__(self, p): <NEW_LINE> <INDENT> if self == 0: <NEW_LINE> <INDENT> return GF(0) <NEW_LINE> <DEDENT> if int(p) == 1: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> i = LOG[self.v] <NEW_LINE> i = (i * int(p)) % 255 <NEW_LINE> return GF(ANTI_LOG[i]) <NEW_LINE> <DEDENT> def mul_slow(self, b): <NEW_LINE> <INDENT> a = self.v <NEW_LINE> b = GF(b).v <NEW_LINE> p = 0 <NEW_LINE> for i in range(8): <NEW_LINE> <INDENT> if b & 1: <NEW_LINE> <INDENT> p ^= a <NEW_LINE> <DEDENT> b >>= 1 <NEW_LINE> carry = bool(a & 0b1000_0000) <NEW_LINE> a = (a << 1) & 0xFF <NEW_LINE> if carry: <NEW_LINE> <INDENT> a ^= (IRREDUCIBLE_P & 0xFF) <NEW_LINE> <DEDENT> <DEDENT> return GF(p) <NEW_LINE> <DEDENT> def __mul__(self, b): <NEW_LINE> <INDENT> if isinstance(b, int): <NEW_LINE> <INDENT> if abs(b) % 2 == 0: <NEW_LINE> <INDENT> return GF(0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> <DEDENT> if self == 0 or b == 0: <NEW_LINE> <INDENT> return GF(0) <NEW_LINE> <DEDENT> ia = LOG[self.v] <NEW_LINE> ib = LOG[GF(b).v] <NEW_LINE> return GF(ANTI_LOG[(ia + ib) % 255]) <NEW_LINE> <DEDENT> def __truediv__(self, b): <NEW_LINE> <INDENT> assert b != 0 <NEW_LINE> if self == 0: <NEW_LINE> <INDENT> return GF(0) <NEW_LINE> <DEDENT> ia = LOG[self.v] <NEW_LINE> ib = LOG[b.v] <NEW_LINE> i = ia - ib <NEW_LINE> if i < 0: <NEW_LINE> <INDENT> i += 255 <NEW_LINE> <DEDENT> return GF(ANTI_LOG[i]) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> 
return f"GF({self.v})"
Representation of an element from GF(256).
6259903830c21e258be99993
class ControlOptionsCommon(ClientOptionsBase): <NEW_LINE> <INDENT> def __init__(self, count: int=1, timeout: int=None, sync_mode: str=None, close_sleep: int=None): <NEW_LINE> <INDENT> self.count = count <NEW_LINE> self.timeout = timeout <NEW_LINE> self.sync_mode = sync_mode <NEW_LINE> self.close_sleep = close_sleep <NEW_LINE> <DEDENT> def valid_options(self) -> list: <NEW_LINE> <INDENT> return [ Prefixed('count', '--count'), Prefixed('timeout', '--timeout'), Prefixed('sync-mode', '--sync-mode'), Prefixed('close-sleep', '--close-sleep') ]
Common control options for all clients.
62599038cad5886f8bdc593f
class Mesh_Hub( Hub, Mesh_Node ): <NEW_LINE> <INDENT> def __init__( self, address, children, **args ): <NEW_LINE> <INDENT> Hub.__init__( self, address, children, **args ) <NEW_LINE> Mesh_Node.__init__( self, address )
hub with mesh to draw itself in opengl
625990389b70327d1c57ff0c
class LoadDialog(Toplevel): <NEW_LINE> <INDENT> def __init__(self, master, load_message = 'Loading', maxDots = 6): <NEW_LINE> <INDENT> assert isinstance(load_message, str) and isinstance(maxDots, int) and maxDots > 0 <NEW_LINE> Toplevel.__init__(self, master) <NEW_LINE> self.transient(master) <NEW_LINE> self.geometry(f"+{master.winfo_rootx() + 50}+{master.winfo_rooty() + 50}") <NEW_LINE> self.title('Load Dialog') <NEW_LINE> self.load_message = load_message <NEW_LINE> self.maxDots = maxDots <NEW_LINE> self.label = Label(self, text = load_message, width = 20) <NEW_LINE> self.label.pack() <NEW_LINE> self.protocol('WM_DELETE_WINDOW', self.dontClose) <NEW_LINE> self.isClose = False <NEW_LINE> threading.Thread(target = self.update).start() <NEW_LINE> <DEDENT> def setCloseEvent(self, target, args = ()): <NEW_LINE> <INDENT> threading.Thread(target = self.__task, args = (target, args)).start() <NEW_LINE> <DEDENT> def __task(self, target, args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> target(*args) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> <DEDENT> def dontClose(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> dots = 0 <NEW_LINE> while not self.isClose: <NEW_LINE> <INDENT> dots = (dots + 1) % (self.maxDots + 1) <NEW_LINE> time.sleep(0.5) <NEW_LINE> self.label['text'] = self.load_message + dots * '.' <NEW_LINE> <DEDENT> self.destroy() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.isClose = True
A simple LoadDialog
62599038d4950a0f3b111702
class LoadRawTestCase(tests.BaseTestCase): <NEW_LINE> <INDENT> def test(self): <NEW_LINE> <INDENT> expected = { 'Accessibility.Accessibility_Information.Cursor_Magnification': 'Off', 'Applications.Installer.Version': '9.0.11', 'Applications.Console.Version': '10.9', 'Locations.Automatic.Active_Location': 'Yes', 'Locations.Automatic.Services.Bluetooth_DUN.Type': 'PPP', 'Locations.Automatic.Services.USB_Ethernet.Type': 'Ethernet', 'Locations.Automatic.Services.USB_Ethernet.Hardware_(MAC)_Address': 'aa:aa:aa:aa:aa:aa'} <NEW_LINE> with open('tests/input.txt', 'r') as f: <NEW_LINE> <INDENT> result = kerfi.load_raw(f) <NEW_LINE> <DEDENT> self.logger.debug('result = %s', result) <NEW_LINE> for key, value in expected.iteritems(): <NEW_LINE> <INDENT> self.logger.debug('checking %s', key) <NEW_LINE> self.assertTrue(key in result) <NEW_LINE> self.logger.debug('%s found.', key) <NEW_LINE> self.assertEquals(value, result[key]) <NEW_LINE> self.logger.debug('%s value is equal.', key)
Test cases for kerfi.load_raw() function.
6259903891af0d3eaad3afb8
class NotificationContentViewSet(OwnerMessageViewSetMixin, CommonViewSet): <NEW_LINE> <INDENT> queryset = NotificationContent.objects.all() <NEW_LINE> serializer_class = serializers.NotificationContentSerializer <NEW_LINE> permission_classes = [permissions.DjangoModelPermissions] <NEW_LINE> filter_fields = ['title', 'contents', 'status', 'creator'] <NEW_LINE> search_fields = ['title', 'contents', 'status', 'creator']
api views for NotificationContent
62599038ec188e330fdf9a1e
class SelfPixelwiseNLLLoss(nn.Module): <NEW_LINE> <INDENT> def forward(self, o, δy=0, δx=0, global_best=False): <NEW_LINE> <INDENT> N, C, Y, X = o.shape <NEW_LINE> assert N==1 <NEW_LINE> if global_best: <NEW_LINE> <INDENT> score_00 = torch.sum(o[0, 0, ::2, ::2] + o[0, 1, ::2, 1::2] + o[0, 2, 1::2, ::2] + o[0, 3, 1::2, 1::2]).item() <NEW_LINE> score_01 = torch.sum(o[0, 0, ::2, 1::2] + o[0, 1, ::2, ::2] + o[0, 2, 1::2, 1::2] + o[0, 3, 1::2, ::2]).item() <NEW_LINE> score_10 = torch.sum(o[0, 0, 1::2, ::2] + o[0, 1, 1::2, 1::2] + o[0, 2, ::2, ::2] + o[0, 3, ::2, 1::2]).item() <NEW_LINE> score_11 = torch.sum(o[0, 0, 1::2, 1::2] + o[0, 1, 1::2, ::2] + o[0, 2, ::2, 1::2] + o[0, 3, ::2, ::2]).item() <NEW_LINE> best = np.argmax((score_00, score_01, score_10, score_11)) <NEW_LINE> δy = best//2 <NEW_LINE> δx = best%2 <NEW_LINE> <DEDENT> target = torch.zeros((N, Y, X), dtype=torch.long) <NEW_LINE> target[:, δy::2, 1-δx::2] = 1 <NEW_LINE> target[:, 1-δy::2, δx::2] = 2 <NEW_LINE> target[:, 1-δy::2, 1-δx::2] = 3 <NEW_LINE> target = target.cuda() <NEW_LINE> loss = F.nll_loss(o, target) <NEW_LINE> return loss
Modified version of nn.NLLLoss, for pixelwise auxiliary training.
62599038e76e3b2f99fd9b93
class ExponentialFamily(object): <NEW_LINE> <INDENT> def __init__(self, name=None): <NEW_LINE> <INDENT> if not name or name[-1] != '/': <NEW_LINE> <INDENT> with tf.name_scope(name or type(self).__name__) as name: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_canonical(self): <NEW_LINE> <INDENT> return self._is_canonical <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def _call(self, predicted_linear_response): <NEW_LINE> <INDENT> raise NotImplementedError('`_call` is not implemented.') <NEW_LINE> <DEDENT> def __call__(self, predicted_linear_response, name=None): <NEW_LINE> <INDENT> with self._name_scope(name, 'call', [predicted_linear_response]): <NEW_LINE> <INDENT> predicted_linear_response = tf.convert_to_tensor( predicted_linear_response, name='predicted_linear_response') <NEW_LINE> return self._call(predicted_linear_response) <NEW_LINE> <DEDENT> <DEDENT> def _log_prob(self, response, predicted_linear_response): <NEW_LINE> <INDENT> raise NotImplementedError('`_log_prob` is not implemented.') <NEW_LINE> <DEDENT> def log_prob(self, response, predicted_linear_response, name=None): <NEW_LINE> <INDENT> with self._name_scope( name, 'log_prob', [response, predicted_linear_response]): <NEW_LINE> <INDENT> dtype = common_dtype([response, predicted_linear_response]) <NEW_LINE> response = tf.convert_to_tensor( response, dtype=dtype, name='response') <NEW_LINE> predicted_linear_response = tf.convert_to_tensor( predicted_linear_response, name='predicted_linear_response') <NEW_LINE> return self._log_prob(response, predicted_linear_response) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'tfp.glm.family.{type_name}(\'{self_name}\')'.format( type_name=type(self).__name__, self_name=self.name) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<tfp.glm.family.{type_name} \'{self_name}\'>'.format( 
type_name=type(self).__name__, self_name=self.name) <NEW_LINE> <DEDENT> @contextlib.contextmanager <NEW_LINE> def _name_scope(self, name=None, default_name=None, values=None): <NEW_LINE> <INDENT> with tf.name_scope(self.name): <NEW_LINE> <INDENT> with tf.name_scope(name, default_name, values=values or []) as scope: <NEW_LINE> <INDENT> yield scope
Specifies a mean-value parameterized exponential family. Subclasses implement [exponential-family distribution]( https://en.wikipedia.org/wiki/Exponential_family) properties (e.g., `log_prob`, `variance`) as a function of a real-value which is transformed via some [link function]( https://en.wikipedia.org/wiki/Generalized_linear_model#Link_function) to be interpreted as the distribution's mean. The distribution is parameterized by this mean, i.e., "mean-value parametrized." Subclasses are typically used to specify a Generalized Linear Model (GLM). A [GLM]( https://en.wikipedia.org/wiki/Generalized_linear_model) is a generalization of linear regression which enables efficient fitting of log-likelihood losses beyond just assuming `Normal` noise. See `tfp.glm.fit` for more details. Subclasses must implement `_call`, `_log_prob`, and `_is_canonical`. In context of `tfp.glm.fit`, these functions are used to find the best fitting weights for given model matrix ("X") and responses ("Y").
6259903830c21e258be99994
@BlClassRegistry() <NEW_LINE> class MUV_MT_CopyPasteUV_SelSeqCopyUV(bpy.types.Menu): <NEW_LINE> <INDENT> bl_idname = "MUV_MT_CopyPasteUV_SelSeqCopyUV" <NEW_LINE> bl_label = "Copy UV (Selection Sequence) (Menu)" <NEW_LINE> bl_description = "Menu of Copy UV coordinate by selection sequence" <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return _is_valid_context(context) <NEW_LINE> <DEDENT> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> obj = context.active_object <NEW_LINE> bm = common.create_bmesh(obj) <NEW_LINE> uv_maps = bm.loops.layers.uv.keys() <NEW_LINE> ops = layout.operator(MUV_OT_CopyPasteUV_SelSeqCopyUV.bl_idname, text="[Default]") <NEW_LINE> ops.uv_map = "__default" <NEW_LINE> ops = layout.operator(MUV_OT_CopyPasteUV_SelSeqCopyUV.bl_idname, text="[All]") <NEW_LINE> ops.uv_map = "__all" <NEW_LINE> for m in uv_maps: <NEW_LINE> <INDENT> ops = layout.operator(MUV_OT_CopyPasteUV_SelSeqCopyUV.bl_idname, text=m) <NEW_LINE> ops.uv_map = m
Menu class: Copy UV coordinate by selection sequence
62599038b830903b9686ed3c
class Error(Exception): <NEW_LINE> <INDENT> def __init__(self, code=None, msg=None): <NEW_LINE> <INDENT> self.code = code or httplib.INTERNAL_SERVER_ERROR <NEW_LINE> self.msg = msg or httplib.responses[self.code] <NEW_LINE> Exception.__init__(self, self.msg) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr([self.code, self.msg])
Base Exception for api/handlers.
62599038507cdc57c63a5f21
class ShufflePipeline(pipeline_base.PipelineBase): <NEW_LINE> <INDENT> def run(self, job_name, mapper_params, filenames, shards=None): <NEW_LINE> <INDENT> bucket_name = mapper_params["bucket_name"] <NEW_LINE> hashed_files = yield _HashPipeline(job_name, bucket_name, filenames, shards=shards) <NEW_LINE> sorted_files = yield _SortChunksPipeline(job_name, bucket_name, hashed_files) <NEW_LINE> temp_files = [hashed_files, sorted_files] <NEW_LINE> merged_files = yield _MergePipeline(job_name, bucket_name, sorted_files) <NEW_LINE> with pipeline.After(merged_files): <NEW_LINE> <INDENT> all_temp_files = yield pipeline_common.Extend(*temp_files) <NEW_LINE> yield _GCSCleanupPipeline(all_temp_files) <NEW_LINE> <DEDENT> yield pipeline_common.Return(merged_files)
A pipeline to shuffle multiple key-value files. Args: job_name: The descriptive name of the overall job. mapper_params: parameters to use for mapper phase. filenames: list of file names to sort. Files have to be of records format defined by Files API and contain serialized kv_pb.KeyValue protocol messages. The filenames may or may not contain the GCS bucket name in their path. shards: Optional. Number of output shards to generate. Defaults to the number of input files. Returns: default: a list of filenames as string. Resulting files contain serialized kv_pb.KeyValues protocol messages with all values collated to a single key. When there is no output, an empty list from shuffle service or a list of empty files from in memory shuffler.
62599038a4f1c619b294f74a
class PerMessageBzip2(PerMessageCompress, PerMessageBzip2Mixin): <NEW_LINE> <INDENT> DEFAULT_COMPRESS_LEVEL = 9 <NEW_LINE> @classmethod <NEW_LINE> def createFromResponseAccept(Klass, isServer, accept): <NEW_LINE> <INDENT> pmce = Klass(isServer, accept.response.server_max_compress_level, accept.compressLevel if accept.compressLevel is not None else accept.response.client_max_compress_level) <NEW_LINE> return pmce <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def createFromOfferAccept(Klass, isServer, accept): <NEW_LINE> <INDENT> pmce = Klass(isServer, accept.compressLevel if accept.compressLevel is not None else accept.offer.requestMaxCompressLevel, accept.requestMaxCompressLevel) <NEW_LINE> return pmce <NEW_LINE> <DEDENT> def __init__(self, isServer, server_max_compress_level, client_max_compress_level): <NEW_LINE> <INDENT> self._isServer = isServer <NEW_LINE> self._compressor = None <NEW_LINE> self._decompressor = None <NEW_LINE> self.server_max_compress_level = server_max_compress_level if server_max_compress_level != 0 else self.DEFAULT_COMPRESS_LEVEL <NEW_LINE> self.client_max_compress_level = client_max_compress_level if client_max_compress_level != 0 else self.DEFAULT_COMPRESS_LEVEL <NEW_LINE> <DEDENT> def __json__(self): <NEW_LINE> <INDENT> return {'extension': self.EXTENSION_NAME, 'isServer': self._isServer, 'server_max_compress_level': self.server_max_compress_level, 'client_max_compress_level': self.client_max_compress_level} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "PerMessageBzip2(isServer = %s, server_max_compress_level = %s, client_max_compress_level = %s)" % (self._isServer, self.server_max_compress_level, self.client_max_compress_level) <NEW_LINE> <DEDENT> def startCompressMessage(self): <NEW_LINE> <INDENT> if self._isServer: <NEW_LINE> <INDENT> if self._compressor is None: <NEW_LINE> <INDENT> self._compressor = bz2.BZ2Compressor(self.server_max_compress_level) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if 
self._compressor is None: <NEW_LINE> <INDENT> self._compressor = bz2.BZ2Compressor(self.client_max_compress_level) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def compressMessageData(self, data): <NEW_LINE> <INDENT> return self._compressor.compress(data) <NEW_LINE> <DEDENT> def endCompressMessage(self): <NEW_LINE> <INDENT> data = self._compressor.flush() <NEW_LINE> self._compressor = None <NEW_LINE> return data <NEW_LINE> <DEDENT> def startDecompressMessage(self): <NEW_LINE> <INDENT> if self._decompressor is None: <NEW_LINE> <INDENT> self._decompressor = bz2.BZ2Decompressor() <NEW_LINE> <DEDENT> <DEDENT> def decompressMessageData(self, data): <NEW_LINE> <INDENT> return self._decompressor.decompress(data) <NEW_LINE> <DEDENT> def endDecompressMessage(self): <NEW_LINE> <INDENT> self._decompressor = None
`permessage-bzip2` WebSocket extension processor.
62599038be383301e025499d
class CalcMag(PhotCalcs): <NEW_LINE> <INDENT> def __init__(self, sed, filterDict, cosmoModel): <NEW_LINE> <INDENT> PhotCalcs.__init__(self, sed, filterDict) <NEW_LINE> self.cosmoModel = cosmoModel <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> str_msg ='\n CalcMag Object: \n' <NEW_LINE> str_msg += " Contains -\n" <NEW_LINE> str_msg += PhotCalcs.__str__(self) <NEW_LINE> str_msg += "\n Cosmological model: \n" <NEW_LINE> str_msg += " " + str(self.cosmoModel) <NEW_LINE> return str_msg <NEW_LINE> <DEDENT> def getMag(self, filtObs, z, absMag): <NEW_LINE> <INDENT> filtRF = absMag[0] <NEW_LINE> aM = absMag[1] <NEW_LINE> kc = self.kCorrectionXY(filtObs, filtRF, z) <NEW_LINE> self.cosmoModel.setEmissionRedShift(z) <NEW_LINE> dL = self.cosmoModel.LuminosityDistanceMpc() <NEW_LINE> mu = 0. <NEW_LINE> if (dL>1e-5): <NEW_LINE> <INDENT> mu = 5.*math.log10(dL) + 25. <NEW_LINE> <DEDENT> mag = aM + mu + kc <NEW_LINE> return mag
Calculate magnitudes for the given SED at redshift z, with absolute magnitude absMag in all of the filters in filterDict @param sed SED object (spectral energy distribution) @param filterDict dictionary of filters: keyword=filter filename without path or extension, value=Filter object @param cosmoModel cosmology calculator
6259903882261d6c52730788
class LeafPluggedInSensor(LeafEntity, BinarySensorEntity): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return f"{self.car.leaf.nickname} Plug Status" <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self.car.data[DATA_PLUGGED_IN] <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> if self.car.data[DATA_PLUGGED_IN]: <NEW_LINE> <INDENT> return "mdi:power-plug" <NEW_LINE> <DEDENT> return "mdi:power-plug-off"
Plugged In Sensor class.
625990386e29344779b017d9
class TestFornavWrapper(unittest.TestCase): <NEW_LINE> <INDENT> def test_fornav_swath_larger_float32(self): <NEW_LINE> <INDENT> from pyresample.ewa import fornav <NEW_LINE> swath_shape = (1600, 3200) <NEW_LINE> data_type = np.float32 <NEW_LINE> rows = np.empty(swath_shape, dtype=np.float32) <NEW_LINE> rows[:] = np.linspace(-500, 2500, 1600)[:, None] <NEW_LINE> cols = np.empty(swath_shape, dtype=np.float32) <NEW_LINE> cols[:] = np.linspace(-2500, 1500, 3200) <NEW_LINE> data = np.ones(swath_shape, dtype=data_type) <NEW_LINE> out = np.empty((1000, 1000), dtype=data_type) <NEW_LINE> area = None <NEW_LINE> grid_points_covered, out_res = fornav(cols, rows, area, data, rows_per_scan=16, out=out) <NEW_LINE> self.assertIs(out, out_res) <NEW_LINE> self.assertEqual(grid_points_covered, out.size, msg="Not all grid pixels were filled") <NEW_LINE> self.assertTrue(((out == 1) | np.isnan(out)).all(), msg="Unexpected interpolation values were returned")
Test the function wrapping the lower-level fornav code.
625990381d351010ab8f4ca3
class GPXandKMLExport_TheWayIWantItQGIS2DialogTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_icon_png(self): <NEW_LINE> <INDENT> path = ':/plugins/GPXandKMLExport_TheWayIWantItQGIS2/icon.png' <NEW_LINE> icon = QIcon(path) <NEW_LINE> self.assertFalse(icon.isNull())
Test rerources work.
62599038a8ecb033258723a8
class PygameSystem( PygameFpsLimitMixin, PygameAudioMixin, PygameDisplayMixin, PygameJoystickMixin, ): <NEW_LINE> <INDENT> pass
This mixin simply combines the Display, Joystick, FpsLimit, and Audio mixins to provide a minimally-functional, bare-bones interactive emulator.
62599038b57a9660fecd2c04
class itkCenteredAffineTransformD3(itkAffineTransformPython.itkAffineTransformD3): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> SpaceDimension = _itkCenteredAffineTransformPython.itkCenteredAffineTransformD3_SpaceDimension <NEW_LINE> ParametersDimension = _itkCenteredAffineTransformPython.itkCenteredAffineTransformD3_ParametersDimension <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkCenteredAffineTransformPython.itkCenteredAffineTransformD3___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def GetInverse(self, *args): <NEW_LINE> <INDENT> return _itkCenteredAffineTransformPython.itkCenteredAffineTransformD3_GetInverse(self, *args) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkCenteredAffineTransformPython.delete_itkCenteredAffineTransformD3 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkCenteredAffineTransformPython.itkCenteredAffineTransformD3_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkCenteredAffineTransformPython.itkCenteredAffineTransformD3_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkCenteredAffineTransformD3.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
Proxy of C++ itkCenteredAffineTransformD3 class
6259903876d4e153a661db37
class Square(Rectangle): <NEW_LINE> <INDENT> def __init__(self, size): <NEW_LINE> <INDENT> self.__size = size <NEW_LINE> <DEDENT> def integer_validator(self, size): <NEW_LINE> <INDENT> if type(self.__size) is not int: <NEW_LINE> <INDENT> raise TypeError("{} must be an integer".format(self.__size)) <NEW_LINE> <DEDENT> if value <= 0: <NEW_LINE> <INDENT> raise ValueError("{} must be greater than 0".format(self.__size)) <NEW_LINE> <DEDENT> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__size * self.__size <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return("[Square] {}/{}".format(self.__size, self.__size))
Class Square - Type - Rectangle
625990386fece00bbacccb35
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = util.Counter() <NEW_LINE> for p in self.legalPositions: self.beliefs[p] = 1.0 <NEW_LINE> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observe(self, observation, gameState): <NEW_LINE> <INDENT> noisyDistance = observation <NEW_LINE> emissionModel = busters.getObservationDistribution(noisyDistance) <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> for p in self.legalPositions: <NEW_LINE> <INDENT> trueDistance = util.manhattanDistance(p, pacmanPosition) <NEW_LINE> if emissionModel[trueDistance] > 0: <NEW_LINE> <INDENT> allPossible[p] = self.beliefs[p] * emissionModel[trueDistance] <NEW_LINE> <DEDENT> <DEDENT> allPossible.normalize() <NEW_LINE> self.beliefs = allPossible <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> "*** YOUR CODE HERE ***" <NEW_LINE> nBelief = util.Counter() <NEW_LINE> for oldPos in self.legalPositions: <NEW_LINE> <INDENT> newPosDist = self.getPositionDistribution(self.setGhostPosition(gameState, oldPos)) <NEW_LINE> for newPos, p in newPosDist.items(): <NEW_LINE> <INDENT> nBelief[newPos] += self.beliefs[oldPos]*p <NEW_LINE> <DEDENT> <DEDENT> self.beliefs = nBelief <NEW_LINE> self.beliefs.normalize() <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs
The exact dynamic inference module should use forward-algorithm updates to compute the exact belief function at each time step.
6259903891af0d3eaad3afbc
class TestDetailedPlugin: <NEW_LINE> <INDENT> def test_creates(self, detailed_plugin: DetailedPlugin): <NEW_LINE> <INDENT> assert detailed_plugin
Tests for the DetailedPlugin model.
6259903873bcbd0ca4bcb413
class SortingHelpFormatter(HelpFormatter): <NEW_LINE> <INDENT> def add_arguments(self, actions): <NEW_LINE> <INDENT> actions = sorted(actions, key=attrgetter('option_strings')) <NEW_LINE> super(SortingHelpFormatter, self).add_arguments(actions)
Sort options alphabetically when -h prints usage See http://stackoverflow.com/questions/12268602
62599038d6c5a102081e32b1
class Monochromator(base.Monochromator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Monochromator, self).__init__() <NEW_LINE> self._energy = 100 * q.keV <NEW_LINE> <DEDENT> async def _get_energy_real(self): <NEW_LINE> <INDENT> return self._energy <NEW_LINE> <DEDENT> async def _set_energy_real(self, energy): <NEW_LINE> <INDENT> self._energy = energy
Monochromator class implementation.
6259903871ff763f4b5e8924
class TimeCycle(PmmlBinding): <NEW_LINE> <INDENT> def toPFA(self, options, context): <NEW_LINE> <INDENT> raise NotImplementedError
Represents a <TimeCycle> tag and provides methods to convert to PFA.
62599038796e427e5384f907
class startup_binder_drone(bee.drone): <NEW_LINE> <INDENT> def on_start(self): <NEW_LINE> <INDENT> for entity_name in self.get_entity_names(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> hivemap_name = self.get_hivemap(entity_name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.ensure_hivemap_registered(hivemap_name) <NEW_LINE> self.launch_hive(hivemap_name, entity_name) <NEW_LINE> <DEDENT> <DEDENT> def ensure_hivemap_registered(self, hivemap_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.get_hive(hivemap_name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> hivemap_ = Spyder.Hivemap.fromfile(hivemap_name) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> print("Couldn't find hivemap {} to launch".format(hivemap_name)) <NEW_LINE> return <NEW_LINE> <DEDENT> wrapper_hive = type(hivemap_name, (hivemapinithive,), dict(hivemap=hivemap_)) <NEW_LINE> self.register_hive(hivemap_name, wrapper_hive) <NEW_LINE> <DEDENT> <DEDENT> def set_get_hivemap(self, get_hivemap): <NEW_LINE> <INDENT> self.get_hivemap = get_hivemap <NEW_LINE> <DEDENT> def set_get_entity_names(self, plugin): <NEW_LINE> <INDENT> self.get_entity_names = plugin <NEW_LINE> <DEDENT> def set_launch_hive(self, launch_hive): <NEW_LINE> <INDENT> self.launch_hive = launch_hive <NEW_LINE> <DEDENT> def set_register_hive(self, register_hive): <NEW_LINE> <INDENT> self.register_hive = register_hive <NEW_LINE> <DEDENT> def set_get_hive(self, get_hive): <NEW_LINE> <INDENT> self.get_hive = get_hive <NEW_LINE> <DEDENT> def place(self): <NEW_LINE> <INDENT> socket = socket_single_required(self.set_get_entity_names) <NEW_LINE> libcontext.socket(("entity", "names"), socket) <NEW_LINE> listener = plugin_single_required(("trigger", self.on_start, "start", 9)) <NEW_LINE> libcontext.plugin(("evin", "listener"), listener) <NEW_LINE> socket = socket_single_required(self.set_get_hivemap) <NEW_LINE> libcontext.socket(("entity", "hivemap"), 
socket) <NEW_LINE> socket = socket_single_required(self.set_launch_hive) <NEW_LINE> libcontext.socket(("process", "launch"), socket) <NEW_LINE> socket = socket_single_required(self.set_register_hive) <NEW_LINE> libcontext.socket("register_hive", socket) <NEW_LINE> socket = socket_single_required(self.set_get_hive) <NEW_LINE> libcontext.socket("get_hive", socket)
Provides plugins for scene-object binding on startup
625990381d351010ab8f4ca5