code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class InvalidObjectException(TException):
    """Thrift exception raised when an invalid object is supplied.

    Attributes:
        - message: human-readable description of the problem
    """

    # Thrift field spec: field id 1 is a STRING named 'message'.
    thrift_spec = ( None, (1, TType.STRING, 'message', None, None, ), )

    def __init__(self, message=None,):
        self.message = message

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the accelerated binary
        # protocol, a C-readable transport, and fastbinary are all available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, reading known ids and
        # skipping everything else (forward compatibility).
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.message = iprot.readString();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('InvalidObjectException')
        # Optional field: only written when set.
        if self.message is not None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No validation constraints for this struct.
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        # NOTE(review): dict.iteritems() is Python 2 only — under Python 3
        # this would need .items().
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SchoolGraph(models.Model):
    """School graph of user activities.

    A separate graph table allows faster loading while updates run on
    this table.
    """

    # One graph row per School (shares the school's primary key).
    school = models.OneToOneField(School, primary_key=True, related_name='graph')
    up_votes = models.IntegerField(default=0)
    down_votes = models.IntegerField(default=0)
    coders_count = models.IntegerField(default=0)
    updated_on = models.DateTimeField(auto_now=True)  # touched on every save
    updated_by = models.CharField(max_length=100)

    class Meta:
        app_label = 'app'
        db_table = 'app_school_graph'

    def __unicode__(self):
        # Python 2 style display name.
        return unicode(self.school)

    def guru_level(self):
        """Return a 0-100 integer level derived from up/down votes."""
        # NOTE(review): assumes stats.rating() returns a 0..1 score — confirm.
        return int(stats.rating(self.up_votes, self.down_votes) * 100)
class GetOrderBook(View):
    """Show open and partial orders on the books for the supplied pair."""

    @staticmethod
    def get(request, base_currency, relative_currency):
        """Return the order book for the currency pair as JSON.

        Cancelled and complete orders are excluded; bids are ordered by
        ascending price, asks by descending price.
        """
        try:
            # Case-insensitive lookup of the pair by its currency codes.
            pair = CurrencyPair.objects.get(
                base_currency__code__iexact=base_currency,
                relative_currency__code__iexact=relative_currency
            )
        except ObjectDoesNotExist:
            return JsonResponse(
                {'success': False, 'message': {'pair': ['pair not found']}}
            )
        bid_orders = Order.objects.exclude(
            state='cancelled'
        ).exclude(
            state='complete'
        ).filter(
            pair=pair, order_type='bid'
        ).order_by(
            'price'
        )
        ask_orders = Order.objects.exclude(
            state='cancelled'
        ).exclude(
            state='complete'
        ).filter(
            pair=pair, order_type='ask'
        ).order_by(
            '-price'
        )
        # Serialize each order into a plain dict for the JSON payload.
        return JsonResponse(
            {
                'success': True,
                'message': {
                    'bid_orders': [
                        {
                            'order_id': order.id,
                            'order_type': order.order_type,
                            'amount': order.amount,
                            'original_amount': order.original_amount,
                            'price': order.price,
                            'state': order.state,
                            'pair': '{}/{}'.format(
                                order.pair.base_currency.code.lower(),
                                order.pair.relative_currency.code.lower()
                            )
                        } for order in bid_orders
                    ],
                    'ask_orders': [
                        {
                            'order_id': order.id,
                            'order_type': order.order_type,
                            'amount': order.amount,
                            'original_amount': order.original_amount,
                            'price': order.price,
                            'state': order.state,
                            'pair': '{}/{}'.format(
                                order.pair.base_currency.code.lower(),
                                order.pair.relative_currency.code.lower()
                            )
                        } for order in ask_orders
                    ]
                }
            }
        )

    @staticmethod
    def post(request):
        """POST is not supported; direct clients to GET."""
        return JsonResponse({'success': False, 'message': {'HTTP Method': ['Use GET']}})
class Element:
    """Abstract base for a finite element with basis functions defined on
    the reference element.

    Members:
        degree: polynomial degree of the element
        ndof: number of degrees of freedom (always 1 here)
    """

    def setDegree(self, deg):
        """Record the polynomial degree and reset the dof count."""
        self.degree, self.ndof = deg, 1

    def getBasisFunctions(self, x, y, bvals):
        """Evaluate basis functions at (x, y) into *bvals*; subclasses override."""
        pass

    def getBasisGradients(self, x, y, bgrvals):
        """Evaluate basis gradients at (x, y) into *bgrvals*; subclasses override."""
        pass
class spinner(threading.Thread):
    """Simple rotating spinner in the terminal."""

    def __init__(self):
        if os.name == 'posix':
            # Use a FIGURE DASH glyph for the horizontal frame on POSIX.
            # NOTE(review): this frame has no trailing space, unlike the
            # others — confirm whether that asymmetry is intentional.
            self.chars = (unicodedata.lookup('FIGURE DASH'),'\\ ','| ','/ ')
        else:
            self.chars = ('-','\\ ','| ','/ ')
        self.running = True
        self.out = sys.stdout
        threading.Thread.__init__(self, None, None, 'spinner')
        # Daemon thread: never blocks interpreter shutdown.
        self.daemon = True

    def spin(self):
        """Draw one full rotation of the spinner frames."""
        for x in self.chars:
            # '\r' returns the cursor so the next frame overwrites this one.
            self.string = x + '\r'
            self.out.write(self.string)
            self.out.flush()
            time.sleep(0.05)

    def run(self):
        # Spin until stop() clears the flag.
        while self.running:
            self.spin()

    def stop(self):
        """Request the loop to end, then blank the spinner character."""
        self.running = False
        time.sleep(0.2)
        self.out.write(' ' + '\r')
class AdbShell(object):
    """AdbShell offers easy write/read commands for a short adb shell process.

    It should be created by AdbWrapper.shell_unblock. This class can only
    handle short shell communication: all stdout/stderr is buffered in
    memory.
    """

    def __init__(self, process, logger):
        self.logger = logger
        self.p = process
        # Daemon reader threads drain stdout/stderr into queues so that
        # read_stdout/read_stderr never block on the pipes.
        self.stdout_q = Queue()
        self.stdout_stop = Event()
        self.stdout_t = Thread(target=_enqueue_output,
                               args=(self.p.stdout, self.stdout_q, self.stdout_stop, self.logger))
        self.stdout_t.daemon = True
        self.stdout_t.start()
        self.stderr_q = Queue()
        self.stderr_stop = Event()
        self.stderr_t = Thread(target=_enqueue_output,
                               args=(self.p.stderr, self.stderr_q, self.stderr_stop, self.logger))
        self.stderr_t.daemon = True
        self.stderr_t.start()

    def process(self):
        """Return the underlying subprocess object."""
        return self.p

    def isalive(self):
        """Return True while the subprocess has not exited."""
        self.p.poll()
        # Simplified from if/else returning True/False.
        return self.p.returncode is None

    def _read(self, q):
        """Drain queue *q* without blocking and return the joined output."""
        out = []
        while True:
            try:
                tmp = q.get_nowait()
                self.logger.debug("out: {!r}".format(tmp))
            except Empty:
                break
            else:
                out.append(tmp)
        return ''.join(out)

    def read_stdout(self):
        """Return whatever stdout the reader thread has collected so far."""
        self.logger.info("AdbShell stdout Read")
        return self._read(self.stdout_q)

    def read_stderr(self):
        """Return whatever stderr the reader thread has collected so far."""
        self.logger.info("AdbShell stderr Read")
        return self._read(self.stderr_q)

    def write(self, cmd):
        """Send *cmd* (UTF-8 encoded) to the shell's stdin."""
        self.logger.info("AdbShell Write: {!r}".format(cmd))
        self.p.stdin.write(_to_utf8(cmd))

    def kill(self):
        """Kill the subprocess if still running and stop both reader threads."""
        self.p.poll()
        if self.p.returncode is None:
            # Best-effort kill: the process may already be gone.
            with ignored(OSError):
                self.p.kill()
            self.p.wait()
        # BUG FIX: Thread.isAlive() was removed in Python 3.9 — use the
        # canonical is_alive() (available since Python 2.6).
        if self.stdout_t.is_alive():
            self.stdout_stop.set()
            try:
                self.stdout_t.join(10)
            except RuntimeError as err:
                self.logger.warning("AdbShell Kill stdout Timeout: %s", err)
        if self.stderr_t.is_alive():
            self.stderr_stop.set()
            try:
                self.stderr_t.join(10)
            except RuntimeError as err:
                self.logger.warning("AdbShell Kill stderr Timeout: %s", err)
class NestedIterator(object):
    """Flattening iterator over a nested list of integers.

    The constructor pushes the top-level items onto a stack in reverse
    order; hasNext() lazily unrolls nested lists until an integer sits on
    top of the stack (or the stack is exhausted).

    Example trace for [1, [4, [6]]]:
        stack [[4,[6]], 1]  -> yields 1
        stack [[4,[6]]] -> [[6], 4] -> yields 4
        stack [[6]] -> [6] -> yields 6
    """

    def __init__(self, nestedList):
        # Reverse so the first element ends up on top of the stack.
        self.stack = list(reversed(nestedList))

    def next(self):
        # Callers must invoke hasNext() first so the top is an integer.
        return self.stack.pop().getInteger()

    def hasNext(self):
        while self.stack:
            top = self.stack[-1]
            if top.isInteger():
                return True
            # Replace the nested list on top with its items, reversed so
            # the original order is preserved when popping.
            self.stack.pop()
            self.stack.extend(reversed(top.getList()))
        return False
class NormalizationLayer(NeuralNetworkLayer):
    """Data normalization layer.

    Makes the input data zero-mean and unit-variance. When created, it
    reads the entire data set from the data source, computes the per-pixel
    mean and variance, and applies a linear transformation so the output
    is zero mean and unit variance. Normalization is necessary for inputs
    to convolutional or fully connected layers.
    """

    def __init__(self, name = None):
        self.name = name

    def initialize(self, network, previous):
        """Compute data statistics and build this layer's output expression."""
        self.name = create_name("normalize", self.name, network.name_context)
        self.data_source = previous.data_source
        self.cacher = network.cacher
        self.params = []  # no trainable parameters
        self.compute_statistics(self.data_source)
        # image_variance accumulates the sum of squared deviations (M2),
        # so dividing by the count yields the population variance.
        stdev = numpy.sqrt(self.image_variance / self.image_count)
        # Reciprocal stdev; zero-variance pixels get scale 0, and the
        # (stdev == 0) term in the denominator avoids division by zero.
        scale = numpy.select([stdev != 0, stdev == 0], [1 / (stdev + (stdev == 0)), 0])
        self.output = (previous.output - self.image_mean) * scale
        self.shape = previous.shape

    def compute_statistics(self, source):
        """Compute (and cache) per-pixel running mean/variance over *source*."""
        if self.cacher.cached(self.name + "-mean") and self.cacher.cached(self.name + "-count"):
            # Cached statistics from a previous run.
            mean = self.cacher.uncache_numpy(self.name + "-mean")
            variance = self.cacher.uncache_numpy(self.name + "-variance")
            n = self.cacher.uncache(self.name + "-count")
        else:
            # One-pass (Welford-style) accumulation over every image in
            # every batch. NOTE(review): xrange is Python 2 only.
            n = 0.0
            mean = numpy.zeros(source.image_shape)
            variance = numpy.zeros(source.image_shape)
            for batchnum in xrange(source.num_batches):
                data, labels, encoded_labels = source.batch(batchnum)
                for i in xrange(data.shape[0]):
                    n = n + 1
                    delta = data[i, :, :, :] - mean
                    mean += delta / n
                    variance += delta * (data[i, :, :, :] - mean)
            self.cacher.cache_numpy(self.name + "-mean", mean)
            self.cacher.cache_numpy(self.name + "-variance", variance)
            self.cacher.cache(self.name + "-count", n)
        self.image_mean = mean
        self.image_variance = variance
        self.image_count = n
class RegistrationForm(forms.Form):
    """Form for registering a new user account.

    Validates that the requested username is not already in use, and
    requires the password to be entered twice to catch typos.

    Subclasses should feel free to add any additional validation they
    need, but should either preserve the base ``save()`` or implement
    a ``save()`` which accepts the ``profile_callback`` keyword
    argument and passes it through to
    ``RegistrationProfile.objects.create_inactive_user()``.
    """

    username = forms.RegexField(regex=r'^\w+$', max_length=30,
                                widget=forms.TextInput(attrs=attrs_dict),
                                label=u'username')
    email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=75)),
                             label=u'email address')
    password1 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
                                label=u'password')
    password2 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
                                label=u'password (again)')

    def clean_username(self):
        """Reject usernames already taken (case-insensitive match)."""
        try:
            user = User.objects.get(username__iexact=self.cleaned_data['username'])
        except User.DoesNotExist:
            # No collision: the requested username is available.
            return self.cleaned_data['username']
        raise forms.ValidationError(u'This username is already taken. Please choose another.')

    def clean(self):
        """Verify that both password fields match."""
        if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError(u'You must type the same password each time')
        return self.cleaned_data

    def save(self, profile_callback=None):
        """Create and return an inactive user from the validated data."""
        new_user = RegistrationProfile.objects.create_inactive_user(
            username=self.cleaned_data['username'],
            password=self.cleaned_data['password1'],
            email=self.cleaned_data['email'],
            profile_callback=profile_callback)
        return new_user
class Migration(migrations.Migration):
    """Re-apply parts of `0015_add_delete_data_permission.py` for upgrade
    of existing installations.

    See `0015_add_delete_data_permission.py` for details.
    """

    dependencies = [
        ('logger', '0015_add_delete_data_permission'),
    ]

    operations = [
        # Restore the full permission set (including delete_data_xform)
        # on the XForm model's options.
        migrations.AlterModelOptions(
            name='xform',
            options={
                'ordering': ('id_string',),
                'verbose_name': 'XForm',
                'verbose_name_plural': 'XForms',
                'permissions': (
                    ('report_xform', 'Can make submissions to the form'),
                    ('transfer_xform', 'Can transfer form ownership.'),
                    ('validate_xform', 'Can validate submissions.'),
                    ('delete_data_xform', 'Can delete submissions'),
                ),
            },
        ),
        # Clear custom permissions on the Note model.
        migrations.AlterModelOptions(
            name='note',
            options={'permissions': ()},
        ),
    ]
class stateManager(object):
    """Manager module that saves the current system state.

    Parameter-calculation formulas should be added to this class.
    """

    def __init__(self):
        # System parameters keyed by short name.
        self.parmDict = {
            'Vk': 0.0,
            'Inj_I': 0.0,
            'Switch': 0,
            'Tkm': 0.0,
            'Flows': 0.0,
            'Vm': 0.0,
            'Ing_O': 0.0
        }
        # Raw memory-address map (address string -> value).
        self.memDict = {
            'ff00': 0, 'ff01': 0, 'ff02': 0, 'ff03': 0,
            'ff04': 0, 'ff05': 0, 'ff06': 0, 'ff07': 0,
            'ff08': 0, 'ff09': 0, 'ff10': 0, 'ff11': 0,
            'ff12': 0, 'ff13': 0,
        }

    def getVal(self, keyStr):
        """Return the parameter value for *keyStr*, or None if unknown."""
        # Idiom fix: dict.get() replaces the `in dict.keys()` membership
        # test; behavior is identical (missing keys yield None).
        return self.parmDict.get(keyStr)

    def setVal(self, keyStr, value):
        """Set parameter *keyStr* to *value*; unknown keys are ignored."""
        # Idiom fix: membership test directly on the dict, not .keys().
        if keyStr in self.parmDict:
            self.parmDict[keyStr] = value

    def changeRelated(self):
        """Hook for recomputing dependent parameters (not implemented)."""
        pass
class APIResourceNotFoundError(APIError):
    """Indicate the resource was not found. The data specifies the resource name."""

    def __init__(self, field, message=''):
        # Fixed error code 'value:notfound'; *field* names the missing resource.
        super(APIResourceNotFoundError, self).__init__('value:notfound', field, message)
class ApiEndpointServiceServicer(object):
    """gRPC servicer for ApiEndpointService.

    Both RPCs are unimplemented placeholders; concrete servicers override
    them.
    """

    def Get(self, request, context):
        # Report UNIMPLEMENTED to the client and raise locally.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def List(self, request, context):
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
class ViewportMenu(wx.Menu):
    """Represents a menu that appears when right-clicking a viewport."""

    def __init__(self):
        wx.Menu.__init__(self)

    def addItem(self, name, parent = None, call = None, id = None):
        """Append a menu item; optionally bind *call* to its EVT_MENU event."""
        if id is None: id = wx.NewId()
        if parent is None: parent = self  # default: attach to this menu
        item = wx.MenuItem(parent, id, name)
        parent.AppendItem(item)
        if call is not None:
            self.Bind(wx.EVT_MENU, call, item)

    def addMenu(self, name, parent = None, id = None):
        """Append and return a new submenu under *parent* (default: self)."""
        if id is None: id = wx.NewId()
        subMenu = wx.Menu()
        if parent is None: parent = self
        parent.AppendMenu(id, name, subMenu)
        return subMenu
class Host(object):
    """A single server entry.

    Each Host has to be a member of one or more hostgroups.

    Attributes:
        name: short host name
        fqdn: fully qualified domain name
        ssh_port: server ssh port, default is 22
        hostgroups: list of hostgroups this server is part of
    """

    def __init__(self, name, fqdn, ssh_port, memberof_hostgroups):
        self.fqdn = fqdn
        self.name = name
        self.ssh_port = ssh_port
        self.hostgroups = memberof_hostgroups

    def equal(self, server):
        """Return True when *server* has the same fqdn and ssh port."""
        # Idiom fix: return the comparison directly instead of an
        # if/else that returns True/False.
        return self.fqdn == server.fqdn and self.ssh_port == server.ssh_port

    def __str__(self):
        return "fqdn:%s, ssh_port:%d, hostgroups:%s" % (
            self.fqdn, int(self.ssh_port), self.hostgroups)

    def __iter__(self):
        # NOTE(review): returning self without defining __next__ makes this
        # a broken iterator — iterating a Host raises TypeError. Kept
        # verbatim for interface compatibility; confirm no caller relies
        # on it before removing or fixing.
        return self
class Settings(dict):
    """Special mergeable dictionary for eve settings.

    Used as the config keeper returned by the method
    :func:`EveMongoengine.create_settings`. The difference between a
    Settings object and a default dict is that update() does not
    overwrite a key when its value is a dictionary, but tries to merge
    the inner dicts in an intelligent way.
    """

    def update(self, other):
        # Delegates to the module-level recursive merge helper.
        _merge_dicts(self, other)
class SessionUnitTests(unittest.TestCase):
    """Tests for `ucs_server.py`."""

    # NOTE(review): this login runs at class-definition (import) time and
    # hard-codes credentials and an IP address — consider moving it into
    # setUpClass and externalizing the credentials.
    ucs_session = UCSSession()
    handle, err = ucs_session.login("admin", "nbv12345", "172.28.225.163")

    def test_serverlist(self):
        """Version must be accepted by ensure_version and start with '3'."""
        session = UCSSession()
        msg = session.ensure_version(self.handle)
        assert(msg == "")
        version = session.get_version(self.handle)
        assert(version.startswith('3'))
class Url(dbs.Model):
    """Main logical entry-point model for the URL-shortening application.

    To prevent database pollution with multiple shortcodes for the same
    URL, Url is the parent model for the underlying database models.
    For non-time-consuming development reasons, only one-to-one
    relations are set.
    """

    __tablename__ = 'url'
    __table_args__ = (
        dbs.UniqueConstraint('url'),  # one row per distinct URL
    )
    id = dbs.Column(dbs.Integer, primary_key=True)
    url = dbs.Column(dbs.String, nullable=False)
    shortcode = dbs.relationship('Shortcode', uselist=False, back_populates='url')

    @classmethod
    def insert_url(cls, url, shortcode=None):
        """Insert *url* (reusing an existing row) and return its shortcode string."""
        _url = Url.query.filter_by(url=url).first()
        if _url is None:
            # New URL: create its shortcode row first, then the Url row.
            _shortcode = Shortcode.insert(shortcode=shortcode)
            _url = cls(url=url, shortcode=_shortcode)
        dbs.session.add(_url)
        dbs.session.commit()
        return _url.shortcode.shortcode
class Dialog(QDialog):
    """QDialog subclass adding a close-confirmation prompt.

    (Translated from the original Chinese docstring: "override of the
    QDialog class, implementing some functionality".) On close it asks
    the user to confirm; if confirmed it shuts down the owning UI's TCP
    connection before accepting the close event.
    """

    def __init__(self, ui):
        super(Dialog, self).__init__()
        self.ui = ui  # owning controller; must provide close_tcp()

    def closeEvent(self, event):
        """Ask for confirmation; close TCP and accept, or ignore the event."""
        reply = QMessageBox.question(self, 'TCP网络测试助手',
                                     "是否要退出应用程序?",
                                     QMessageBox.Yes | QMessageBox.No,
                                     QMessageBox.No)
        if reply == QMessageBox.Yes:
            self.ui.close_tcp()
            event.accept()
        else:
            event.ignore()
class RequestLog(object):
    """Simple mock of logservice.RequestLog."""

    def __init__(self):
        # Defaults mimic a finished, successful request with no payload.
        self.status = 200
        self.finished = True
        # No response size or end time recorded yet.
        self.response_size = None
        self.end_time = None
        # Per-instance list so each mock accumulates its own app logs.
        self.app_logs = []
class CSBoardManager:
    """Base class for managing the connection between the board and
    sensors, switches etc."""

    def __init__(self):
        # Registered device objects managed by this board.
        self.devices = []

    def appendDevices(self, inputs):
        """Add *inputs* to the managed devices (rebinds to a new list)."""
        self.devices = self.devices + inputs

    def setDevices(self, inputs):
        """Replace the managed device list with *inputs*."""
        self.devices = inputs

    def cleanUp(self):
        """Ask every managed device to clean up after itself."""
        for dev in self.devices:
            dev.cleanUp()

    def deviceState(self, device):
        """Return the current state reported by *device*."""
        return device.state()

    def setDeviceState(self, device, state):
        """Push *state* onto *device*."""
        device.setState(state)
class DrawCanvas(tk.Canvas):
    """A canvas that lets you draw a shape on it by clicking and dragging
    the mouse. Acts as a normal canvas in every other way.

    :shape='rectangle': one of 'rectangle', 'square', 'oval', 'circle', 'line'
    :command=None: function called when the mouse is released;
        receives the bounding box (x0, y0, x1, y1) as arguments
    :multiple=False: allow multiple shapes to be drawn
    """

    def __init__(self, master=None, command=None, shape='rectangle', multiple=False, **kwargs):
        super().__init__(master, **kwargs)
        self.command = command
        # Maps each shape name to the canvas primitive used to draw it.
        self.shape_cmds = {
            'rectangle': self.create_rectangle,
            'square': self.create_rectangle,
            'oval': self.create_oval,
            'circle': self.create_oval,
            'line': self.create_line,
        }
        self.shape = shape
        self.multiple = multiple
        self.start = None     # (x, y) where the current drag began
        self.current = None   # canvas id of the shape being dragged
        self.refs = []        # canvas ids of all shapes drawn so far
        self.bind("<Button>", self.on_click)
        self.bind("<ButtonRelease>", self.on_release)
        self.bind("<Motion>", self.on_motion)

    def on_click(self, event):
        """Start a new shape at the click point."""
        if not self.multiple: self.clear()
        self.start = event.x, event.y
        cmd = self.shape_cmds[self.shape]
        # Lines are colored via 'fill'; closed shapes via 'outline'.
        if self.shape == 'line':
            kwargs = dict(fill='red', width=2)
        else:
            kwargs = dict(outline='red', width=2)
        # Create a degenerate shape at the start point; on_motion grows it.
        self.current = cmd(*self.start, *self.start, **kwargs)
        self.refs.append(self.current)

    def on_motion(self, event):
        """Resize the in-progress shape to follow the mouse."""
        if self.current:
            x, y = event.x, event.y
            if self.shape in ('circle', 'square'):
                # Constrain to equal width/height: clamp both axes to the
                # smaller delta magnitude, preserving each drag direction.
                deltas = [a-b for a,b in zip((x,y), self.start)]
                d_min = min(abs(x) for x in deltas)
                x, y = (a+d_min*(1,-1)[d<0] for a,d in zip(self.start, deltas))
            self.coords(self.current, *self.start, x, y)

    def on_release(self, event):
        """Finalize the shape and invoke the callback with the bounding box."""
        self.on_motion(event)
        self.current = None
        if self.command:
            self.command(*self.start, event.x, event.y)

    def clear(self):
        """Delete every shape drawn so far."""
        while self.refs:
            self.delete(self.refs.pop())
class ClienteServicosUpdateView( LoginRequiredMixin, HasRoleMixin, SuccessMessageMixin, UpdateView ):
    """Edit a service attached to a client.

    (Translated from the original Portuguese docstring: "Altera um
    serviço ao cliente".)
    """

    allowed_roles = ["oficial", "contador"]  # roles permitted to edit
    model = models.ClienteServicos
    form_class = forms.ClienteServicosForm
    template_name = "cliente_servicos_edit.html"
    login_url = "login"
    success_message = "O serviço foi alterado com sucesso."
class ClozeQuery:
    """Cloze query defined by a span of interest (start, end) within a context.

    Note: the qa model embeds a query by its surrounding context
    (everything outside the span). To have the model also encode the span
    itself, use the negative span, i.e. swap start and end. If answer is
    None, only candidates will be scored in the QANetwork; if answer is
    defined, the model scores "[answer] + candidates".
    """

    def __init__(self, context, start, end, answer, answer_word, candidates, support=None):
        self.context = context
        self.start = start
        self.end = end
        self.answer = answer
        self.answer_word = answer_word
        self.candidates = candidates
        # Validate the optional supporting evidence before storing it.
        if support is not None:
            assert all(isinstance(q, ContextQueries) for q in support), "Support must be a list of ContextQueries"
        self.support = support
class Fetcher(object):
    """Simple pyodbc-based SQL Server data grabber.

    Based off of fetchdata.py, except much simpler.
    """

    def __init__(self, constr):
        self.constr = constr
        self.conerr = 'Fetcher: Cannot connect with the supplied Connection String.'
        self.sqlerr = 'Fetcher: Error executing the supplied SQL.'

    def fetch(self, fetchsql):
        """Run *fetchsql* and return a list of OrderedDicts keyed by column name.

        Raises FetcherError on connection or SQL failure.
        """
        try:
            con = pyodbc.connect(self.constr)
        except Exception:
            # BUG FIX: was `raise self.FetcherError(...)` — FetcherError is
            # not an attribute of this class, so the original raised
            # AttributeError instead of the intended error. Use the
            # module-level name, as the SQL branch below already did.
            # (Also narrowed from a bare `except:`.)
            raise FetcherError(self.conerr)
        try:
            cur = con.cursor()
            # Lift the server-side query-cost cap and suppress rowcount
            # messages for this session.
            cur.execute('SET QUERY_GOVERNOR_COST_LIMIT 0')
            cur.execute('SET NOCOUNT ON')
            try:
                res = cur.execute(fetchsql)
                dbrows = res.fetchall()
                fields = [col[0] for col in cur.description]
            except Exception:
                raise FetcherError(self.sqlerr)
        finally:
            # BUG FIX: always release the connection — the original leaked
            # it when the SQL failed.
            con.close()
        rows = [collections.OrderedDict(zip(fields, row)) for row in dbrows]
        return rows
class QCellToolBar(QtGui.QToolBar):
    """CellToolBar is inherited from QToolBar with some functionalities
    for interacting with CellHelpers."""

    def __init__(self, sheet):
        QtGui.QToolBar.__init__(self,sheet)
        self.setOrientation(QtCore.Qt.Horizontal)
        self.sheet = sheet
        # (row, col) of the cell this toolbar is currently snapped to;
        # -1/-1 means "not snapped".
        self.row = -1
        self.col = -1
        self.layout().setMargin(0)
        self.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
        # Default actions available on every cell toolbar.
        pixmap = self.style().standardPixmap(QtGui.QStyle.SP_DialogCloseButton)
        self.appendAction(QCellToolBarRemoveCell(QtGui.QIcon(pixmap), self))
        self.appendAction(QCellToolBarMergeCells(QtGui.QIcon(':celltoolbar/mergecells.png'), self))
        self.createToolBar()

    def addAnimationButtons(self):
        """Append the capture/play/clear history actions."""
        self.appendAction(QCellToolBarCaptureToHistory(self))
        self.appendAction(QCellToolBarPlayHistory(self))
        self.appendAction(QCellToolBarClearHistory(self))

    def createToolBar(self):
        """Hook for subclasses to populate extra toolbar actions."""
        pass

    def snapTo(self, row, col):
        """Attach this toolbar to cell (row, col) and refresh action states."""
        self.row = row
        self.col = col
        self.updateToolBar()

    def updateToolBar(self):
        """Broadcast the current cell context so each action updates itself."""
        cellWidget = self.sheet.getCell(self.row, self.col)
        for action in self.actions():
            # Old-style Qt signal emission.
            action.emit(QtCore.SIGNAL('needUpdateStatus'), (self.sheet, self.row, self.col, cellWidget))

    def connectAction(self, action, widget):
        """Wire optional updateStatus/triggeredSlot/toggledSlot handlers of *widget*."""
        if hasattr(widget, 'updateStatus'):
            self.connect(action, QtCore.SIGNAL('needUpdateStatus'), widget.updateStatus)
        if hasattr(widget, 'triggeredSlot'):
            self.connect(action, QtCore.SIGNAL('triggered()'), widget.triggeredSlot)
        if hasattr(widget, 'toggledSlot'):
            self.connect(action, QtCore.SIGNAL('toggled(bool)'), widget.toggledSlot)

    def appendAction(self, action):
        """Add *action* to the toolbar, connect its slots, and return it."""
        action.toolBar = self  # back-reference used by the action
        self.addAction(action)
        self.connectAction(action, action)
        return action

    def appendWidget(self, widget):
        """Add *widget* to the toolbar, connect its slots, and return its action."""
        action = self.addWidget(widget)
        widget.toolBar = self
        action.toolBar = self
        self.connectAction(action, widget)
        return action

    def getSnappedWidget(self):
        """Return the snapped cell widget, or None when not snapped."""
        if self.row>=0 and self.col>=0:
            return self.sheet.getCell(self.row, self.col)
        else:
            return None
class Mask(layers.Layer):
    """Mask a Tensor with shape=[None, num_capsule, dim_vector] either by
    the capsule with max length or by an additional input mask.

    Except the max-length capsule (or the specified capsule), all vectors
    are masked to zeros; the masked tensor is then flattened.

    Example:
        x = keras.layers.Input(shape=[8, 3, 2])  # 3 capsules, dim_vector=2
        y = keras.layers.Input(shape=[8, 3])     # one-hot true labels
        out = Mask()(x)        # out.shape=[8, 6]
        out2 = Mask()([x, y])  # out2.shape=[8, 6], masked with labels y
    """

    def call(self, inputs, **kwargs):
        if type(inputs) is list:
            # Two inputs: [tensor, mask]; use the supplied (e.g. one-hot) mask.
            assert len(inputs) == 2
            inputs, mask = inputs
        else:
            # Single input: derive the mask as the one-hot of the argmax
            # over capsule lengths (L2 norms along the last axis).
            x = K.sqrt(K.sum(K.square(inputs), -1))
            mask = K.one_hot(indices=K.argmax(x, 1), num_classes=x.get_shape().as_list()[1])
        # Zero out every capsule except the selected one, then flatten.
        masked = K.batch_flatten(inputs * K.expand_dims(mask, -1))
        return masked

    def compute_output_shape(self, input_shape):
        if type(input_shape[0]) is tuple:
            # List input: the data tensor's shape is input_shape[0].
            return tuple([None, input_shape[0][1] * input_shape[0][2]])
        else:
            return tuple([None, input_shape[1] * input_shape[2]])

    def get_config(self):
        # No extra configuration beyond the base layer's.
        config = super(Mask, self).get_config()
        return config
class AdminSalesByOrganizersList(ResourceList):
    """Resource for sales by organizers. Joins organizers with events and
    orders and subsequently accumulates sales by status.
    """

    def query(self, _):
        # Base query: users holding an organizer or owner role.
        query_ = self.session.query(User)
        query_ = query_.join(UsersEventsRoles).filter(
            or_(Role.name == 'organizer', Role.name == 'owner')
        )
        # Outer joins so organizers with no orders/tickets still appear.
        query_ = query_.join(Event).outerjoin(Order).outerjoin(OrderTicket)
        return query_

    methods = ['GET']
    # Admin-only endpoint.
    decorators = (api.has_permission('is_admin'),)
    schema = AdminSalesByOrganizersSchema
    data_layer = {'model': User, 'session': db.session, 'methods': {'query': query}}
class FuncSource:
    """Source code annotator for a function.

    Tracks per-line execution counts for ``fn`` and renders the source
    with coverage prefixes (count, ``>>>>>>`` for missed lines, blanks
    for non-executable lines).
    """

    # ``finally:`` lines never get credited by the tracer, so they are
    # treated as blank when counting and rendering coverage.
    blank_rx = re.compile(r"^\s*finally:\s*(#.*)?$")

    def __init__(self, fn):
        self.fn = fn
        self.filename = inspect.getsourcefile(fn)
        self.sourcelines = {}
        self.source = []
        self.firstlineno = self.firstcodelineno = 0
        try:
            self.source, self.firstlineno = inspect.getsourcelines(fn)
            self.firstcodelineno = self.firstlineno
            self.find_source_lines()
        except IOError:
            # Source unavailable (builtin, interactive, ...): keep defaults.
            self.filename = None

    def find_source_lines(self):
        """Seed every executable line of ``fn`` with a zero hit count."""
        if self.filename is None:
            return
        strs = trace.find_strings(self.filename)
        lines = trace.find_lines_from_code(self.fn.__code__, strs)
        for lineno in lines:
            self.sourcelines.setdefault(lineno, 0)
        self.firstcodelineno = min(lines) if lines else self.firstlineno

    def mark(self, lineno, count=1):
        """Record ``count`` additional executions of ``lineno``."""
        self.sourcelines[lineno] = self.sourcelines.get(lineno, 0) + count

    def count_never_executed(self):
        """Return how many executable, non-blank lines were never run."""
        missed = 0
        for offset, line in enumerate(self.source):
            if self.sourcelines.get(self.firstlineno + offset) == 0:
                if not self.blank_rx.match(line):
                    missed += 1
        return missed

    def __str__(self):
        if self.filename is None:
            return "cannot show coverage data since co_filename is None"
        annotated = []
        for offset, line in enumerate(self.source):
            hits = self.sourcelines.get(self.firstlineno + offset)
            if hits is None:
                # Not an executable line.
                prefix = ' ' * 7
            elif hits == 0:
                prefix = ' ' * 7 if self.blank_rx.match(line) else '>' * 6 + ' '
            else:
                prefix = '%5d: ' % hits
            annotated.append(prefix + line)
        return ''.join(annotated)
@pytest.mark.components
@pytest.allure.story('Clients')
@pytest.allure.feature('POST')
class Test_PFE_Components(object):
    """PFE Clients test cases."""

    @pytest.allure.link('https://jira.qumu.com/browse/TC-43057')
    @pytest.mark.Clients
    @pytest.mark.POST
    def test_TC_43057_POST_Clients_Clients_Rule_Server_Host_Eq(self, context):
        """Create two clients whose matching rule compares serverHost with EQ
        (one IP value, one hostname value) via POST /clients/."""
        with pytest.allure.step("""Test1: Verify that user is able to add new rule with parameter 'Server Host>Equals' using request POST '/clients/'."""):
            # Rule: serverHost EQ '10.1.10.10'.
            clientDetails = context.sc.ClientDetails(
                id='clientServerHost',
                matchingRule={
                    'operator': 'ALL',
                    'rules': [{
                        'expressionType': 'Single',
                        'contextField': 'serverHost',
                        'operator': 'EQ',
                        'contextFieldType': 'String',
                        'matchValue': '10.1.10.10',
                        'contextFieldKey': None
                    }],
                    'groups': []
                },
                name='POST: Client with Server Host EQ Rule 1',
                sourceSelectionRule=[])
            # ``check`` is expected to assert a successful response.
            response = check(
                context.cl.Clients.createEntity(
                    body=clientDetails
                )
            )
        with pytest.allure.step("""Test2: Verify that user is able to add new rule with parameter 'Server Host>Equals' using request POST '/clients/'."""):
            # Rule: serverHost EQ 'autovcc_24.com'.
            clientDetails = context.sc.ClientDetails(
                id='clientServerHost2',
                matchingRule={
                    'operator': 'ALL',
                    'rules': [{
                        'expressionType': 'Single',
                        'contextField': 'serverHost',
                        'operator': 'EQ',
                        'contextFieldType': 'String',
                        'matchValue': 'autovcc_24.com',
                        'contextFieldKey': None
                    }],
                    'groups': []
                },
                name='POST: Client with Server Host EQ Rule2',
                sourceSelectionRule=[])
            response = check(
                context.cl.Clients.createEntity(
                    body=clientDetails
                )
            )
class AccessDescriptor(ImmutableRecord):
    """.. attribute:: identifier

        An identifier under user control, used to connect this access
        descriptor to the access that generated it. Any Python value.
    """

    # storage_axis_exprs: presumably the expressions indexing the storage
    # axes of the access -- TODO confirm against ImmutableRecord users.
    __slots__ = [
        "identifier",
        "storage_axis_exprs",
    ]
class WeekendFilter (object):
    """Filter ticks within the weekend boundary of a given timezone and market.

    ``weekend``/``weekstart`` are strings like ``"Friday 17:00"`` naming the
    local weekday and time at which the market closes/reopens.
    """

    def __init__(self, timezone=None, weekend=None, weekstart=None):
        self._timezone = pytz.timezone(timezone)
        week_ordinal_map = dict(Monday=0, Tuesday=1, Wednesday=2, Thursday=3,
                                Friday=4, Saturday=5, Sunday=6)
        # Split "Weekday HH:MM" into weekday ordinal and time of day.
        self._weekend_day = week_ordinal_map[weekend.split(" ")[0]]
        self._weekstart_day = week_ordinal_map[weekstart.split(" ")[0]]
        self._weekend_time = datetime.datetime.strptime(weekend, "%A %H:%M").time()
        self._weekstart_time = datetime.datetime.strptime(weekstart, "%A %H:%M").time()
        # Next UTC tick boundaries where filtering switches on/off
        # (computed lazily on the first tick seen).
        self._next_enter_filter = None
        self._next_exit_filter = None

    def filter(self, row):
        """Return ``("weekend", "")`` when the tick in ``row`` falls inside
        the weekend window, otherwise ``None``."""
        time = row["time"]
        filter_tick = False
        if self._next_enter_filter == None and self._next_exit_filter == None:
            self._update_enter_exit(time)
        if self._next_enter_filter and self._next_exit_filter:
            if time < self._next_enter_filter:
                filter_tick = False
            elif time > self._next_enter_filter and time < self._next_exit_filter:
                filter_tick = True
            else:
                # Past this weekend window: reset and search for the next
                # one, starting the scan one day later.
                self._next_enter_filter = None
                self._next_exit_filter = None
                self._update_enter_exit(common.Time.tick(common.Time.time(time) + datetime.timedelta(1)))
                filter_tick = False
        if filter_tick:
            return ("weekend", "")
        else:
            return None

    def _update_enter_exit(self, time):
        """Compute the next weekend-enter and weekend-exit boundaries (as
        UTC ticks) at or after ``time``."""
        localtime = common.Time.time(time).astimezone(self._timezone)
        if self._next_enter_filter == None:
            localenter = localtime
            while self._next_enter_filter == None:
                # Advance day by day to the configured closing weekday...
                while localenter.weekday() != self._weekend_day:
                    localenter += datetime.timedelta(1)
                # ...then pin the configured closing time of day.
                localenter = localenter.replace(hour=self._weekend_time.hour, minute=self._weekend_time.minute, second=0, microsecond=0)
                self._next_enter_filter = common.Time.tick(localenter.astimezone(pytz.utc))
        if self._next_exit_filter == None:
            localexit = localtime
            while self._next_exit_filter == None:
                while localexit.weekday() != self._weekstart_day:
                    localexit += datetime.timedelta(1)
                localexit = localexit.replace(hour=self._weekstart_time.hour, minute=self._weekstart_time.minute, second=0, microsecond=0)
                self._next_exit_filter = common.Time.tick(localexit.astimezone(pytz.utc))
class RouterInterface(pulumi.CustomResource):
    """Manages a V2 router interface resource within OpenStack."""

    # NOTE(review): generated Python 2 style code (``basestring`` checks);
    # will not run unmodified on Python 3.
    def __init__(__self__, __name__, __opts__=None, port_id=None, region=None, router_id=None, subnet_id=None):
        """Create a RouterInterface resource.

        :param __name__: Unique resource name (used for URN creation).
        :param __opts__: Optional ``pulumi.ResourceOptions``.
        :param port_id: ID of the port to attach (optional).
        :param region: OpenStack region (optional).
        :param router_id: ID of the router (required).
        :param subnet_id: ID of the subnet to attach (optional).
        """
        if not __name__:
            raise TypeError('Missing resource name argument (for URN creation)')
        if not isinstance(__name__, basestring):
            raise TypeError('Expected resource name to be a string')
        if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        __props__ = dict()
        if port_id and not isinstance(port_id, basestring):
            raise TypeError('Expected property port_id to be a basestring')
        __self__.port_id = port_id
        __props__['portId'] = port_id
        if region and not isinstance(region, basestring):
            raise TypeError('Expected property region to be a basestring')
        __self__.region = region
        __props__['region'] = region
        if not router_id:
            raise TypeError('Missing required property router_id')
        elif not isinstance(router_id, basestring):
            raise TypeError('Expected property router_id to be a basestring')
        __self__.router_id = router_id
        __props__['routerId'] = router_id
        if subnet_id and not isinstance(subnet_id, basestring):
            raise TypeError('Expected property subnet_id to be a basestring')
        __self__.subnet_id = subnet_id
        __props__['subnetId'] = subnet_id
        super(RouterInterface, __self__).__init__(
            'openstack:networking/routerInterface:RouterInterface',
            __name__,
            __props__,
            __opts__)

    def set_outputs(self, outs):
        """Copy provider outputs back onto the resource's attributes."""
        if 'portId' in outs:
            self.port_id = outs['portId']
        if 'region' in outs:
            self.region = outs['region']
        if 'routerId' in outs:
            self.router_id = outs['routerId']
        if 'subnetId' in outs:
            self.subnet_id = outs['subnetId']
class OpError(Exception):
    """A generic error that is raised when TensorFlow execution fails.

    Whenever possible, the session will raise a more specific subclass
    of `OpError` from the `tf.errors` module.

    @@op
    @@node_def
    """

    def __init__(self, node_def, op, message, error_code):
        """Create an `OpError`.

        :param node_def: The NodeDef proto of the failing op (may be None).
        :param op: The op that failed (may be None).
        :param message: Human-readable error message.
        :param error_code: Integer error code.
        """
        super(OpError, self).__init__()
        self._message = message
        self._node_def = node_def
        self._op = op
        self._error_code = error_code

    @property
    def message(self):
        """The error message describing the failure."""
        return self._message

    @property
    def op(self):
        """The operation that failed, or None if unknown."""
        return self._op

    @property
    def error_code(self):
        """The integer error code."""
        return self._error_code

    @property
    def node_def(self):
        """The NodeDef proto of the failing op."""
        return self._node_def

    def __str__(self):
        if self._op is not None:
            # Show where the failing op was defined, then walk the chain of
            # "original" ops, eliding traceback lines that repeat lines
            # already printed for the previous op.
            output = ["%s\nCaused by op %r, defined at:\n"
                      % (self.message, self._op.name,)]
            curr_traceback_list = traceback.format_list(self._op.traceback)
            output.extend(curr_traceback_list)
            original_op = self._op._original_op
            while original_op is not None:
                output.append(
                    "\n...which was originally created as op %r, defined at:\n"
                    % (original_op.name,))
                prev_traceback_list = curr_traceback_list
                curr_traceback_list = traceback.format_list(original_op.traceback)
                is_eliding = False
                elide_count = 0
                last_elided_line = None
                for line, line_in_prev in zip(curr_traceback_list,
                                              prev_traceback_list):
                    if line == line_in_prev:
                        if is_eliding:
                            elide_count += 1
                            last_elided_line = line
                        else:
                            output.append(line)
                            is_eliding = True
                            elide_count = 0
                    else:
                        if is_eliding:
                            if elide_count > 0:
                                output.extend(
                                    ["[elided %d identical lines from previous traceback]\n"
                                     % (elide_count - 1,), last_elided_line])
                            is_eliding = False
                        # Fix: was ``output.extend(line)``, which spread the
                        # string character-by-character into the list. The
                        # joined result was identical, but append keeps the
                        # accumulator a proper list of lines.
                        output.append(line)
                original_op = original_op._original_op
            return ''.join(output)
        else:
            return self.message
class ParametersForm(param_forms.AdminParametersForm):
    """Extension settings."""

    app = "modoboa_rspamd"

    # Visual separator heading the DKIM section.
    dkim_settings_sep = form_utils.SeparatorField(
        label=ugettext_lazy("DKIM signing settings"))

    path_map_path = forms.CharField(
        label=ugettext_lazy("Path map path"),
        initial="",
        help_text=ugettext_lazy(
            "Absolute path of the file which contains paths to DKIM "
            "private keys."
        ),
        required=False
    )

    selector_map_path = forms.CharField(
        label=ugettext_lazy("Selector map path"),
        initial="",
        help_text=ugettext_lazy(
            "Absolute path of the file which contains names of "
            "DKIM selectors."
        ),
        required=False
    )
class ContentFilter(object):
    """Content filter model.

    Bundles the criteria used when listing content: scope (workspace and
    parent), visibility flags, content selection, and paging. The
    ``show_*`` flags arrive as ints and are stored as real booleans.
    """

    def __init__(
        self,
        workspace_id: int = None,
        parent_id: int = None,
        show_archived: int = 0,
        show_deleted: int = 0,
        show_active: int = 1,
        content_type: str = None,
        label: str = None,
        offset: int = None,
        limit: int = None,
    ) -> None:
        # Scope.
        self.workspace_id = workspace_id
        self.parent_id = parent_id
        # Visibility: normalise truthy ints to booleans.
        self.show_archived = bool(show_archived)
        self.show_deleted = bool(show_deleted)
        self.show_active = bool(show_active)
        # Content selection.
        self.content_type = content_type
        self.label = label
        # Paging.
        self.offset = offset
        self.limit = limit
class LocDict(object):
    """Localization Dictionary.

    Assumptions:

    1. There's no duplicate words in a single Locale.
    2. There's no duplicate words globally in all Locales.

    :param data: either a dict mapping locale code -> list of values, or a
        list/tuple of per-row dicts mapping locale code -> value. Each
        locale must supply the same number of values, since translation
        works through the shared positional index.
    """

    def __init__(self, data):
        self.data = dict()
        self.locales = list()
        if isinstance(data, dict):
            self._init_with_dict(data)
        elif isinstance(data, (tuple, list)):
            self._init_with_list(data)

    def _init_with_dict(self, data):
        """Build a per-locale bidict mapping value <-> positional index."""
        _data = dict()
        _locales = list()
        locale_vs_value_counts = dict()
        for locale in locale_list:
            if locale in data:
                value_list = data.get(locale)
                locale_vs_value_counts[locale] = len(value_list)
                _data[locale] = bidict(
                    dict(
                        [(value, ind) for ind, value in enumerate(value_list)]
                    )
                )
                _locales.append(locale)
        # All locales must define the same number of values, otherwise the
        # positional index used for translation is ambiguous.
        count_set = set(locale_vs_value_counts.values())
        if len(count_set) != 1:
            locales_with_bad_data = list()
            for count in count_set:
                for locale, value_counts in locale_vs_value_counts.items():
                    if count == value_counts:
                        locales_with_bad_data.append(locale)
                        break
            msg = "number of unique value in `{}` and `{}` is different!".format(
                locales_with_bad_data[0], locales_with_bad_data[1]
            )
            raise ValueError(msg)
        _locales.sort()
        self.data = _data
        self.locales = _locales

    def _init_with_list(self, data):
        """Regroup row dicts into locale -> list of values, then delegate."""
        _data = dict()
        for row in data:
            for locale, value in iteritems(row):
                # Fix: was a try/append guarded by a bare ``except:`` --
                # setdefault grows the per-locale list without swallowing
                # unrelated exceptions.
                _data.setdefault(locale, []).append(value)
        self._init_with_dict(_data)

    def find_locale(self, value):
        """Return the locale that contains ``value``.

        :raises ValueError: when no locale contains ``value``.
        """
        for locale in self.locales:
            if value in self.data[locale]:
                return locale
        msg = "Can't detect language of '%s'" % value
        raise ValueError(msg)

    def trans_to(self, value, dst_loc, src_loc=None):
        """Translate ``value`` into locale ``dst_loc``.

        :param src_loc: source locale; auto-detected when omitted.
        :raises ValueError: when ``dst_loc`` is unknown or auto-detection fails.
        :raises KeyError: when ``value`` is absent from ``src_loc``.
        """
        if dst_loc not in self.data:
            msg = "%s is not available!" % dst_loc
            raise ValueError(msg)
        if src_loc is None:
            src_loc = self.find_locale(value)
        try:
            ind = self.data[src_loc][value]
        except KeyError:
            msg = "%r is not available in %s" % (value, src_loc)
            raise KeyError(msg)
        # Same positional index, looked up in the destination locale.
        return self.data[dst_loc].inv[ind]
@attr.s(frozen=True, auto_attribs=True)
class Template:
    """A container for a Jinja2 template. Exposes the variables found in the
    template through the variables property."""

    string: str
    # Compiled template; excluded from equality so two Templates built from
    # the same source string compare equal.
    _template: jinja2.Template = attr.ib(init=False, eq=False)
    # Undeclared variable names referenced by ``string``.
    variables: FrozenSet[str] = attr.ib(init=False)

    @_template.default
    def __set_template(self):
        # Compile once at construction time.
        return _env.from_string(self.string)

    @variables.default
    def __set_variables(self) -> frozenset:
        parsed_content = _env.parse(self.string)
        return frozenset(meta.find_undeclared_variables(parsed_content))

    def render(self, values: dict) -> str:
        """Render the template with ``values`` and return the result."""
        return self._template.render(values)
class WorkerError(Exception):
    """Base class for all worker exceptions."""
    pass
class Vimball(object):
    """Vimball archive format.

    Opens a (possibly gzip/bzip2/xz compressed) vimball archive, exposes
    its file entries, and can extract them to a directory.
    """

    def __init__(self, path):
        if not os.path.exists(path):
            raise ArchiveError(f"path doesn't exist: {path!r}")
        self.path = path
        # Choose the decompressor from the extension; plain text otherwise.
        _filebase, ext = os.path.splitext(path)
        if ext == ".gz":
            self.fd = gzip.open(path)
        elif ext == ".bz2":
            self.fd = bz2.BZ2File(path)
        elif ext == ".xz":
            self.fd = lzma.open(path)
        else:
            self.fd = open(path)
        if not is_vimball(self.fd):
            raise ArchiveError('invalid archive format')

    def __del__(self):
        try:
            self.fd.close()
        except AttributeError:
            # __init__ may have failed before ``fd`` was assigned.
            return

    def readline(self):
        # Call readline via the stream's class; compressed streams return
        # bytes, so normalise to str.
        line = self.fd.__class__.readline(self.fd)
        if isinstance(line, bytes):
            line = line.decode()
        return line

    @property
    def files(self):
        """Yield ``(filename, line_count, data_offset)`` for each entry.

        Two known header formats are tried in turn; the first format that
        matches anything wins.
        """
        for header in (r"(.*)\t\[\[\[1\n", r"^(\d+)\n$"):
            header = re.compile(header)
            filename = None
            self.fd.seek(0)
            line = self.readline()
            while line:
                m = header.match(line)
                if m is not None:
                    filename = m.group(1)
                    # The line after the header holds the entry's line count.
                    try:
                        filelines = int(self.readline().rstrip())
                    except ValueError:
                        raise ArchiveError('invalid archive format')
                    filestart = self.fd.tell()
                    yield (filename, filelines, filestart)
                line = self.readline()
            if filename is not None:
                break

    def extract(self, extractdir=None, verbose=False):
        """Extract all archive entries.

        :param extractdir: target directory; derived from the archive name
            when omitted (falling back to a fresh temp dir if it exists).
        :param verbose: print each extracted file path.
        """
        if extractdir is None:
            filebase, ext = os.path.splitext(self.path)
            if ext in ('.gz', '.bz2', '.xz'):
                # Strip the compression suffix as well.
                filebase, _ext = os.path.splitext(filebase)
            extractdir = os.path.basename(filebase)
            if os.path.exists(extractdir):
                # Avoid clobbering an existing directory.
                tempdir = tempfile.mkdtemp(prefix='vimball-', dir=os.getcwd())
                extractdir = os.path.join(tempdir.split('/')[-1], extractdir)
        self.fd.seek(0)
        for filename, lines, offset in self.files:
            filepath = os.path.join(extractdir, filename)
            try:
                directory = os.path.dirname(filepath)
                mkdir_p(directory)
            except OSError as e:
                raise ArchiveError(f"failed creating directory {directory!r}: {e.strerror}")
            with open(filepath, 'w') as f:
                if verbose:
                    print(filepath)
                self.fd.seek(offset)
                for i in range(lines):
                    f.write(self.readline())
class GameNumPy(BaseGameNumPy):
    """Full-featured NumPy/SciPy-based implementation of the Game of Life."""

    def _init(self):
        super(GameNumPy, self)._init()
        # Per-cell fate for the upcoming step; everything starts dead.
        self.fates = np.zeros((self.width, self.height), dtype=np.int8)
        self.fates.fill(Fate.StayDead)
        # Consecutive steps each cell has kept its current state.
        self.ages = np.zeros((self.width, self.height), dtype=np.int64)

    def populate_random(self, prob=0.5):
        super(GameNumPy, self).populate_random(prob)
        self._compute_fates()

    def _step(self):
        self._apply_fates()
        self._compute_fates()

    def fate(self, row, col):
        # ``take`` with mode='wrap' gives toroidal (wrap-around) indexing.
        line = self.fates.take(row, axis=0, mode='wrap')
        fate = line.take(col, mode='wrap')
        return fate

    def age(self, row, col):
        line = self.ages.take(row, axis=0, mode='wrap')
        age = line.take(col, mode='wrap')
        return age

    def _compute_fates(self):
        # Weighted neighbourhood sum on a wrapped grid. The thresholds below
        # imply self.WEIGHTS scores the centre cell as 10 and each neighbour
        # as 1 -- assumes that; confirm on the base class.
        con = convolve(self.cells, self.WEIGHTS, mode='wrap')
        self.fates.fill(Fate.StayDead)
        self.fates[con == 3] = Fate.Birth                             # dead, 3 neighbours
        self.fates[(con >= 10) & (con < 12)] = Fate.DeathByIsolation  # alive, <2 neighbours
        self.fates[(con == 12) | (con == 13)] = Fate.Survive          # alive, 2-3 neighbours
        self.fates[con > 13] = Fate.DeathByOvercrowding               # alive, >3 neighbours

    def _apply_fates(self):
        new_cells = np.zeros((self.width, self.height), dtype=np.int8)
        new_cells[(self.fates == Fate.Birth) | (self.fates == Fate.Survive)] = 1
        # Age: increment unchanged cells, reset cells that flipped state.
        unchanged = new_cells == self.cells
        changed = np.logical_not(unchanged)
        self.ages[unchanged] += 1
        self.ages[changed] = 0
        self.cells = new_cells
class Sinusoidal(VoltageSource):
    """SIN waveform::

        SIN ( VO VA FREQ TD THETA )
    """

    def __init__(self, name, node_plus, node_minus,
                 dc_offset=0, offset=0, amplitude=1, frequency=50,
                 delay=0, damping_factor=0):
        super(Sinusoidal, self).__init__(name, node_plus, node_minus)
        self.dc_offset = dc_offset
        self.offset = offset
        self.amplitude = amplitude
        # Wrap in Frequency so period/units are handled uniformly.
        self.frequency = Frequency(frequency)
        self.delay = delay
        self.damping_factor = damping_factor

    @property
    def rms_voltage(self):
        """RMS value of the sine amplitude."""
        return amplitude_to_rms(self.amplitude)

    @property
    def period(self):
        """Waveform period (inverse of the frequency)."""
        return self.frequency.period

    @property
    def parameters(self):
        """Netlist clauses: the DC offset and the AC ``SIN(...)`` spec."""
        return ('DC {}V'.format(self.dc_offset),
                'AC SIN({}V {}V {}Hz {}s {})'.format(
                    self.offset, self.amplitude, self.frequency,
                    self.delay, self.damping_factor))
class TextAnalyticsAPIConfiguration(Configuration):
    """Configuration for TextAnalyticsAPI.

    Note that all parameters used to create this instance are saved as
    instance attributes.

    :param azure_region: Supported Azure regions for Cognitive Services
        endpoints. Possible values include: 'westus', 'westeurope',
        'southeastasia', 'eastus2', 'westcentralus', 'westus2', 'eastus',
        'southcentralus', 'northeurope', 'eastasia', 'australiaeast',
        'brazilsouth'
    :type azure_region: str or
        ~azure.cognitiveservices.language.textanalytics.models.AzureRegions
    :param credentials: Subscription credentials which uniquely identify
        client subscription.
    :type credentials: None
    """

    def __init__(self, azure_region, credentials):
        if azure_region is None:
            raise ValueError("Parameter 'azure_region' must not be None.")
        if credentials is None:
            raise ValueError("Parameter 'credentials' must not be None.")
        # {AzureRegion} is substituted into the endpoint by the client.
        base_url = 'https://{AzureRegion}.api.cognitive.microsoft.com/text/analytics'
        super(TextAnalyticsAPIConfiguration, self).__init__(base_url)
        self.add_user_agent('azure-cognitiveservices-language-textanalytics/{}'.format(VERSION))
        self.azure_region = azure_region
        self.credentials = credentials
class noenvelope(Envelope):
    """Only copies the input files to one output file."""

    def run(self):
        # Let an optional user script adjust ta_info before writing.
        botslib.tryrunscript(self.userscript, self.scriptname, 'ta_infocontent', ta_info=self.ta_info)
        if len(self.ta_list) > 1:
            # Multiple inputs: concatenate them into one output file.
            tofile = botslib.opendata(self.ta_info['filename'], 'wb', self.ta_info['charset'])
            self.writefilelist(tofile)
            tofile.close()
        else:
            # Single input: reuse it directly instead of copying.
            self.ta_info['filename'] = self.ta_list[0]
class SessionAttributeUtil:
    """Session helpers that force SessionAttribute enum keys.

    Centralises session access so attribute names always come from the
    SessionAttribute enum instead of ad-hoc strings.
    """

    @staticmethod
    def get(request: HttpRequest, attr_name: SessionAttribute, default_value):
        # Read-through: store the default on first access, then return
        # whatever the session holds.
        session = request.session
        key = attr_name.value
        if key not in session:
            session[key] = default_value
        return session[key]

    @staticmethod
    def set(request: HttpRequest, attr_name: SessionAttribute, attr_value):
        request.session[attr_name.value] = attr_value

    @staticmethod
    def remove(request: HttpRequest, attr_name: SessionAttribute):
        # Deleting only when present keeps this a no-op for absent keys.
        key = attr_name.value
        if key in request.session:
            del request.session[key]
class XiaoGimbutas(object):
    """Xiao-Gimbutas symmetric quadrature rules on the triangle.

    Hong Xiao, Zydrunas Gimbutas,
    A numerical algorithm for the construction of efficient quadrature
    rules in two and higher dimensions,
    Computers & Mathematics with Applications 59(2), 2010, 663-676,
    <https://doi.org/10.1016/j.camwa.2009.10.027>.

    Data adapted from
    <https://people.sc.fsu.edu/~jburkardt/f_src/triangle_symq_rule/triangle_symq_rule.f90>.
    """

    def __init__(self, degree, symbolic=False):
        self.name = "XG({})".format(degree)
        # Scheme data ships as JSON next to this module, one file per degree.
        here = os.path.dirname(os.path.realpath(__file__))
        with open(os.path.join(here, "xg{:02d}.json".format(degree)), "r") as handle:
            scheme = json.load(handle)
        self.degree = scheme.pop("degree")
        self.bary = numpy.array(scheme["bary"])
        self.weights = numpy.array(scheme["weights"])
        # Drop the first barycentric coordinate to obtain the point set.
        self.points = self.bary[:, 1:]
class BackendAdapter(ConnectorUnit):
    """Base Backend Adapter for the connectors."""

    # Subclasses bind this to the Odoo model they adapt.
    _model_name = None
class ZFunction(SFunction):
    """Realize a Z-shaped fuzzy set (the complement of the S-function)::

        __
          \
          |\
          | \
          | |\
          | | \__
          | a |
          |   |
          delta

    Membership is 1 to the left of the transition band and 0 to the right.
    See also http://pyfuzzy.sourceforge.net/test/set/ZFunction.png

    @ivar a: center of set.
    @type a: float
    @ivar delta: absolute distance between x-values for minimum and maximum.
    @type delta: float
    """

    def __init__(self, a=0.0, delta=1.0):
        super(ZFunction, self).__init__(a, delta)

    def __call__(self, x):
        # Z(x) = 1 - S(x): evaluate the underlying S-shape and complement it.
        s_value = SFunction.__call__(self, x)
        return 1.0 - s_value
class LogMetric(messages.Message):
    """An object that describes a collected metric associated with a particular
    log. Each LogEntry that matches the filter in this metric will increase the
    value of the metric by 1.

    Fields:
      description: Description of this metric.
      filter: A filter that is applied to a LogEntry that determines whether
        the given LogEntry matches this metric.
      name: The name of this metric. This is a user defined identifier for the
        resource. Allowed characters include letters, numbers, and: / \ $ - _ .
        + ! * ' () %
    """

    # Field numbers are part of the wire format -- do not renumber.
    description = messages.StringField(1)
    filter = messages.StringField(2)
    name = messages.StringField(3)
class DummyBot(interwikidata.IWBot):
    """A dummy bot to prevent editing in production wikis."""

    def put_current(self):
        # Never persist page changes.
        return False

    def create_item(self):
        # Never create new items.
        return False

    def try_to_add(self):
        # Never add anything; report nothing done.
        return None
class AbortError(Exception):
    """Raised when a renaming operation was started but could not finish,
    leaving the image files in an intermediate state.

    The ``temp_file_path`` member is the full path to the temporary folder
    which still contains isolated but un-renamed files.
    """

    def __init__(self, msg, temp_file_path):
        super(AbortError, self).__init__(msg)
        # Keep the recovery location so callers can clean up or resume.
        self.temp_file_path = temp_file_path
class Preferences(GObject.Object, PeasGtk.Configurable):
    """Preferences for the Rhythmweb Plugin. It holds the settings for
    the plugin and also is the responsible of creating the preferences dialog.
    """

    __gtype_name__ = 'RhythmwebPreferences'
    object = GObject.property(type=GObject.Object)

    def __init__(self):
        # GSettings schema backing the plugin's configuration.
        self.settings = Gio.Settings("org.gnome.rhythmbox.plugins.rhythmweb")
        GObject.Object.__init__(self)

    def do_create_configure_widget(self):
        """Build the preferences widget from the UI file and bind the port
        field to GSettings."""
        builder = Gtk.Builder()
        builder.add_from_file(rb.find_plugin_file(self, 'ui/rhythmweb_prefs.ui'))
        port = builder.get_object( 'textfieldport')
        # Two-way bind the spin button's value to the 'port' setting.
        self.settings.bind('port', port, 'value', Gio.SettingsBindFlags.DEFAULT)
        return builder.get_object('main_notebook')
class ControllerBase(object): <NEW_LINE> <INDENT> def __init__(self, driver): <NEW_LINE> <INDENT> self.driver = driver | Top-level class for controllers.
:param driver: Instance of the driver
instantiating this controller. | 6259903ca4f1c619b294f796 |
@attrs.define(kw_only=True) <NEW_LINE> class ProfileComponent: <NEW_LINE> <INDENT> profiles: typing.Optional[profile.Profile] <NEW_LINE> profile_progression: typing.Optional[profile.ProfileProgression] <NEW_LINE> profile_currencies: typing.Optional[collections.Sequence[profile.ProfileItemImpl]] <NEW_LINE> profile_inventories: typing.Optional[collections.Sequence[profile.ProfileItemImpl]] | Represents a profile-only Bungie component.
This includes all components that falls under the profile object.
Included Components
-------------------
- `Profiles`
- `ProfileInventories`
- `ProfileCurrencies`
- `ProfileProgression` | 6259903c66673b3332c31615 |
class ElasticFilterBackend(OrderingFilter, DjangoFilterBackend): <NEW_LINE> <INDENT> def filter_queryset(self, request, queryset, view): <NEW_LINE> <INDENT> q_size = view.paginator.get_limit(request) <NEW_LINE> q_from = view.paginator.get_offset(request) <NEW_LINE> filterable = getattr(view, 'filter_fields', []) <NEW_LINE> filters = dict([(k, v) for k, v in request.GET.items() if k in filterable]) <NEW_LINE> search_filters = self._build_filters(filters=filters) <NEW_LINE> if search_filters is not None: <NEW_LINE> <INDENT> q = ElasticQuery.filtered(Query.match_all(), search_filters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> q = ElasticQuery(Query.match_all()) <NEW_LINE> <DEDENT> s = Search(search_query=q, idx=getattr(view, 'idx'), size=q_size, search_from=q_from) <NEW_LINE> json_results = s.get_json_response() <NEW_LINE> results = [] <NEW_LINE> for result in json_results['hits']['hits']: <NEW_LINE> <INDENT> new_obj = ElasticObject(initial=result['_source']) <NEW_LINE> new_obj.uuid = result['_id'] <NEW_LINE> results.append(new_obj) <NEW_LINE> <DEDENT> view.es_count = json_results['hits']['total'] <NEW_LINE> return results <NEW_LINE> <DEDENT> def _build_filters(self, filters=None): <NEW_LINE> <INDENT> if filters is None: <NEW_LINE> <INDENT> filters = {} <NEW_LINE> <DEDENT> and_filter = None <NEW_LINE> for filter_expr, value in filters.items(): <NEW_LINE> <INDENT> filter_bits = filter_expr.split('__') <NEW_LINE> field_name = filter_bits.pop(0) <NEW_LINE> filter_type = 'exact' <NEW_LINE> if len(filter_bits): <NEW_LINE> <INDENT> filter_type = filter_bits.pop() <NEW_LINE> <DEDENT> if filter_type != 'exact': <NEW_LINE> <INDENT> field_name = field_name + "." 
+ filter_type <NEW_LINE> <DEDENT> q = Query.query_string(value, fields=[field_name]).query_wrap() <NEW_LINE> if and_filter is None: <NEW_LINE> <INDENT> and_filter = AndFilter(q) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> and_filter.extend(q) <NEW_LINE> <DEDENT> <DEDENT> return and_filter | Extend L{DjangoFilterBackend} for filtering elastic resources. | 6259903c1d351010ab8f4d3a |
class V1alpha1Policy(object): <NEW_LINE> <INDENT> openapi_types = { 'level': 'str', 'stages': 'list[str]' } <NEW_LINE> attribute_map = { 'level': 'level', 'stages': 'stages' } <NEW_LINE> def __init__(self, level=None, stages=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._level = None <NEW_LINE> self._stages = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.level = level <NEW_LINE> if stages is not None: <NEW_LINE> <INDENT> self.stages = stages <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def level(self): <NEW_LINE> <INDENT> return self._level <NEW_LINE> <DEDENT> @level.setter <NEW_LINE> def level(self, level): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and level is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `level`, must not be `None`") <NEW_LINE> <DEDENT> self._level = level <NEW_LINE> <DEDENT> @property <NEW_LINE> def stages(self): <NEW_LINE> <INDENT> return self._stages <NEW_LINE> <DEDENT> @stages.setter <NEW_LINE> def stages(self, stages): <NEW_LINE> <INDENT> self._stages = stages <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> 
return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1alpha1Policy): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1alpha1Policy): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 6259903c45492302aabfd6f6 |
class PasswordResetForm(forms.Form): <NEW_LINE> <INDENT> username = forms.CharField(label=_("Username or email address")) <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> username = self.cleaned_data.get("username") <NEW_LINE> username_or_email = Q(username=username) | Q(email=username) <NEW_LINE> try: <NEW_LINE> <INDENT> user = User.objects.get(username_or_email, is_active=True) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> raise forms.ValidationError( _("Invalid username/email")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._user = user <NEW_LINE> <DEDENT> return self.cleaned_data <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> return getattr(self, "_user", None) | Validates the user's username or email for sending a login
token for authenticating to change their password. | 6259903c07d97122c4217ebc |
class StructA(TBase): <NEW_LINE> <INDENT> __slots__ = ( 's', ) <NEW_LINE> def __init__(self, s=None,): <NEW_LINE> <INDENT> self.s = s | Attributes:
- s | 6259903c21bff66bcd723e88 |
class SubnetAssociation(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SubnetAssociation, self).__init__(**kwargs) <NEW_LINE> self.id = None <NEW_LINE> self.security_rules = kwargs.get('security_rules', None) | Subnet and its custom security rules.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Subnet ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules: list[~azure.mgmt.network.v2019_12_01.models.SecurityRule] | 6259903c50485f2cf55dc1a2 |
class IPAddress(BASEV2, models.HasId, models.HasTenant): <NEW_LINE> <INDENT> __tablename__ = "quark_ip_addresses" <NEW_LINE> address_readable = sa.Column(sa.String(128), nullable=False) <NEW_LINE> address = sa.Column(custom_types.INET(), nullable=False) <NEW_LINE> subnet_id = sa.Column(sa.String(36), sa.ForeignKey("quark_subnets.id", ondelete="CASCADE")) <NEW_LINE> network_id = sa.Column(sa.String(36), sa.ForeignKey("quark_networks.id", ondelete="CASCADE")) <NEW_LINE> version = sa.Column(sa.Integer()) <NEW_LINE> allocated_at = sa.Column(sa.DateTime()) <NEW_LINE> subnet = orm.relationship("Subnet", lazy="joined") <NEW_LINE> _deallocated = sa.Column(sa.Boolean()) <NEW_LINE> @hybrid.hybrid_property <NEW_LINE> def deallocated(self): <NEW_LINE> <INDENT> return self._deallocated and not self.ports <NEW_LINE> <DEDENT> @deallocated.setter <NEW_LINE> def deallocated(self, val): <NEW_LINE> <INDENT> self._deallocated = val <NEW_LINE> self.deallocated_at = None <NEW_LINE> if val: <NEW_LINE> <INDENT> self.deallocated_at = timeutils.utcnow() <NEW_LINE> self.allocated_at = None <NEW_LINE> <DEDENT> <DEDENT> @deallocated.expression <NEW_LINE> def deallocated(cls): <NEW_LINE> <INDENT> return IPAddress._deallocated <NEW_LINE> <DEDENT> def formatted(self): <NEW_LINE> <INDENT> ip = netaddr.IPAddress(self.address_readable) <NEW_LINE> if self.version == 4: <NEW_LINE> <INDENT> return str(ip.ipv4()) <NEW_LINE> <DEDENT> return str(ip.ipv6()) <NEW_LINE> <DEDENT> deallocated_at = sa.Column(sa.DateTime()) | More closely emulate the melange version of the IP table.
We always mark the record as deallocated rather than deleting it.
Gives us an IP address owner audit log for free, essentially. | 6259903cbaa26c4b54d504c7 |
class PageRank: <NEW_LINE> <INDENT> def __init__(self, adjacency_matrix=np.ndarray, alpha: float = None, converge: float = None, pickle=None): <NEW_LINE> <INDENT> if pickle is None: <NEW_LINE> <INDENT> self._matrix = adjacency_matrix <NEW_LINE> self._alpha = alpha <NEW_LINE> self._converge = converge <NEW_LINE> self._max_iter = 100 <NEW_LINE> self._run() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not os.path.isfile(pickle): <NEW_LINE> <INDENT> raise FileNotFoundError("The file does not exist.") <NEW_LINE> <DEDENT> self._load_rank_vector(pickle) <NEW_LINE> <DEDENT> <DEDENT> def _run(self): <NEW_LINE> <INDENT> n = self._matrix.shape[0] <NEW_LINE> e_t = np.array([np.ones(n)]) <NEW_LINE> e = np.transpose(e_t) <NEW_LINE> r_0, r_1 = np.zeros(n), np.ones(n) / n <NEW_LINE> a = np.transpose([np.array(self._matrix.sum(axis=1))[:, 0] == 0]).astype(np.float64) <NEW_LINE> self._matrix = normalize(self._matrix, norm='l1', axis=1) <NEW_LINE> i = 0 <NEW_LINE> while (np.sum(np.abs(r_1 - r_0)) > self._converge) and (i < self._max_iter): <NEW_LINE> <INDENT> r_0 = r_1.copy() <NEW_LINE> r_1 = self._alpha * csr_matrix.dot(r_0, self._matrix) + (self._alpha * np.dot(r_0, a) + (1 - self._alpha) * np.dot(r_0, e)) * (e_t / n) <NEW_LINE> i += 1 <NEW_LINE> util.log("PageRank Iteration: " + str(i)) <NEW_LINE> <DEDENT> self._ranking = r_1 <NEW_LINE> <DEDENT> def get_pagerank(self, normalized: bool = True): <NEW_LINE> <INDENT> if normalized: <NEW_LINE> <INDENT> return self._ranking / float(np.sum(self._ranking)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._ranking <NEW_LINE> <DEDENT> <DEDENT> def store_rank_vector(self, pickle): <NEW_LINE> <INDENT> with open(pickle, 'wb') as f: <NEW_LINE> <INDENT> pkl.dump(self._ranking, f, -1) <NEW_LINE> <DEDENT> <DEDENT> def _load_rank_vector(self, pickle): <NEW_LINE> <INDENT> self._ranking = pkl.load(open(pickle, "rb")) | Calculates the PageRank scores for the documents contained in the Adjacency Matrix
adjacency_matrix = sparse matrix containing the weighted links between documents
alpha = probability for random teleports instead of following only links
converge = threshold for the convergence of the PageRank algorithm | 6259903c0fa83653e46f60fa |
class DiskPlugin: <NEW_LINE> <INDENT> def __init__(self, pack_path): <NEW_LINE> <INDENT> self.all_plug = {} <NEW_LINE> self.pack_path = pack_path <NEW_LINE> self.git_config = [GetRemote("")] <NEW_LINE> self._list_remote_url(self._list_plug_dir()) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _walklevel(some_dir, level): <NEW_LINE> <INDENT> some_dir = some_dir.rstrip(os.path.sep) <NEW_LINE> assert os.path.isdir(some_dir) <NEW_LINE> num_sep = some_dir.count(os.path.sep) <NEW_LINE> for root, dirs, files in os.walk(some_dir): <NEW_LINE> <INDENT> yield root, dirs, files <NEW_LINE> num_sep_this = root.count(os.path.sep) <NEW_LINE> if num_sep + level <= num_sep_this: <NEW_LINE> <INDENT> del dirs[:] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _list_plug_dir(self, level=3): <NEW_LINE> <INDENT> dir_list = {} <NEW_LINE> for dirpath, dirnames, filenames in self._walklevel(self.pack_path, level): <NEW_LINE> <INDENT> dir_list[dirpath] = 0 <NEW_LINE> <DEDENT> for elem in dir_list.keys(): <NEW_LINE> <INDENT> elem_norm = os.path.normpath(elem) <NEW_LINE> dir_list[elem] = len(elem_norm.split(os.sep)) <NEW_LINE> <DEDENT> max_depth = max(dir_list.values()) <NEW_LINE> dir_list = {k: v for k, v in dir_list.items() if v == max_depth} <NEW_LINE> return dir_list.keys() <NEW_LINE> <DEDENT> def _list_remote_url(self, dir_list): <NEW_LINE> <INDENT> self.all_plug = {} <NEW_LINE> for elem in dir_list: <NEW_LINE> <INDENT> out = 1 <NEW_LINE> self.git_config[0].local_dir = elem <NEW_LINE> out = self.git_config[0].git_cmd() <NEW_LINE> if out == 0: <NEW_LINE> <INDENT> rel_plug_path = os.path.relpath(elem, self.pack_path) <NEW_LINE> self.all_plug[rel_plug_path] = self.git_config[0].retrieve_stdout() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def filt_plug(self, plug_type): <NEW_LINE> <INDENT> plug = [] <NEW_LINE> for key in self.all_plug.keys(): <NEW_LINE> <INDENT> if key.split('/')[1] == plug_type: <NEW_LINE> <INDENT> plug.append(key) <NEW_LINE> 
<DEDENT> <DEDENT> return plug | This class allows access to the plugins locally installed on the pack
path.
Attributes:
- all_plug (dict): key = <package_name>/{start|opt}/<plugin_name>
value = remote_url, ex: https://path/to/repo
- pack_path (str): package directory | 6259903c63f4b57ef0086684 |
@register_command <NEW_LINE> class ContextCommand(StackCommand): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def name(): <NEW_LINE> <INDENT> return '%s %s' % (VoidwalkerCommand.name(), 'context') <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> super(ContextCommand, self).__init__() <NEW_LINE> <DEDENT> def execute(self, config, terminal, thread, platform_factory, argument): <NEW_LINE> <INDENT> if not thread.is_valid(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> context = platform_factory.create_context(config, thread) <NEW_LINE> previous_context = context <NEW_LINE> if len(thread.contexts()): <NEW_LINE> <INDENT> previous_context = thread.contexts()[-1] <NEW_LINE> <DEDENT> context_widget = ContextWidget(previous_context, context) <NEW_LINE> context_widget.draw(terminal, terminal.width()) | Show the current context.
If the current thread of the inferior is valid the context will be recorded and
dumped. The contents of the context can be controlled using the (void)walker
parameters | 6259903d6e29344779b01872 |
@no_db_testcase <NEW_LINE> @tags(['unit']) <NEW_LINE> class TaskGetAbsoluteUrlTest(TestCase): <NEW_LINE> <INDENT> def test_should_return_task_absolute_url(self): <NEW_LINE> <INDENT> owner = UserFactory.build(pk=1) <NEW_LINE> task = TaskFactory.build(owner=owner, author=owner) <NEW_LINE> url = task.get_absolute_url() <NEW_LINE> self.assertEqual(url, '/%s/' % owner.username) | :py:meth:`tasks.models.Task.get_absolute_url` | 6259903db5575c28eb7135d9 |
class _BaseHeterogeneousEnsemble(MetaEstimatorMixin, _BaseComposition, metaclass=ABCMeta): <NEW_LINE> <INDENT> _required_parameters = ['estimators'] <NEW_LINE> @property <NEW_LINE> def named_estimators(self): <NEW_LINE> <INDENT> return Bunch(**dict(self.estimators)) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __init__(self, estimators): <NEW_LINE> <INDENT> self.estimators = estimators <NEW_LINE> <DEDENT> def _validate_estimators(self): <NEW_LINE> <INDENT> if self.estimators is None or len(self.estimators) == 0: <NEW_LINE> <INDENT> raise ValueError( "Invalid 'estimators' attribute, 'estimators' should be a list" " of (string, estimator) tuples." ) <NEW_LINE> <DEDENT> names, estimators = zip(*self.estimators) <NEW_LINE> self._validate_names(names) <NEW_LINE> if any(est is None for est in estimators): <NEW_LINE> <INDENT> warnings.warn( "Using 'None' to drop an estimator from the ensemble is " "deprecated in 0.22 and support will be dropped in 0.24. " "Use the string 'drop' instead.", FutureWarning ) <NEW_LINE> <DEDENT> has_estimator = any(est not in (None, 'drop') for est in estimators) <NEW_LINE> if not has_estimator: <NEW_LINE> <INDENT> raise ValueError( "All estimators are dropped. At least one is required " "to be an estimator." ) <NEW_LINE> <DEDENT> is_estimator_type = (is_classifier if is_classifier(self) else is_regressor) <NEW_LINE> for est in estimators: <NEW_LINE> <INDENT> if est not in (None, 'drop') and not is_estimator_type(est): <NEW_LINE> <INDENT> raise ValueError( "The estimator {} should be a {}.".format( est.__class__.__name__, is_estimator_type.__name__[3:] ) ) <NEW_LINE> <DEDENT> <DEDENT> return names, estimators <NEW_LINE> <DEDENT> def set_params(self, **params): <NEW_LINE> <INDENT> super()._set_params('estimators', **params) <NEW_LINE> return self <NEW_LINE> <DEDENT> def get_params(self, deep=True): <NEW_LINE> <INDENT> return super()._get_params('estimators', deep=deep) | Base class for heterogeneous ensemble of learners.
Parameters
----------
estimators : list of (str, estimator) tuples
The ensemble of estimators to use in the ensemble. Each element of the
list is defined as a tuple of string (i.e. name of the estimator) and
an estimator instance. An estimator can be set to `'drop'` using
`set_params`.
Attributes
----------
estimators_ : list of estimators
The elements of the estimators parameter, having been fitted on the
training data. If an estimator has been set to `'drop'`, it will not
appear in `estimators_`. | 6259903db57a9660fecd2c9b |
class Clip(Preprocessor): <NEW_LINE> <INDENT> def __init__(self, min, max): <NEW_LINE> <INDENT> super(Clip, self).__init__() <NEW_LINE> self.min = min <NEW_LINE> self.max = max <NEW_LINE> <DEDENT> def process(self, state): <NEW_LINE> <INDENT> return np.clip(state, self.min, self.max) | Clip by min/max. | 6259903d16aa5153ce40170d |
class ContainerProviderAddView(ProviderAddView): <NEW_LINE> <INDENT> prov_type = BootstrapSelect(id='ems_type') <NEW_LINE> @property <NEW_LINE> def is_displayed(self): <NEW_LINE> <INDENT> return (super(ProviderAddView, self).is_displayed and self.navigation.currently_selected == ['Compute', 'Containers', 'Providers'] and self.title.text == 'Add New Containers Provider') | represents Container Provider Add View | 6259903ddc8b845886d547d6 |
class TestCylinderAddObject(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> radius = 5 <NEW_LINE> length = 40 <NEW_LINE> density = 20 <NEW_LINE> self.ana = CylinderModel() <NEW_LINE> self.ana.setParam('scale', 1.0) <NEW_LINE> self.ana.setParam('background', 0.0) <NEW_LINE> self.ana.setParam('sld', 1.0) <NEW_LINE> self.ana.setParam('sld_solvent', 0.0) <NEW_LINE> self.ana.setParam('radius', radius) <NEW_LINE> self.ana.setParam('length', length) <NEW_LINE> self.model = VolumeCanvas.VolumeCanvas() <NEW_LINE> cyl = VolumeCanvas.CylinderDescriptor() <NEW_LINE> self.handle = self.model.addObject(cyl) <NEW_LINE> self.model.setParam('lores_density', density) <NEW_LINE> self.model.setParam('scale' , 1.0) <NEW_LINE> self.model.setParam('background' , 0.0) <NEW_LINE> self.model.setParam('%s.contrast' % self.handle, 1.0) <NEW_LINE> self.model.setParam('%s.radius' % self.handle, radius) <NEW_LINE> self.model.setParam('%s.length' % self.handle, length) <NEW_LINE> <DEDENT> def testalongY(self): <NEW_LINE> <INDENT> self.ana.setParam('theta', math.pi/2.0) <NEW_LINE> self.ana.setParam('phi', math.pi/2.0) <NEW_LINE> self.model.setParam('%s.orientation' % self.handle, [0,0,0]) <NEW_LINE> ana_val = self.ana.runXY([0.1, 0.2]) <NEW_LINE> sim_val = self.model.getIq2D(0.1, 0.2) <NEW_LINE> self.assertTrue( math.fabs(sim_val/ana_val-1.0)<0.05 ) | Tests for oriented (2D) systems | 6259903d30c21e258be99a2d |
class OsidRuntimeProfile(abc_osid_managers.OsidRuntimeProfile, OsidProfile): <NEW_LINE> <INDENT> def supports_configuration(self): <NEW_LINE> <INDENT> raise errors.Unimplemented() | The ``OsidRuntimeProfile`` defines the service aspects of the OSID runtime service. | 6259903d82261d6c527307d4 |
class TestFileQueue(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.directory_paths = ['2010-10-08-report.xlsx', '2020-07-09-report.xlsm', 'abcdef.txt', 'spam.csv', 'abcdef.xlsx', 'spam.xlsm', 'thisisadir/'] <NEW_LINE> self.correct_paths = [True, True, False, False, True, True, False] <NEW_LINE> self.test_queue = filequeue.FileQueue() <NEW_LINE> <DEDENT> def test_filter_file(self): <NEW_LINE> <INDENT> for i in range(len(self.directory_paths)): <NEW_LINE> <INDENT> self.assertEqual(filequeue.FileQueue.filter_file(self.directory_paths[i]), self.correct_paths[i]) <NEW_LINE> <DEDENT> <DEDENT> def test_add_file(self): <NEW_LINE> <INDENT> for i in range(len(self.directory_paths)): <NEW_LINE> <INDENT> self.assertEqual(self.test_queue.add_file(self.directory_paths[i]), self.correct_paths[i]) <NEW_LINE> <DEDENT> <DEDENT> def test_empty(self): <NEW_LINE> <INDENT> self.assertTrue(self.test_queue.empty()) <NEW_LINE> for path in self.directory_paths: <NEW_LINE> <INDENT> self.test_queue.add_file(path) <NEW_LINE> self.assertFalse(self.test_queue.empty()) <NEW_LINE> <DEDENT> <DEDENT> def test_pop_file(self): <NEW_LINE> <INDENT> filtered_paths = [] <NEW_LINE> for i in range(len(self.directory_paths)): <NEW_LINE> <INDENT> if self.correct_paths[i]: <NEW_LINE> <INDENT> filtered_paths.append(self.directory_paths[i]) <NEW_LINE> <DEDENT> <DEDENT> for path in self.directory_paths: <NEW_LINE> <INDENT> self.test_queue.add_file(path) <NEW_LINE> <DEDENT> while not self.test_queue.empty(): <NEW_LINE> <INDENT> self.assertIn(self.test_queue.pop_file(), filtered_paths) <NEW_LINE> <DEDENT> with self.assertRaises(IndexError): <NEW_LINE> <INDENT> self.test_queue.pop_file() | Test Queueing functionality as xlsx/m files enter the directory. | 6259903d8a43f66fc4bf33b0 |
class MMLLexer(object): <NEW_LINE> <INDENT> STC_MML_DEFAULT, STC_MML_KEYWORD, STC_MML_KEYWORD2, STC_MML_COMMENT, STC_MML_VARIABLE, STC_MML_VOICE_TOKEN = list( range(6) ) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(MMLLexer, self).__init__() <NEW_LINE> self.alpha = "abcdefghijklmnopqrstuvwxyz" <NEW_LINE> self.digits = "0123456789" <NEW_LINE> notes = ["a", "b", "c", "d", "e", "f", "g", "r"] <NEW_LINE> self.keywords = notes + ["%s%d" % (n, i) for n in notes for i in range(10)] <NEW_LINE> stmts = ["t", "o", "v"] <NEW_LINE> self.keywords2 = ( stmts + ["t%d" % i for i in range(256)] + ["o%d" % i for i in range(16)] + ["v%d" % i for i in range(101)] ) <NEW_LINE> <DEDENT> def StyleText(self, evt): <NEW_LINE> <INDENT> stc = evt.GetEventObject() <NEW_LINE> last_styled_pos = stc.GetEndStyled() <NEW_LINE> line = stc.LineFromPosition(last_styled_pos) <NEW_LINE> start_pos = stc.PositionFromLine(line) <NEW_LINE> end_pos = evt.GetPosition() <NEW_LINE> userXYZ = voiceToken = False <NEW_LINE> while start_pos < end_pos: <NEW_LINE> <INDENT> stc.StartStyling(start_pos) <NEW_LINE> curchar = chr(stc.GetCharAt(start_pos)) <NEW_LINE> if curchar in "xyz": <NEW_LINE> <INDENT> userXYZ = True <NEW_LINE> <DEDENT> elif userXYZ and curchar in " \t\n": <NEW_LINE> <INDENT> userXYZ = False <NEW_LINE> <DEDENT> if curchar == "#": <NEW_LINE> <INDENT> voiceToken = True <NEW_LINE> <DEDENT> elif voiceToken and curchar in " \t\n": <NEW_LINE> <INDENT> voiceToken = False <NEW_LINE> <DEDENT> if userXYZ: <NEW_LINE> <INDENT> style = self.STC_MML_VARIABLE <NEW_LINE> stc.SetStyling(1, style) <NEW_LINE> start_pos += 1 <NEW_LINE> <DEDENT> elif voiceToken: <NEW_LINE> <INDENT> style = self.STC_MML_VOICE_TOKEN <NEW_LINE> stc.SetStyling(1, style) <NEW_LINE> start_pos += 1 <NEW_LINE> <DEDENT> elif curchar in self.alpha: <NEW_LINE> <INDENT> start = stc.WordStartPosition(start_pos, True) <NEW_LINE> end = stc.WordEndPosition(start, True) <NEW_LINE> word = stc.GetTextRange(start, end) <NEW_LINE> if word in 
self.keywords: <NEW_LINE> <INDENT> style = self.STC_MML_KEYWORD <NEW_LINE> stc.SetStyling(len(word), style) <NEW_LINE> <DEDENT> elif word in self.keywords2: <NEW_LINE> <INDENT> style = self.STC_MML_KEYWORD2 <NEW_LINE> stc.SetStyling(len(word), style) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> style = self.STC_MML_DEFAULT <NEW_LINE> stc.SetStyling(len(word), style) <NEW_LINE> <DEDENT> start_pos += len(word) <NEW_LINE> <DEDENT> elif curchar == ";": <NEW_LINE> <INDENT> eol = stc.GetLineEndPosition(stc.LineFromPosition(start_pos)) <NEW_LINE> style = self.STC_MML_COMMENT <NEW_LINE> stc.SetStyling(eol - start_pos, style) <NEW_LINE> start_pos = eol <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> style = self.STC_MML_DEFAULT <NEW_LINE> stc.SetStyling(1, style) <NEW_LINE> start_pos += 1 | Defines simple interface for custom lexer objects. | 6259903dd99f1b3c44d068c8 |
class UnknowMimeType(Exception): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'MimeType is undefined for the file "'+self.value()+'"' | Exception throw if no mime type is found | 6259903dbaa26c4b54d504c9 |
class CreateUserFollowingView(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = UserFollowing.objects.all() <NEW_LINE> serializer_class = UserFollowingSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticated,IsOwner) <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> data = self.request.data <NEW_LINE> serializer = self.get_serializer(data=request.data) <NEW_LINE> ids=self.kwargs['userid'] <NEW_LINE> print ("="+ids) <NEW_LINE> test = {"token":""} <NEW_LINE> return Response(test, status=200) | This class defines the create behavior of our rest api. | 6259903d30c21e258be99a2e |
class TestUpdatePreviewDiff(TestCaseWithFactory): <NEW_LINE> <INDENT> layer = LaunchpadFunctionalLayer <NEW_LINE> def _updatePreviewDiff(self, merge_proposal): <NEW_LINE> <INDENT> diff_text = ( "=== modified file 'sample.py'\n" "--- sample\t2009-01-15 23:44:22 +0000\n" "+++ sample\t2009-01-29 04:10:57 +0000\n" "@@ -19,7 +19,7 @@\n" " from zope.interface import implements\n" "\n" " from storm.expr import Desc, Join, LeftJoin\n" "-from storm.references import Reference\n" "+from storm.locals import Int, Reference\n" " from sqlobject import ForeignKey, IntCol\n" "\n" " from lp.services.config import config\n") <NEW_LINE> diff_stat = {'sample': (1, 1)} <NEW_LINE> login_person(merge_proposal.registrant) <NEW_LINE> merge_proposal.updatePreviewDiff( diff_text, u"source_id", u"target_id") <NEW_LINE> transaction.commit() <NEW_LINE> return diff_text, diff_stat <NEW_LINE> <DEDENT> def test_new_diff(self): <NEW_LINE> <INDENT> merge_proposal = self.factory.makeBranchMergeProposal() <NEW_LINE> diff_text, diff_stat = self._updatePreviewDiff(merge_proposal) <NEW_LINE> self.assertEqual(diff_text, merge_proposal.preview_diff.text) <NEW_LINE> self.assertEqual(diff_stat, merge_proposal.preview_diff.diffstat) <NEW_LINE> <DEDENT> def test_update_diff(self): <NEW_LINE> <INDENT> merge_proposal = self.factory.makeBranchMergeProposal() <NEW_LINE> login_person(merge_proposal.registrant) <NEW_LINE> diff_bytes = ''.join(unified_diff('', 'random text')) <NEW_LINE> merge_proposal.updatePreviewDiff(diff_bytes, u"a", u"b") <NEW_LINE> transaction.commit() <NEW_LINE> preview_diff_id = removeSecurityProxy(merge_proposal.preview_diff).id <NEW_LINE> diff_id = removeSecurityProxy(merge_proposal.preview_diff).diff_id <NEW_LINE> diff_text, diff_stat = self._updatePreviewDiff(merge_proposal) <NEW_LINE> self.assertEqual(diff_text, merge_proposal.preview_diff.text) <NEW_LINE> self.assertEqual(diff_stat, merge_proposal.preview_diff.diffstat) <NEW_LINE> self.assertNotEqual( preview_diff_id, 
removeSecurityProxy(merge_proposal.preview_diff).id) <NEW_LINE> self.assertNotEqual( diff_id, removeSecurityProxy(merge_proposal.preview_diff).diff_id) | Test the updateMergeDiff method of BranchMergeProposal. | 6259903d71ff763f4b5e89be |
class EntitySet(Mapping, Generic[T]): <NEW_LINE> <INDENT> pprint.sorted = lambda v, key=None: v <NEW_LINE> def __init__(self, entities: Union[Iterable[T], Dict[str, T]]): <NEW_LINE> <INDENT> if type(entities) is dict: <NEW_LINE> <INDENT> self._map = entities <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._map = {e.nickname: e for e in entities} <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'EntitySet({pprint.pformat(self._map)})' <NEW_LINE> <DEDENT> def __getitem__(self, key: str) -> T: <NEW_LINE> <INDENT> if type(key) is not str: <NEW_LINE> <INDENT> raise TypeError(f'Only strings may be used as indices, not {type(key)!r}') <NEW_LINE> <DEDENT> assert type(key) is str, repr(type(key)) <NEW_LINE> return self._map[key] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._map.values()) <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return type(item) is str and item in self._map <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._map) <NEW_LINE> <DEDENT> def __eq__(self, other: 'EntitySet'): <NEW_LINE> <INDENT> if not isinstance(other, EntitySet): <NEW_LINE> <INDENT> raise TypeError(f'Cannot compare EntitySet with {type(other)!r}') <NEW_LINE> <DEDENT> return self._map == other._map <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(frozenset(self._map)) <NEW_LINE> <DEDENT> def __add__(self, other) -> 'EntitySet[T]': <NEW_LINE> <INDENT> if type(other) is not type(self): <NEW_LINE> <INDENT> raise TypeError(f'Can only concatenate EntitySet (not {type(other)}) with EntitySet.') <NEW_LINE> <DEDENT> return EntitySet({e for e in self} | {e for e in other}) <NEW_LINE> <DEDENT> def __iadd__(self, other) -> 'EntitySet[T]': <NEW_LINE> <INDENT> return self + other <NEW_LINE> <DEDENT> def keys(self) -> KeysView: <NEW_LINE> <INDENT> return self._map.keys() <NEW_LINE> <DEDENT> def items(self) -> ItemsView: <NEW_LINE> <INDENT> return self._map.items() 
<NEW_LINE> <DEDENT> @cached <NEW_LINE> def of_type(self, type_: Type[F]) -> 'EntitySet[F]': <NEW_LINE> <INDENT> return EntitySet(filter(lambda e: isinstance(e, type_), self)) <NEW_LINE> <DEDENT> @cached <NEW_LINE> def reindex(self, on: str) -> 'EntitySet[T]': <NEW_LINE> <INDENT> return EntitySet({getattr(entity, on): entity for entity in self}) <NEW_LINE> <DEDENT> def where(self, op=operator.eq, **kwargs) -> 'EntitySet[T]': <NEW_LINE> <INDENT> assert len(kwargs) == 1 <NEW_LINE> field, value = next(iter(kwargs.items())) <NEW_LINE> if not callable(getattr(self.first, field, None)): <NEW_LINE> <INDENT> return self.reindex(field) <NEW_LINE> <DEDENT> return EntitySet(e for e in self if op(vars(e).get(field) or getattr(e, field)(), value)) <NEW_LINE> <DEDENT> def unique(self, **kwargs) -> Optional[T]: <NEW_LINE> <INDENT> assert len(kwargs) == 1 <NEW_LINE> field, value = next(iter(kwargs.items())) <NEW_LINE> return self.reindex(field).get(value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def first(self) -> Optional[T]: <NEW_LINE> <INDENT> return next(iter(self), None) | An immutable collection of entities, indexed by nickname. | 6259903d287bf620b6272e0d |
class SessionContext(InstanceContext): <NEW_LINE> <INDENT> def __init__(self, version, service_sid, sid): <NEW_LINE> <INDENT> super(SessionContext, self).__init__(version) <NEW_LINE> self._solution = {'service_sid': service_sid, 'sid': sid, } <NEW_LINE> self._uri = '/Services/{service_sid}/Sessions/{sid}'.format(**self._solution) <NEW_LINE> self._interactions = None <NEW_LINE> self._participants = None <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> payload = self._version.fetch(method='GET', uri=self._uri, ) <NEW_LINE> return SessionInstance( self._version, payload, service_sid=self._solution['service_sid'], sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> return self._version.delete(method='DELETE', uri=self._uri, ) <NEW_LINE> <DEDENT> def update(self, date_expiry=values.unset, ttl=values.unset, status=values.unset, fail_on_participant_conflict=values.unset): <NEW_LINE> <INDENT> data = values.of({ 'DateExpiry': serialize.iso8601_datetime(date_expiry), 'Ttl': ttl, 'Status': status, 'FailOnParticipantConflict': fail_on_participant_conflict, }) <NEW_LINE> payload = self._version.update(method='POST', uri=self._uri, data=data, ) <NEW_LINE> return SessionInstance( self._version, payload, service_sid=self._solution['service_sid'], sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def interactions(self): <NEW_LINE> <INDENT> if self._interactions is None: <NEW_LINE> <INDENT> self._interactions = InteractionList( self._version, service_sid=self._solution['service_sid'], session_sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._interactions <NEW_LINE> <DEDENT> @property <NEW_LINE> def participants(self): <NEW_LINE> <INDENT> if self._participants is None: <NEW_LINE> <INDENT> self._participants = ParticipantList( self._version, service_sid=self._solution['service_sid'], session_sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._participants <NEW_LINE> <DEDENT> def __repr__(self): 
<NEW_LINE> <INDENT> context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items()) <NEW_LINE> return '<Twilio.Proxy.V1.SessionContext {}>'.format(context) | PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. | 6259903d21bff66bcd723e8c |
class Rescale(object): <NEW_LINE> <INDENT> def __init__(self, output_size): <NEW_LINE> <INDENT> assert isinstance(output_size, (int, tuple)) <NEW_LINE> self.output_size = output_size <NEW_LINE> <DEDENT> def __call__(self, sample): <NEW_LINE> <INDENT> image, labels = sample['image'], sample['label'] <NEW_LINE> h, w = image.shape[:2] <NEW_LINE> if isinstance(self.output_size, int): <NEW_LINE> <INDENT> if h > w: <NEW_LINE> <INDENT> new_h, new_w = self.output_size * h / w, self.output_size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_h, new_w = self.output_size, self.output_size * w / h <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> new_h, new_w = self.output_size <NEW_LINE> <DEDENT> new_h, new_w = int(new_h), int(new_w) <NEW_LINE> img = transform.resize(image, (new_h, new_w)) <NEW_LINE> tsfm_labels = np.zeros((img.shape[0], img.shape[1], 2)) <NEW_LINE> for i in range(labels.shape[-1]): <NEW_LINE> <INDENT> landmarks = labels[..., i] <NEW_LINE> points = list(np.where(landmarks == 1)) <NEW_LINE> points[0] = np.array(points[0] * new_h / h, dtype=np.int32) <NEW_LINE> points[1] = np.array(points[1] * new_w / w, dtype=np.int32) <NEW_LINE> buff = np.zeros((img.shape[0], img.shape[1])) <NEW_LINE> try: <NEW_LINE> <INDENT> buff[tuple(points)] = 1 <NEW_LINE> tsfm_labels[..., i] = buff <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> import ipdb; ipdb.set_trace() <NEW_LINE> <DEDENT> <DEDENT> return {'image': img, 'label': tsfm_labels} | Rescale the image in a sample to a given size.
Args:
output_size (tuple or int): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same. | 6259903d21a7993f00c67190 |
class LinuxBannerDetector(DetectionMethod): <NEW_LINE> <INDENT> name = "linux" <NEW_LINE> LINUX_TEMPLATE = re.compile( r"Linux version (\d+\.\d+\.\d+[^ ]+)") <NEW_LINE> find_dtb_impl = linux_common.LinuxFindDTB <NEW_LINE> def Keywords(self): <NEW_LINE> <INDENT> return ["Linux version "] <NEW_LINE> <DEDENT> def DetectFromHit(self, hit, offset, address_space): <NEW_LINE> <INDENT> guess = address_space.read(offset-100, 300) <NEW_LINE> m = self.LINUX_TEMPLATE.search(guess) <NEW_LINE> if m: <NEW_LINE> <INDENT> distribution = "LinuxGeneric" <NEW_LINE> if "Ubuntu" in guess: <NEW_LINE> <INDENT> distribution = "Ubuntu" <NEW_LINE> <DEDENT> if "Debian" in guess: <NEW_LINE> <INDENT> distribution = "Debian" <NEW_LINE> <DEDENT> profile_name = "%s/%s" % (distribution, m.group(1)) <NEW_LINE> profile = self.VerifyProfile(profile_name) <NEW_LINE> if profile: <NEW_LINE> <INDENT> self.session.logging.info( "Detected %s: %s", profile_name, m.group(0)) <NEW_LINE> return profile | Detect a linux kernel from its banner text. | 6259903ddc8b845886d547d8 |
class ReportModelSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> def check_job_answers(self, job): <NEW_LINE> <INDENT> questions = job.questions.all() <NEW_LINE> answers_list = [] <NEW_LINE> request = self.context["request"] <NEW_LINE> for question in questions: <NEW_LINE> <INDENT> category = question.category <NEW_LINE> field_name = "question_{}".format(question.id) <NEW_LINE> field_value = request.data.get(field_name) <NEW_LINE> if not field_value: <NEW_LINE> <INDENT> raise serializers.ValidationError("问题(ID:{}):{},回答未填写".format(question.id, question.title)) <NEW_LINE> <DEDENT> if question.is_unique: <NEW_LINE> <INDENT> if category == "text": <NEW_LINE> <INDENT> answer_exist = Answer.objects.filter(question=question, answer=field_value).first() <NEW_LINE> if answer_exist: <NEW_LINE> <INDENT> raise serializers.ValidationError( "问题(ID:{}):{},回答值需唯一!值({})已经提交".format(question.id, question.title, field_value)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if category == "radio": <NEW_LINE> <INDENT> choice = question.choices.filter(option=field_value).first() <NEW_LINE> if not choice: <NEW_LINE> <INDENT> raise serializers.ValidationError( "问题(ID:{}):{},没有{}这个选项".format(question.id, question.title, field_value)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> answer = Answer(question=question, option=field_value, answer=choice.value) <NEW_LINE> answers_list.append(answer) <NEW_LINE> <DEDENT> <DEDENT> elif category == "checkbox": <NEW_LINE> <INDENT> option_field = field_value.join(",") <NEW_LINE> answer_field_list = [] <NEW_LINE> for v in field_value: <NEW_LINE> <INDENT> choice = question.choices.filter(option=v).first() <NEW_LINE> if not choice: <NEW_LINE> <INDENT> raise serializers.ValidationError( "问题(ID:{}):{},没有{}这个选项".format(question.id, question.title, v)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> answer_field_list.append(choice.value) <NEW_LINE> <DEDENT> <DEDENT> answer_field = answer_field_list.join(",") <NEW_LINE> answer = Answer(question=question, 
option=option_field, answer=answer_field) <NEW_LINE> answers_list.append(answer) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> answer = Answer(question=question, answer=field_value) <NEW_LINE> answers_list.append(answer) <NEW_LINE> <DEDENT> <DEDENT> return answers_list <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> job = validated_data["job"] <NEW_LINE> request = self.context["request"] <NEW_LINE> if 'HTTP_X_FORWARDED_FOR' in request.META: <NEW_LINE> <INDENT> ip = request.META['HTTP_X_FORWARDED_FOR'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ip = request.META['REMOTE_ADDR'] <NEW_LINE> <DEDENT> validated_data["ip"] = ip <NEW_LINE> user = request.user <NEW_LINE> if job.is_authenticated: <NEW_LINE> <INDENT> if not user.is_authenticated: <NEW_LINE> <INDENT> raise serializers.ValidationError("需要登录才可以回答") <NEW_LINE> <DEDENT> validated_data["user"] = user <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if user.is_authenticated: <NEW_LINE> <INDENT> validated_data["user"] = user <NEW_LINE> <DEDENT> <DEDENT> answers = self.check_job_answers(job=job) <NEW_LINE> instance = super().create(validated_data=validated_data) <NEW_LINE> for answer in answers: <NEW_LINE> <INDENT> answer.save() <NEW_LINE> instance.answers.add(answer) <NEW_LINE> <DEDENT> return instance <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Report <NEW_LINE> fields = ("id", "job", "user", "ip", "time_added", "answers") | Report Model Serializer | 6259903dd10714528d69ef9d |
class BroadcastServerFactory(WebSocketServerFactory): <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> WebSocketServerFactory.__init__(self, url) <NEW_LINE> self.clients = [] <NEW_LINE> self.tickcount = 0 <NEW_LINE> self.tick() <NEW_LINE> <DEDENT> def tick(self): <NEW_LINE> <INDENT> self.tickcount += 1 <NEW_LINE> self.broadcast("tick %d from server" % self.tickcount) <NEW_LINE> reactor.callLater(1, self.tick) <NEW_LINE> <DEDENT> def register(self, client): <NEW_LINE> <INDENT> if client not in self.clients: <NEW_LINE> <INDENT> print("registered client {}".format(client.peer)) <NEW_LINE> self.clients.append(client) <NEW_LINE> <DEDENT> <DEDENT> def unregister(self, client): <NEW_LINE> <INDENT> if client in self.clients: <NEW_LINE> <INDENT> print("unregistered client {}".format(client.peer)) <NEW_LINE> self.clients.remove(client) <NEW_LINE> <DEDENT> <DEDENT> def broadcast(self, msg): <NEW_LINE> <INDENT> print("broadcasting message '{}' ..".format(msg)) <NEW_LINE> for c in self.clients: <NEW_LINE> <INDENT> c.sendMessage(msg.encode('utf8')) <NEW_LINE> print("message sent to {}".format(c.peer)) | Simple broadcast server broadcasting any message it receives to all
currently connected clients. | 6259903d6fece00bbacccbd2 |
class Employee (models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=256) <NEW_LINE> address = models.CharField(max_length=256) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name + ' @' + self.address | A simple Person model | 6259903dd6c5a102081e3349 |
class Client(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100, default='') <NEW_LINE> email = models.EmailField(max_length=70) <NEW_LINE> phone_number = models.TextField(max_length=9, blank=True) <NEW_LINE> newsletter_agreement = models.BooleanField(default=False, blank=True) <NEW_LINE> signup_agreement = models.BooleanField(default=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{self.name.title()} {self.email}' | Client/Customer class - atm created after filling signing up for training | 6259903d07f4c71912bb0656 |
class TestCustomersApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = swagger_client.api.customers_api.CustomersApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_customer_by_id(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_customers(self): <NEW_LINE> <INDENT> pass | CustomersApi unit test stubs | 6259903d23849d37ff8522dd |
class UnbindIpsFromNatGatewayResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TaskId = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TaskId = params.get("TaskId") <NEW_LINE> self.RequestId = params.get("RequestId") | UnbindIpsFromNatGateway返回参数结构体
| 6259903d30c21e258be99a30 |
class CreatedByBaseAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> readonly_fields = ('created_by', 'created_date') <NEW_LINE> def save_formset(self, request, form, formset, change): <NEW_LINE> <INDENT> instances = formset.save(commit=False) <NEW_LINE> for instance in instances: <NEW_LINE> <INDENT> if not change: <NEW_LINE> <INDENT> instance.created_by = request.user <NEW_LINE> instance.save() <NEW_LINE> <DEDENT> <DEDENT> formset.save() <NEW_LINE> <DEDENT> def save_model(self, request, obj, form, change): <NEW_LINE> <INDENT> if not change: <NEW_LINE> <INDENT> obj.created_by = request.user <NEW_LINE> <DEDENT> obj.save() <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> abstract = True | Base class for handling created by stuff | 6259903d24f1403a926861df |
class Transf_gen(distributions.rv_continuous): <NEW_LINE> <INDENT> def __init__(self, kls, func, funcinv, *args, **kwargs): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> self.funcinv = funcinv <NEW_LINE> self.numargs = kwargs.pop('numargs', 0) <NEW_LINE> name = kwargs.pop('name','transfdist') <NEW_LINE> longname = kwargs.pop('longname','Non-linear transformed distribution') <NEW_LINE> extradoc = kwargs.pop('extradoc',None) <NEW_LINE> a = kwargs.pop('a', -np.inf) <NEW_LINE> b = kwargs.pop('b', np.inf) <NEW_LINE> self.decr = kwargs.pop('decr', False) <NEW_LINE> self.u_args, self.u_kwargs = get_u_argskwargs(**kwargs) <NEW_LINE> self.kls = kls <NEW_LINE> super(Transf_gen,self).__init__(a=a, b=b, name = name, longname = longname, extradoc = extradoc) <NEW_LINE> <DEDENT> def _rvs(self, *args, **kwargs): <NEW_LINE> <INDENT> self.kls._size = self._size <NEW_LINE> return self.funcinv(self.kls._rvs(*args)) <NEW_LINE> <DEDENT> def _cdf(self,x,*args, **kwargs): <NEW_LINE> <INDENT> if not self.decr: <NEW_LINE> <INDENT> return self.kls._cdf(self.funcinv(x),*args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1.0 - self.kls._cdf(self.funcinv(x),*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def _ppf(self, q, *args, **kwargs): <NEW_LINE> <INDENT> if not self.decr: <NEW_LINE> <INDENT> return self.func(self.kls._ppf(q,*args, **kwargs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.func(self.kls._ppf(1-q,*args, **kwargs)) | a class for non-linear monotonic transformation of a continuous random variable
| 6259903d63f4b57ef0086686 |
class TextPairParser(CorpusParser): <NEW_LINE> <INDENT> class Class(SamplePairClass): <NEW_LINE> <INDENT> UNSPECIFIED = -1 <NEW_LINE> DIFFERENT_AUTHORS = 0 <NEW_LINE> SAME_AUTHOR = 1 <NEW_LINE> <DEDENT> def __init__(self, chunk_tokenizer: Tokenizer, corpus_path: str = None): <NEW_LINE> <INDENT> super().__init__(chunk_tokenizer, corpus_path) <NEW_LINE> self._input_files = {} <NEW_LINE> self._input_authors = {} <NEW_LINE> self._is_prepared = False <NEW_LINE> <DEDENT> async def _prepare(self): <NEW_LINE> <INDENT> if self._is_prepared: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not os.path.isdir(self.corpus_path): <NEW_LINE> <INDENT> raise IOError("Corpus '{}' not found".format(self.corpus_path)) <NEW_LINE> <DEDENT> dirs = os.listdir(self.corpus_path) <NEW_LINE> for d in dirs: <NEW_LINE> <INDENT> dir_path = os.path.realpath(os.path.join(self.corpus_path, d)) <NEW_LINE> if not os.path.isdir(dir_path): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> files = sorted(os.listdir(dir_path)) <NEW_LINE> for f in files: <NEW_LINE> <INDENT> file_path = os.path.realpath(os.path.join(dir_path, f)) <NEW_LINE> if not os.path.isfile(file_path) or not f.endswith(".txt"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self._input_files[file_path] = d <NEW_LINE> if d not in self._input_authors: <NEW_LINE> <INDENT> self._input_authors[d] = [] <NEW_LINE> <DEDENT> self._input_authors[d].append(file_path) <NEW_LINE> <DEDENT> <DEDENT> self._is_prepared = True <NEW_LINE> <DEDENT> async def __aiter__(self) -> AsyncGenerator[SamplePair, None]: <NEW_LINE> <INDENT> await self._prepare() <NEW_LINE> num_combinations = math.factorial(len(self._input_files)) // 2 // math.factorial(len(self._input_files) - 2) <NEW_LINE> pair_num = 0 <NEW_LINE> for f1, f2 in combinations(self._input_files.keys(), 2): <NEW_LINE> <INDENT> f1_contents = await self.await_file(f1) <NEW_LINE> f2_contents = await self.await_file(f2) <NEW_LINE> cls = self.Class.SAME_AUTHOR if self._input_files[f1] == 
self._input_files[f2] else self.Class.DIFFERENT_AUTHORS <NEW_LINE> pair = SamplePairImpl(cls, self.chunk_tokenizer) <NEW_LINE> await pair.chunk([f1_contents], [f2_contents]) <NEW_LINE> group_id = PairBuildingProgressEvent.generate_group_id(["a:" + f1] + ["b:" + f2]) <NEW_LINE> await EventBroadcaster().publish("onPairGenerated", PairBuildingProgressEvent(group_id, pair_num, num_combinations, pair, [f1], [f2]), self.__class__) <NEW_LINE> yield pair <NEW_LINE> pair_num += 1 | Parser for generating all possible combinations of text pairs and labeling them according
to whether they were written by the same author or not.
Expects a directory structure where there is one folder for each author containing
at least two samples of their work.
Example:
+ Ernest_Hemingway
|__ + The_Torrents_of_Spring.txt
|__ + Islands_in_the_Stream.txt
|__ + The_Garden_of_Eden.txt
+ William_Faulkner
|__ + Soldier's_Pay.txt
|__ + Light_in_August.txt
File and folder names can be chosen arbitrarily, but the book sample files must end in .txt.
Events published by this class:
* `onPairGenerated`: [type PairBuildingProgressEvent]
fired when a pair has been generated | 6259903d94891a1f408ba009 |
class Model(model.Model): <NEW_LINE> <INDENT> def __init__(self, graph=None, embedding_size=1024): <NEW_LINE> <INDENT> super(Model, self).__init__(graph=graph, embedding_size=embedding_size) <NEW_LINE> <DEDENT> def make_embedding(self, x): <NEW_LINE> <INDENT> with self.graph.as_default(): <NEW_LINE> <INDENT> x = players.Convolution1D(128, 5, border_mode='valid', subsample_length=2, activation='relu')(x) <NEW_LINE> x = players.Convolution1D(512, 5, border_mode='valid', subsample_length=2, activation='relu')(x) <NEW_LINE> x = players.Convolution1D(self.embedding_size, 5, border_mode='valid', activation='relu')(x) <NEW_LINE> x = tf.reduce_max(x, reduction_indices=1) <NEW_LINE> <DEDENT> return x <NEW_LINE> <DEDENT> def conjecture_embedding(self, conjectures): <NEW_LINE> <INDENT> return self.make_embedding(conjectures) <NEW_LINE> <DEDENT> def axiom_embedding(self, axioms): <NEW_LINE> <INDENT> return self.make_embedding(axioms) <NEW_LINE> <DEDENT> def classifier(self, conjecture_embedding, axiom_embedding): <NEW_LINE> <INDENT> with self.graph.as_default(): <NEW_LINE> <INDENT> net = tf.concat((conjecture_embedding, axiom_embedding), 1) <NEW_LINE> net = layers.relu(net, 1024) <NEW_LINE> logits = layers.linear(net, 2) <NEW_LINE> <DEDENT> return logits | Convolutional model for word embedded input. | 6259903d21a7993f00c67192 |
class AttrDict(dict): <NEW_LINE> <INDENT> def __init__(self, init={}): <NEW_LINE> <INDENT> dict.__init__(self, init) <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self.__dict__.items() <NEW_LINE> <DEDENT> def __setstate__(self, items): <NEW_LINE> <INDENT> for key, val in items: <NEW_LINE> <INDENT> self.__dict__[key] = val <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%s)" % (self.__class__.__name__, dict.__repr__(self)) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> return super(AttrDict, self).__setitem__(key, value) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> return super(AttrDict, self).__getitem__(name) <NEW_LINE> <DEDENT> def __delitem__(self, name): <NEW_LINE> <INDENT> return super(AttrDict, self).__delitem__(name) <NEW_LINE> <DEDENT> __getattr__ = __getitem__ <NEW_LINE> __setattr__ = __setitem__ <NEW_LINE> def copy(self): <NEW_LINE> <INDENT> ch = AttrDict(self) <NEW_LINE> return ch | A dictionary with attribute-style access.
It maps attribute access to the real dictionary.
Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/473786 | 6259903ddc8b845886d547da |
class PrintTagsPrimary(Primary): <NEW_LINE> <INDENT> def __call__(self, context): <NEW_LINE> <INDENT> path = context['path'] <NEW_LINE> tag_name = context.get('args', None) <NEW_LINE> verbosity = context.get('verbosity', 0) <NEW_LINE> try: <NEW_LINE> <INDENT> m = context['metadata'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> m = read_exif(path, verbosity) <NEW_LINE> context['exif.metadata'] = m <NEW_LINE> <DEDENT> if m is None: <NEW_LINE> <INDENT> return context <NEW_LINE> <DEDENT> if tag_name is not None: <NEW_LINE> <INDENT> tags = [(k, m[k].raw_value) for k in fnmatch.filter(m.exif_keys, tag_name)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tags = [(k, m[k].raw_value) for k in m.exif_keys] <NEW_LINE> <DEDENT> pairs = ["%(k)s: %(v)s" % {'k': k, 'v': v} for k, v in tags] <NEW_LINE> context['buffer'].append('\n'.join(pairs)) <NEW_LINE> return context | Print all tags available in an image
Always return context | 6259903d23e79379d538d724 |
class LikeAPIView(LikeDislikeMixin): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated,) <NEW_LINE> def post(self, request, **kwargs): <NEW_LINE> <INDENT> article = self.get_object() <NEW_LINE> article.like(request.user) <NEW_LINE> return Response( self.get_response('You like this article.'), status=status.HTTP_201_CREATED) <NEW_LINE> <DEDENT> def delete(self, request, **kwargs): <NEW_LINE> <INDENT> article = self.get_object() <NEW_LINE> article.un_like(request.user) <NEW_LINE> return Response( self.get_response('You no longer like this article.'), status=status.HTTP_200_OK) | This view enables liking and un-liking articles. | 6259903db57a9660fecd2ca0 |
class ResolveDomainResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Data = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Data = params.get("Data") <NEW_LINE> self.RequestId = params.get("RequestId") | ResolveDomain返回参数结构体
| 6259903d379a373c97d9a24e |
@method_decorator(csrf_exempt, name="dispatch") <NEW_LINE> class WebHook(View): <NEW_LINE> <INDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> body = smart_str(request.body) <NEW_LINE> data = json.loads(body) <NEW_LINE> if data['id'] == TEST_EVENT_ID: <NEW_LINE> <INDENT> logger.info("Test webhook received: {}".format(data['type'])) <NEW_LINE> return HttpResponse() <NEW_LINE> <DEDENT> if Event.stripe_objects.exists_by_json(data): <NEW_LINE> <INDENT> EventProcessingException.objects.create( data=data, message="Duplicate event record", traceback="" ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> event = Event._create_from_stripe_object(data, save=False) <NEW_LINE> event.validate() <NEW_LINE> if djstripe_settings.WEBHOOK_EVENT_CALLBACK: <NEW_LINE> <INDENT> djstripe_settings.WEBHOOK_EVENT_CALLBACK(event) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> event.process() | A view used to handle webhooks. | 6259903d596a897236128ec3 |
class TestPaymentsSurcharge(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return PaymentsSurcharge( id = '09f0b4cc-7880-11e9-8f9e-2a86e4085a59', value = null ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return PaymentsSurcharge( ) <NEW_LINE> <DEDENT> <DEDENT> def testPaymentsSurcharge(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | PaymentsSurcharge unit test stubs | 6259903d24f1403a926861e0 |
class ETradeAlerts(object): <NEW_LINE> <INDENT> def __init__( self, client_key, client_secret, resource_owner_key, resource_owner_secret, dev=True, ): <NEW_LINE> <INDENT> self.client_key = client_key <NEW_LINE> self.client_secret = client_secret <NEW_LINE> self.resource_owner_key = resource_owner_key <NEW_LINE> self.resource_owner_secret = resource_owner_secret <NEW_LINE> suffix = "apisb" if dev else "api" <NEW_LINE> self.base_url = r"https://%s.etrade.com/v1/user/alerts" % suffix <NEW_LINE> self.session = OAuth1Session( self.client_key, self.client_secret, self.resource_owner_key, self.resource_owner_secret, signature_type="AUTH_HEADER", ) <NEW_LINE> <DEDENT> def list_alerts(self, resp_format="xml") -> dict: <NEW_LINE> <INDENT> api_url = "%s%s" % (self.base_url, ".json" if resp_format == "json" else "",) <NEW_LINE> LOGGER.debug(api_url) <NEW_LINE> req = self.session.get(api_url) <NEW_LINE> req.raise_for_status() <NEW_LINE> LOGGER.debug(req.text) <NEW_LINE> return xmltodict.parse(req.text) if resp_format.lower() == "xml" else req.json() <NEW_LINE> <DEDENT> def list_alert_details(self, alert_id, resp_format="xml") -> dict: <NEW_LINE> <INDENT> api_url = "%s%s/%s" % ( self.base_url, ".json" if resp_format == "json" else "", alert_id, ) <NEW_LINE> LOGGER.debug(api_url) <NEW_LINE> req = self.session.get(api_url) <NEW_LINE> req.raise_for_status() <NEW_LINE> LOGGER.debug(req.text) <NEW_LINE> return xmltodict.parse(req.text) if resp_format.lower() == "xml" else req.json() <NEW_LINE> <DEDENT> def delete_alert(self, alert_id, resp_format="xml"): <NEW_LINE> <INDENT> api_url = "%s%s/%s" % ( self.base_url, ".json" if resp_format == "json" else "", alert_id, ) <NEW_LINE> LOGGER.debug(api_url) <NEW_LINE> req = self.session.delete(api_url) <NEW_LINE> req.raise_for_status() <NEW_LINE> LOGGER.debug(req.text) <NEW_LINE> return xmltodict.parse(req.text) if resp_format.lower() == "xml" else req.json() | :description: Object to retrieve alerts
:param client_key: Client key provided by Etrade
:type client_key: str, required
:param client_secret: Client secret provided by Etrade
:type client_secret: str, required
:param resource_owner_key: Resource key from :class:`pyetrade.authorization.ETradeOAuth`
:type resource_owner_key: str, required
:param resource_owner_secret: Resource secret from
:class:`pyetrade.authorization.ETradeOAuth`
:type resource_owner_secret: str, required
:param dev: Defines Sandbox (True) or Live (False) ETrade, defaults to True
:type dev: bool, optional
:EtradeRef: https://apisb.etrade.com/docs/api/user/api-alert-v1.html | 6259903d287bf620b6272e12 |
class EnemiesFactory: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._enemies = {} <NEW_LINE> <DEDENT> def get_enemy(self, enemy_type, power): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> enemy = self._enemies[enemy_type] <NEW_LINE> enemy.set_power(power) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if enemy_type == EnemyTypes.vampire: <NEW_LINE> <INDENT> enemy = Vampire(power) <NEW_LINE> <DEDENT> elif enemy_type == EnemyTypes.zombie: <NEW_LINE> <INDENT> enemy = Zombie(power) <NEW_LINE> <DEDENT> self._enemies[enemy_type] = enemy <NEW_LINE> <DEDENT> return enemy | Generates enemies with different powers | 6259903db5575c28eb7135dc |
class AttemptEvent(models.Model): <NEW_LINE> <INDENT> time_created = models.DateTimeField(auto_now_add=True) <NEW_LINE> username = models.CharField(max_length=USERNAME_MAX_LENGTH) <NEW_LINE> ip_address = models.GenericIPAddressField(max_length=30) <NEW_LINE> user_agent = models.CharField(max_length=200, null=True, blank=True) <NEW_LINE> result = models.CharField(max_length=4, choices=AUTHENTICATION_RESULT) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Authentication attempt' <NEW_LINE> verbose_name_plural = 'Authentication attempts' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[%s] Authentication attempt from \"%s\" using username \"%s\"" % (self.time_created, self.ip_address, self.username) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_request(request): <NEW_LINE> <INDENT> from .util import get_user_ip_address <NEW_LINE> attempt_event = AttemptEvent() <NEW_LINE> if 'HTTP_USER_AGENT' in request.META: <NEW_LINE> <INDENT> attempt_event.user_agent = request.META['HTTP_USER_AGENT'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attempt_event.user_agent = None <NEW_LINE> <DEDENT> attempt_event.ip_address = get_user_ip_address(request) <NEW_LINE> return attempt_event | Authentication attempt events are created when credentials are validated through
the authentication system. | 6259903d21a7993f00c67194 |
class LCApplication(LCUtilityApplication): <NEW_LINE> <INDENT> def __init__(self, paramdict = None): <NEW_LINE> <INDENT> super(LCApplication, self).__init__(paramdict) <NEW_LINE> self.steeringFileVersion = "" <NEW_LINE> self.forgetAboutInput = False <NEW_LINE> self._importLocation = "ILCDIRAC.Workflow.Modules" <NEW_LINE> <DEDENT> def setSteeringFileVersion(self, version): <NEW_LINE> <INDENT> self.steeringFileVersion = version <NEW_LINE> return S_OK() <NEW_LINE> <DEDENT> def setForgetAboutInput(self, flag = True): <NEW_LINE> <INDENT> self.forgetAboutInput = flag <NEW_LINE> return S_OK() <NEW_LINE> <DEDENT> def _getSpecificAppParameters(self, stepdef): <NEW_LINE> <INDENT> stepdef.addParameter(Parameter("ForgetInput", False, "boolean", "", "", False, False, "Do not overwrite input steering")) <NEW_LINE> if self.steeringFileVersion: <NEW_LINE> <INDENT> stepdef.addParameter(Parameter("SteeringFileVers", "", "string", "", "", False, False, "SteeringFile version to use")) <NEW_LINE> <DEDENT> return S_OK() <NEW_LINE> <DEDENT> def _setSpecificAppParameters(self, stepinst): <NEW_LINE> <INDENT> stepinst.setValue( "ForgetInput", self.forgetAboutInput) <NEW_LINE> if self.steeringFileVersion: <NEW_LINE> <INDENT> stepinst.setValue("SteeringFileVers", self.steeringFileVersion) <NEW_LINE> <DEDENT> return S_OK() <NEW_LINE> <DEDENT> def _doSomethingWithJob(self): <NEW_LINE> <INDENT> if self.steeringFileVersion: <NEW_LINE> <INDENT> self._job._addSoftware( "steeringfiles", self.steeringFileVersion ) <NEW_LINE> <DEDENT> return S_OK() | LC specific implementation of the applications
| 6259903d004d5f362081f8f8 |
class EarlyStoppingMin(EarlyStopping): <NEW_LINE> <INDENT> def __init__(self, min_epochs=0, max_epochs=None, **kwargs): <NEW_LINE> <INDENT> super(EarlyStoppingMin, self).__init__(**kwargs) <NEW_LINE> if not isinstance(min_epochs, int) or min_epochs < 0: <NEW_LINE> <INDENT> raise ValueError('min_epochs must be an integer >= 0') <NEW_LINE> <DEDENT> self.min_epochs = int(min_epochs) <NEW_LINE> self.max_epochs = int(max_epochs) if max_epochs is not None else None <NEW_LINE> <DEDENT> def on_epoch_end(self, epoch, logs=None): <NEW_LINE> <INDENT> if epoch < self.min_epochs: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> current = self.get_monitor_value(logs) <NEW_LINE> if current is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.monitor_op(current - self.min_delta, self.best): <NEW_LINE> <INDENT> self.best = current <NEW_LINE> self.wait = 0 <NEW_LINE> if self.restore_best_weights: <NEW_LINE> <INDENT> self.best_weights = self.model.get_weights() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.wait += 1 <NEW_LINE> if self.wait >= self.patience: <NEW_LINE> <INDENT> self.stopped_epoch = epoch <NEW_LINE> self.model.stop_training = True <NEW_LINE> if self.restore_best_weights: <NEW_LINE> <INDENT> if self.verbose > 0: <NEW_LINE> <INDENT> print('Restoring model weights from the end of ' 'the best epoch') <NEW_LINE> <DEDENT> self.model.set_weights(self.best_weights) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if self.max_epochs is not None and epoch >= self.max_epochs: <NEW_LINE> <INDENT> self.stopped_epoch = epoch <NEW_LINE> self.model.stop_training = True <NEW_LINE> if self.restore_best_weights: <NEW_LINE> <INDENT> if self.verbose > 0: <NEW_LINE> <INDENT> print('Maximum epochs reached; restoring model weights from the end of ' 'the best epoch') <NEW_LINE> <DEDENT> self.model.set_weights(self.best_weights) <NEW_LINE> <DEDENT> <DEDENT> if self.verbose > 1: <NEW_LINE> <INDENT> print('EarlyStoppingMin: %d epochs since last minimum' % self.wait) | Extends the 
keras.callbacks.EarlyStopping class to provide the option to force training for a minimum number of
epochs or restore the best weights after the maximum epochs have been reached. | 6259903dbe383301e0254a3e |
class Chrome(DumperTemplate): <NEW_LINE> <INDENT> procnames = ["chrome.exe"] <NEW_LINE> def run(self): <NEW_LINE> <INDENT> self.commands('chromehistory', 'chromevisits', 'chromedownloadchains', 'chromesearchterms', 'chromedownloads', 'chromecookies', options="--output=csv", failmode="warn") | Dumper for the common application Google Chrome. | 6259903db57a9660fecd2ca2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.