code: string, lengths 4 to 4.48k
docstring: string, lengths 1 to 6.45k
_id: string, length 24
class TestSubmissionOrder(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testSubmissionOrder(self): <NEW_LINE> <INDENT> pass
SubmissionOrder unit test stubs
62599084fff4ab517ebcf352
class DescribeStackRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(DescribeStackRequest, self).__init__( '/regions/{regionId}/stacks/{stackId}', 'GET', header, version) <NEW_LINE> self.parameters = parameters
Query resource stack details.
62599084a8370b77170f1f08
class TimeStampedModel(models.Model): <NEW_LINE> <INDENT> date_created = AutoCreatedField(_('created')) <NEW_LINE> date_modified = AutoLastModifiedField(_('modified')) <NEW_LINE> is_valid = models.BooleanField(_('is_valid'), default=True) <NEW_LINE> is_private = models.BooleanField(_('is_private')) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> def natural_key(self): <NEW_LINE> <INDENT> return self.id <NEW_LINE> <DEDENT> def display_date(self): <NEW_LINE> <INDENT> return localtime(self.date_created) <NEW_LINE> <DEDENT> display_date.short_description = _('Created Time')
An abstract base class model that provides self-updating ``created`` and ``modified`` fields.
625990847c178a314d78e988
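Illustrative sketch (editor's addition, not a dataset row): a concrete Django model inheriting the abstract TimeStampedModel above. The Article model, its title field, and the note about django-model-utils are assumptions made for the example.

from django.db import models
from django.utils.translation import gettext_lazy as _

# TimeStampedModel is the abstract base class from the row above
# (its AutoCreatedField/AutoLastModifiedField come from e.g. django-model-utils).
class Article(TimeStampedModel):  # hypothetical concrete model
    title = models.CharField(_('title'), max_length=200)
    # date_created, date_modified, is_valid and is_private are inherited;
    # because Meta.abstract is True, only Article gets its own database table.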
class FlipbookPagePolyLineAnnotator(FlipbookPageAnnotator): <NEW_LINE> <INDENT> TYPE_FIELD_CLASSES = FlipbookPageAnnotator.TYPE_FIELD_CLASSES.copy() <NEW_LINE> TYPE_FIELD_CLASSES[PolyLinePointPicker.POINT_LIST_TYPE] = _PointListField <NEW_LINE> def closeEvent(self, e): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del self.fields <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass
Ex: from ris_widget.ris_widget import RisWidget from ris_widget.examples.flipbook_page_poly_line_annotator import PolyLinePointPicker, FlipbookPagePolyLineAnnotator import numpy rw = RisWidget() xr = numpy.linspace(0, 2*numpy.pi, 65536, True) xg = xr + 2*numpy.pi/3 xb = xr + 4*numpy.pi/3 im = (((numpy.dstack(list(map(numpy.sin, (xr, xg, xb)))) + 1) / 2) * 65535).astype(numpy.uint16) rw.flipbook_pages.append(im.swapaxes(0,1).reshape(256,256,3)) fpa = FlipbookPagePolyLineAnnotator( rw.flipbook, 'annotation', ( ('foo', str, 'default_text'), ('bar', int, -11, -20, 35), ('baz', float, -1.1, -1000, 1101.111), ('choice', tuple, 'za', list('aaaa basd casder eadf ZZza aasdfer lo ad bas za e12 1'.split())), ('toggle', bool, False), ('line_points', PolyLinePointPicker.POINT_LIST_TYPE, [(10,100),(100,10)], rw.main_scene.layer_stack_item, rw.main_view) ) ) fpa.show()
625990843346ee7daa338400
class BinaryExpr(Expr): <NEW_LINE> <INDENT> def __init__(self, e1, e2): <NEW_LINE> <INDENT> self.e1 = e1 <NEW_LINE> self.e2 = e2 <NEW_LINE> <DEDENT> def Eval(self, context): <NEW_LINE> <INDENT> evaled_e1 = self.e1.Eval(context) <NEW_LINE> evaled_e2 = self.e2.Eval(context) <NEW_LINE> method_name = self.__class__.method_name <NEW_LINE> return evaled_e1.InvokeMethod(method_name, [evaled_e2])
A general expression for binary operations like +, *, etc. This class should be extended with a class field 'method_name' containing the name of the attribute that should be called.
62599084099cdd3c63676198
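Illustrative sketch (editor's addition, not a dataset row): how BinaryExpr is meant to be extended according to its docstring. The subclass names and the 'Add'/'Mul' method names are assumptions for the example.

class AddExpr(BinaryExpr):  # hypothetical subclass
    # Eval() evaluates e1 and e2, then calls evaled_e1.InvokeMethod('Add', [evaled_e2]).
    method_name = 'Add'


class MulExpr(BinaryExpr):  # hypothetical subclass
    method_name = 'Mul'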
class NewTicketForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Ticket <NEW_LINE> fields = ('subject', 'description', 'fk_status', 'fk_priority', 'fk_requester', 'fk_agent', 'fk_attachments')
New ticket form
62599084bf627c535bcb300f
class Usuario(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> nombre = models.CharField(max_length=60, blank=True) <NEW_LINE> apellido = models.CharField(max_length=100, blank=True) <NEW_LINE> tipo = models.PositiveSmallIntegerField(default=0) <NEW_LINE> genero_codigo = models.BooleanField(default=False) <NEW_LINE> codigo_gen = models.IntegerField(null=True, blank=True) <NEW_LINE> fecha_gen = models.DateField(null=True, blank=True) <NEW_LINE> fecha_nacimiento = models.DateField(null=True, blank=True) <NEW_LINE> last_login = models.DateTimeField(null=True, blank=True) <NEW_LINE> token = models.CharField(max_length=255, null=True, blank=True) <NEW_LINE> password = models.CharField(max_length=128) <NEW_LINE> cia = models.ForeignKey('Cia', related_name='cias', null=True, blank=True, on_delete=models.PROTECT) <NEW_LINE> tz = models.CharField(max_length=40, blank=True) <NEW_LINE> objects = UsuarioManager() <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = ['name'] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = "usuario" <NEW_LINE> <DEDENT> def get_full_name(self): <NEW_LINE> <INDENT> return self.nombre + ' ' + self.apellido <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email
A user profile in our system.
625990843317a56b869bf2e2
class CreatePort(CreateCommand): <NEW_LINE> <INDENT> resource = 'port' <NEW_LINE> log = logging.getLogger(__name__ + '.CreatePort') <NEW_LINE> def add_known_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument( '--name', help='name of this port') <NEW_LINE> parser.add_argument( '--admin-state-down', default=True, action='store_false', help='set admin state up to false') <NEW_LINE> parser.add_argument( '--admin_state_down', action='store_false', help=argparse.SUPPRESS) <NEW_LINE> parser.add_argument( '--mac-address', help='mac address of this port') <NEW_LINE> parser.add_argument( '--mac_address', help=argparse.SUPPRESS) <NEW_LINE> parser.add_argument( '--device-id', help='device id of this port') <NEW_LINE> parser.add_argument( '--device_id', help=argparse.SUPPRESS) <NEW_LINE> parser.add_argument( '--fixed-ip', action='append', help='desired IP for this port: ' 'subnet_id=<name_or_id>,ip_address=<ip>, ' '(This option can be repeated.)') <NEW_LINE> parser.add_argument( '--fixed_ip', action='append', help=argparse.SUPPRESS) <NEW_LINE> parser.add_argument( '--security-group', metavar='SECURITY_GROUP', default=[], action='append', dest='security_groups', help='security group associated with the port ' '(This option can be repeated)') <NEW_LINE> parser.add_argument( 'network_id', metavar='NETWORK', help='Network id or name this port belongs to') <NEW_LINE> <DEDENT> def args2body(self, parsed_args): <NEW_LINE> <INDENT> _network_id = quantumv20.find_resourceid_by_name_or_id( self.get_client(), 'network', parsed_args.network_id) <NEW_LINE> body = {'port': {'admin_state_up': parsed_args.admin_state_down, 'network_id': _network_id, }, } <NEW_LINE> if parsed_args.mac_address: <NEW_LINE> <INDENT> body['port'].update({'mac_address': parsed_args.mac_address}) <NEW_LINE> <DEDENT> if parsed_args.device_id: <NEW_LINE> <INDENT> body['port'].update({'device_id': parsed_args.device_id}) <NEW_LINE> <DEDENT> if parsed_args.tenant_id: <NEW_LINE> <INDENT> body['port'].update({'tenant_id': parsed_args.tenant_id}) <NEW_LINE> <DEDENT> if parsed_args.name: <NEW_LINE> <INDENT> body['port'].update({'name': parsed_args.name}) <NEW_LINE> <DEDENT> ips = [] <NEW_LINE> if parsed_args.fixed_ip: <NEW_LINE> <INDENT> for ip_spec in parsed_args.fixed_ip: <NEW_LINE> <INDENT> ip_dict = utils.str2dict(ip_spec) <NEW_LINE> if 'subnet_id' in ip_dict: <NEW_LINE> <INDENT> subnet_name_id = ip_dict['subnet_id'] <NEW_LINE> _subnet_id = quantumv20.find_resourceid_by_name_or_id( self.get_client(), 'subnet', subnet_name_id) <NEW_LINE> ip_dict['subnet_id'] = _subnet_id <NEW_LINE> <DEDENT> ips.append(ip_dict) <NEW_LINE> <DEDENT> <DEDENT> if ips: <NEW_LINE> <INDENT> body['port'].update({'fixed_ips': ips}) <NEW_LINE> <DEDENT> _sgids = [] <NEW_LINE> for sg in parsed_args.security_groups: <NEW_LINE> <INDENT> _sgids.append(quantumv20.find_resourceid_by_name_or_id( self.get_client(), 'security_group', sg)) <NEW_LINE> <DEDENT> if _sgids: <NEW_LINE> <INDENT> body['port']['security_groups'] = _sgids <NEW_LINE> <DEDENT> return body
Create a port for a given tenant.
625990845fdd1c0f98e5fabd
class Profile(object): <NEW_LINE> <INDENT> def __init__(self, name='New Profile', path=None, current=False): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.tracks = [] <NEW_LINE> self.path = path <NEW_LINE> self.to_delete = False <NEW_LINE> <DEDENT> def new_track(self, options): <NEW_LINE> <INDENT> new = Track(options) <NEW_LINE> return self.add_track(new) <NEW_LINE> <DEDENT> def add_track(self, track): <NEW_LINE> <INDENT> self.tracks.append(track) <NEW_LINE> return track <NEW_LINE> <DEDENT> def remove_track(self, track): <NEW_LINE> <INDENT> self.tracks.remove(track) <NEW_LINE> return track <NEW_LINE> <DEDENT> def reorder_tracks(self, order=[]): <NEW_LINE> <INDENT> new_order = [] <NEW_LINE> for index in range(len(order)): <NEW_LINE> <INDENT> new_order.append(self.tracks(order[index]).copy()) <NEW_LINE> <DEDENT> for track in self.tracks: <NEW_LINE> <INDENT> if self.tracks.index(track) not in order: <NEW_LINE> <INDENT> new_order.append(track.copy()) <NEW_LINE> <DEDENT> <DEDENT> self.tracks = new_order <NEW_LINE> <DEDENT> def set_path(self, path): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> <DEDENT> def import_tracks_from_parser(self, parser): <NEW_LINE> <INDENT> for section in parser.sections(): <NEW_LINE> <INDENT> if section.count('track'): <NEW_LINE> <INDENT> self.tracks.append(Track(parser.items(section))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def import_from_file(self, filepath): <NEW_LINE> <INDENT> parser = ConfigParser.ConfigParser() <NEW_LINE> parser.read(filepath) <NEW_LINE> self.name = parser.get('data','name') <NEW_LINE> self.path = filepath <NEW_LINE> self.import_tracks_from_parser(parser) <NEW_LINE> <DEDENT> def export_to_file(self, filepath=None): <NEW_LINE> <INDENT> if not filepath: <NEW_LINE> <INDENT> filepath = self.path <NEW_LINE> <DEDENT> parser = ConfigParser.ConfigParser() <NEW_LINE> parser.add_section('data') <NEW_LINE> parser.set('data','name',self.name) <NEW_LINE> index = 1 <NEW_LINE> for track in self.tracks: <NEW_LINE> <INDENT> section = 'track'+str(index) <NEW_LINE> parser.add_section(section) <NEW_LINE> for key in track.itemlist: <NEW_LINE> <INDENT> parser.set(section,key,track[key]) <NEW_LINE> <DEDENT> index+=1 <NEW_LINE> <DEDENT> configfile = open(filepath, 'wb') <NEW_LINE> parser.write(configfile) <NEW_LINE> configfile.close() <NEW_LINE> <DEDENT> def get_video_areas(self): <NEW_LINE> <INDENT> areas = {} <NEW_LINE> index = 1 <NEW_LINE> for track in self.tracks: <NEW_LINE> <INDENT> if track.device not in ['pulse', 'audiotest']: <NEW_LINE> <INDENT> areas[index] = track.name <NEW_LINE> index +=1 <NEW_LINE> <DEDENT> <DEDENT> return areas
Contains the name, location and tracks of a profile. Allows importing from and exporting to files, and also importing from conf. Tracks can be created; other features include reordering tracks.
6259908426068e7796d4e47e
class email_type(models.Model): <NEW_LINE> <INDENT> TYPE_LIST = ( ('internet', _('Internet')), ('x400', _('x400')), ('pref', _('Preferred')), ('other', _('Other IANA address type')), ) <NEW_LINE> name = models.CharField( _('Email type'), max_length=8, choices=TYPE_LIST, default='internet' ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Email Type') <NEW_LINE> verbose_name_plural = _('Email Types') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.get_name_display()
Represents a type of email in the hCard microformat. See: http://microformats.org/wiki/hcard#adr_tel_email_types Also see: http://www.ietf.org/rfc/rfc2426.txt (quoted below) Used to specify the format or preference of the electronic mail address. The TYPE parameter values can include: "internet" to indicate an Internet addressing type, "x400" to indicate a X.400 addressing type or "pref" to indicate a preferred-use email address when more than one is specified. Another IANA registered address type can also be specified. The default email type is "internet". A non-standard value can also be specified.
6259908450812a4eaa621963
class TestImageUsage(DefaultSiteTestCase): <NEW_LINE> <INDENT> cached = True <NEW_LINE> @property <NEW_LINE> def imagepage(self): <NEW_LINE> <INDENT> if hasattr(self.__class__, '_image_page'): <NEW_LINE> <INDENT> return self.__class__._image_page <NEW_LINE> <DEDENT> mysite = self.get_site() <NEW_LINE> page = pywikibot.Page(mysite, mysite.siteinfo['mainpage']) <NEW_LINE> try: <NEW_LINE> <INDENT> imagepage = next(iter(page.imagelinks())) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> raise unittest.SkipTest( 'No images on the main page of site {0!r}'.format(mysite)) <NEW_LINE> <DEDENT> pywikibot.output(u'site_tests.TestImageUsage found %s on %s' % (imagepage, page)) <NEW_LINE> self.__class__._image_page = imagepage <NEW_LINE> return imagepage <NEW_LINE> <DEDENT> def test_image_usage(self): <NEW_LINE> <INDENT> mysite = self.get_site() <NEW_LINE> imagepage = self.imagepage <NEW_LINE> iu = list(mysite.imageusage(imagepage, total=10)) <NEW_LINE> self.assertLessEqual(len(iu), 10) <NEW_LINE> self.assertTrue(all(isinstance(link, pywikibot.Page) for link in iu)) <NEW_LINE> <DEDENT> def test_image_usage_in_namespaces(self): <NEW_LINE> <INDENT> mysite = self.get_site() <NEW_LINE> imagepage = self.imagepage <NEW_LINE> for using in mysite.imageusage(imagepage, namespaces=[3, 4], total=5): <NEW_LINE> <INDENT> self.assertIsInstance(using, pywikibot.Page) <NEW_LINE> self.assertIn(imagepage, list(using.imagelinks())) <NEW_LINE> <DEDENT> <DEDENT> @allowed_failure_if(os.environ.get('TRAVIS', 'false') == 'true') <NEW_LINE> def test_image_usage_in_redirects(self): <NEW_LINE> <INDENT> mysite = self.get_site() <NEW_LINE> imagepage = self.imagepage <NEW_LINE> for using in mysite.imageusage(imagepage, filterredir=True, total=5): <NEW_LINE> <INDENT> self.assertIsInstance(using, pywikibot.Page) <NEW_LINE> self.assertTrue(using.isRedirectPage()) <NEW_LINE> <DEDENT> <DEDENT> def test_image_usage_no_redirect_filter(self): <NEW_LINE> <INDENT> mysite = self.get_site() <NEW_LINE> imagepage = self.imagepage <NEW_LINE> for using in mysite.imageusage(imagepage, filterredir=False, total=5): <NEW_LINE> <INDENT> self.assertIsInstance(using, pywikibot.Page) <NEW_LINE> if using.isRedirectPage(): <NEW_LINE> <INDENT> unittest_print( '{0} is a redirect, although just non-redirects were ' 'searched. See also T75120'.format(using)) <NEW_LINE> <DEDENT> self.assertFalse(using.isRedirectPage())
Test cases for Site.imageusage method.
62599084d8ef3951e32c8bfd
class LogEntry(): <NEW_LINE> <INDENT> __slots__ = ['_timestamp', '_key', '_node_id', '_src', '_path', ] <NEW_LINE> def __init__(self, timestamp, key, node_id, source, pathToDoc): <NEW_LINE> <INDENT> self._timestamp = timestamp <NEW_LINE> if key is None: <NEW_LINE> <INDENT> raise UpaxError('LogEntry key may not be None') <NEW_LINE> <DEDENT> hashtype = len(key) == 40 <NEW_LINE> self._key = key <NEW_LINE> if hashtype == HashTypes.SHA1: <NEW_LINE> <INDENT> check_hex_node_id_160(self._key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> check_hex_node_id_256(self._key) <NEW_LINE> <DEDENT> if node_id is None: <NEW_LINE> <INDENT> raise UpaxError('LogEntry nodeID may not be None') <NEW_LINE> <DEDENT> self._node_id = node_id <NEW_LINE> if hashtype == HashTypes.SHA1: <NEW_LINE> <INDENT> check_hex_node_id_160(self._node_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> check_hex_node_id_256(self._node_id) <NEW_LINE> <DEDENT> self._src = source <NEW_LINE> self._path = pathToDoc <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return self._key <NEW_LINE> <DEDENT> @property <NEW_LINE> def node_id(self): <NEW_LINE> <INDENT> return self._node_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> return self._path <NEW_LINE> <DEDENT> @property <NEW_LINE> def src(self): <NEW_LINE> <INDENT> return self._src <NEW_LINE> <DEDENT> @property <NEW_LINE> def timestamp(self): <NEW_LINE> <INDENT> return self._timestamp <NEW_LINE> <DEDENT> @property <NEW_LINE> def hashtype(self): <NEW_LINE> <INDENT> return len(self._key) == 40 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.hashtype == HashTypes.SHA1: <NEW_LINE> <INDENT> fmt = '%013u %40s %40s "%s" %s\n' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fmt = '%013u %64s %64s "%s" %s\n' <NEW_LINE> <DEDENT> return fmt % (self._timestamp, self._key, self._node_id, self._src, self._path) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, LogEntry) and self._timestamp == other.timestamp and self._key == other.key and self._node_id == other.node_id and self._src == other.src and self._path == other.path <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def equals(self, other): <NEW_LINE> <INDENT> return self.__eq__(other)
The entry made upon adding a file to the Upax content-keyed data store. This consists of a timestamp; a SHA content key, the hash of the contents of the file; the NodeID identifying the contributor; its source (which may be a program name); and a UNIX/POSIX path associated with the file. The path will normally be relative.
625990847c178a314d78e989
class FindSymbolSystemFunction(SystemFunction): <NEW_LINE> <INDENT> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> cls.__name__ = 'FIND-SYMBOL' <NEW_LINE> return object.__new__(cls) <NEW_LINE> <DEDENT> def __call__(self, forms, var_env, func_env, macro_env): <NEW_LINE> <INDENT> args = self.eval_forms(forms, var_env, func_env, macro_env) <NEW_LINE> symbol_designator = args.car <NEW_LINE> package_designator = args.cdr.car <NEW_LINE> if package_designator is Null(): <NEW_LINE> <INDENT> return PackageManager.find_symbol(symbol_designator=symbol_designator) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return PackageManager.find_symbol(symbol_designator=symbol_designator, package_designator=package_designator)
FindSymbol locates a symbol whose name is symbol_designator in a package. If a symbol named symbol_designator is found in package, directly or by inheritance, the symbol found is returned as the first value; the second value is as follows: :INTERNAL If the symbol is present in package as an internal symbol. :EXTERNAL If the symbol is present in package as an external symbol. :INHERITED If the symbol is inherited by package through use-package, but is not present in package. If no such symbol is accessible in package, both values are nil.
625990843346ee7daa338401
class PersonModelViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Person.objects.all() <NEW_LINE> serializer_class = PersonModelSerializer <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> filter_backends = (DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter) <NEW_LINE> search_fields = ('^name', '^last_name', '^mother_last_name') <NEW_LINE> filter_fields = ('gender', 'marital_status') <NEW_LINE> ordering_fields = ('last_name', 'mother_last_name', 'name')
API endpoint that allows users to be viewed or edited.
62599084099cdd3c63676199
class OpenStorageClusterServicer(object): <NEW_LINE> <INDENT> def InspectCurrent(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
OpenStorageCluster service provides the methods to manage the cluster
62599084be7bc26dc9252bf5
class ResearchMeasurementYearListAPIView(ListAPIView): <NEW_LINE> <INDENT> queryset = ResearchMeasurementYear.objects.all() <NEW_LINE> serializer_class = research_measurement_year_serializers['ResearchMeasurementYearListSerializer'] <NEW_LINE> filter_backends = (DjangoFilterBackend,) <NEW_LINE> filter_class = ResearchMeasurementYearListFilter <NEW_LINE> pagination_class = APILimitOffsetPagination
API list view. Gets all records via the API.
62599084283ffb24f3cf53df
class ObjectViewSet(ViewSet, ListObjectMixin, CreateObjectMixin, UpdateObjectMixin, DeleteObjectMixin): <NEW_LINE> <INDENT> pass
Interactions for database objects: List, Create, Update, Delete.
62599084aad79263cf4302f9
class LinearClassifier(BinaryClassifier): <NEW_LINE> <INDENT> def __init__(self, opts): <NEW_LINE> <INDENT> self.opts = opts <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.weights = 0 <NEW_LINE> <DEDENT> def online(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "w=" + repr(self.weights) <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> if type(self.weights) == int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return dot(self.weights, X) <NEW_LINE> <DEDENT> <DEDENT> def getRepresentation(self): <NEW_LINE> <INDENT> return self.weights <NEW_LINE> <DEDENT> def train(self, X, Y): <NEW_LINE> <INDENT> lossFn = self.opts['lossFunction'] <NEW_LINE> lambd = self.opts['lambda'] <NEW_LINE> numIter = self.opts['numIter'] <NEW_LINE> stepSize = self.opts['stepSize'] <NEW_LINE> self.weights = zeros(X.T.shape[0]) <NEW_LINE> def func(w): <NEW_LINE> <INDENT> Yhat = dot(w,X.T) <NEW_LINE> obj = lossFn.loss(Y,Yhat) + (lambd/2)*power(norm(w),2) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def grad(w): <NEW_LINE> <INDENT> Yhat = dot(w,X.T) <NEW_LINE> gr = lossFn.lossGradient(X,Y,Yhat) + lambd*w <NEW_LINE> return gr <NEW_LINE> <DEDENT> w, trajectory = gd(func, grad, self.weights, numIter, stepSize) <NEW_LINE> self.weights = w <NEW_LINE> self.trajectory = trajectory
This class defines an arbitrary linear classifier parameterized by a loss function and a ||w||^2 regularizer.
625990847b180e01f3e49e04
class APIClientArgs: <NEW_LINE> <INDENT> def __init__(self, port=None, fingerprint=None, sid=None, server="127.0.0.1", http_debug_level=0, api_calls=None, debug_file="", proxy_host=None, proxy_port=8080, api_version="1.1", unsafe=False, unsafe_auto_accept=False): <NEW_LINE> <INDENT> self.port = port <NEW_LINE> self.fingerprint = fingerprint <NEW_LINE> self.sid = sid <NEW_LINE> self.server = server <NEW_LINE> self.http_debug_level = http_debug_level <NEW_LINE> self.api_calls = api_calls if api_calls else [] <NEW_LINE> self.debug_file = debug_file <NEW_LINE> self.proxy_host = proxy_host <NEW_LINE> self.proxy_port = proxy_port <NEW_LINE> self.api_version = api_version <NEW_LINE> self.unsafe = unsafe <NEW_LINE> self.unsafe_auto_accept = unsafe_auto_accept
This class provides arguments for APIClient configuration. All the arguments are configured with their default values.
625990844c3428357761bdf9
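Illustrative sketch (editor's addition, not a dataset row): constructing APIClientArgs with a couple of overrides while the remaining arguments keep the defaults described in the docstring. The server address is a placeholder.

args = APIClientArgs(server='203.0.113.10', unsafe_auto_accept=True)
# Arguments that were not passed keep their defaults from __init__.
print(args.server, args.api_version, args.proxy_port)  # 203.0.113.10 1.1 8080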
class ComponentTests(ossie.utils.testing.ScaComponentTestCase): <NEW_LINE> <INDENT> def testScaBasicBehavior(self): <NEW_LINE> <INDENT> execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False) <NEW_LINE> execparams = dict([(x.id, any.from_any(x.value)) for x in execparams]) <NEW_LINE> self.launch(execparams) <NEW_LINE> self.assertNotEqual(self.comp, None) <NEW_LINE> self.assertEqual(self.comp.ref._non_existent(), False) <NEW_LINE> self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True) <NEW_LINE> expectedProps = [] <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True)) <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True)) <NEW_LINE> props = self.comp.query([]) <NEW_LINE> props = dict((x.id, any.from_any(x.value)) for x in props) <NEW_LINE> for expectedProp in expectedProps: <NEW_LINE> <INDENT> self.assertEquals(props.has_key(expectedProp.id), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_uses(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_usesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_provides(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_providesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a(port.get_repid()), True) <NEW_LINE> <DEDENT> self.comp.start() <NEW_LINE> self.comp.stop() <NEW_LINE> self.comp.releaseObject()
Test for all component implementations in skiphead_cc
6259908492d797404e3898fc
class GraphQLNonNull(GraphQLWrappingType): <NEW_LINE> <INDENT> is_non_null_type = True <NEW_LINE> kind = "NON_NULL" <NEW_LINE> def __eq__(self, other: Any) -> bool: <NEW_LINE> <INDENT> return self is other or ( isinstance(other, GraphQLNonNull) and self.gql_type == other.gql_type ) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "GraphQLNonNull(gql_type={!r})".format(self.gql_type) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return "{!s}!".format(self.gql_type)
Definition of a GraphQL non-null container.
6259908455399d3f05628054
class DataFormatCreator(DataFormatReader): <NEW_LINE> <INDENT> implements(IDataFormatReader, IDataFormatCreator) <NEW_LINE> __allow_loadable__ = True <NEW_LINE> def serialize(self, request): <NEW_LINE> <INDENT> collection = self.__parent__ <NEW_LINE> model = collection.create_transient_subitem() <NEW_LINE> return self.serialize_item(model, request) <NEW_LINE> <DEDENT> def create_and_deserialize(self, params, request): <NEW_LINE> <INDENT> structure = self.structure <NEW_LINE> collection = self.__parent__ <NEW_LINE> def _setter(resource): <NEW_LINE> <INDENT> self._default_item_deserializer(resource, structure, params, request) <NEW_LINE> <DEDENT> return collection.create_subitem(setter_fn=_setter, wrap=True) <NEW_LINE> <DEDENT> def create(self, request): <NEW_LINE> <INDENT> data = {} <NEW_LINE> if hasattr(self.structure, "create"): <NEW_LINE> <INDENT> data = self.structure.create(self, request) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resource = self.do_create(request) <NEW_LINE> data = {'item_id': resource.model.id} <NEW_LINE> <DEDENT> if getattr(self.structure, '__return_updated_data__', False): <NEW_LINE> <INDENT> data.update(self.read(request)) <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def do_create(self, request): <NEW_LINE> <INDENT> params = request.json_body <NEW_LINE> params = dottedish.api.unflatten(params.items()) <NEW_LINE> if hasattr(self.structure, "before_item_created"): <NEW_LINE> <INDENT> self.structure.before_item_created(self, params, request) <NEW_LINE> <DEDENT> resource = self.create_and_deserialize(params, request) <NEW_LINE> if hasattr(self.structure, "after_item_created"): <NEW_LINE> <INDENT> self.structure.after_item_created(resource, params, request) <NEW_LINE> <DEDENT> if hasattr(resource, "after_item_created"): <NEW_LINE> <INDENT> resource.after_item_created(self, params, request) <NEW_LINE> <DEDENT> return resource
A data format which can create subitems in a collection. It also implements IDataFormatReader because the client needs to be able to load defaults etc.
62599084a05bb46b3848bec7
class WritableSerializerMethodField(serializers.SerializerMethodField): <NEW_LINE> <INDENT> def __init__(self, get_method_name, set_method_name, *args, **kwargs): <NEW_LINE> <INDENT> self.read_only = False <NEW_LINE> self.get_method_name = get_method_name <NEW_LINE> self.method_name = get_method_name <NEW_LINE> self.set_method_name = set_method_name <NEW_LINE> super(WritableSerializerMethodField, self).__init__(get_method_name, *args, **kwargs) <NEW_LINE> <DEDENT> def field_from_native(self, data, files, field_name, into): <NEW_LINE> <INDENT> return getattr(self.parent, self.set_method_name)(data, files, field_name, into)
A field that gets and sets its value by calling a method on the serializer it's attached to.
6259908444b2445a339b76fc
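Illustrative sketch (editor's addition, not a dataset row): wiring the writable method field into a pre-3.0 Django REST Framework serializer, as the docstring describes. The BookSerializer name and the get/set method bodies are assumptions.

from rest_framework import serializers


class BookSerializer(serializers.Serializer):  # hypothetical serializer
    rating = WritableSerializerMethodField('get_rating', 'set_rating')

    def get_rating(self, obj):
        # Read path: behaves like a normal SerializerMethodField.
        return obj.rating

    def set_rating(self, data, files, field_name, into):
        # Write path: field_from_native() delegates here to copy the incoming value.
        into[field_name] = data.get(field_name)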
class ControllerStates(object): <NEW_LINE> <INDENT> STATE_STOPPED = 0 <NEW_LINE> STATE_RUNNING = 1
The various states that an interface can experience.
6259908499fddb7c1ca63b7a
class TerraSimpleUserSerializer(TerraStaffUserSerializer): <NEW_LINE> <INDENT> is_staff = serializers.BooleanField(read_only=True)
A simple user cannot edit is_staff and is_superuser status
62599084a8370b77170f1f0c
class Inference(object): <NEW_LINE> <INDENT> def __init__(self, output_layer, parameters): <NEW_LINE> <INDENT> topo = topology.Topology(output_layer) <NEW_LINE> gm = api.GradientMachine.createFromConfigProto( topo.proto(), api.CREATE_MODE_TESTING, [api.PARAMETER_VALUE]) <NEW_LINE> for param in gm.getParameters(): <NEW_LINE> <INDENT> val = param.getBuf(api.PARAMETER_VALUE) <NEW_LINE> name = param.getName() <NEW_LINE> assert isinstance(val, api.Vector) <NEW_LINE> val.copyFromNumpyArray(parameters.get(name).flatten()) <NEW_LINE> <DEDENT> self.__gradient_machine__ = gm <NEW_LINE> self.__data_types__ = topo.data_type() <NEW_LINE> <DEDENT> def iter_infer(self, input, feeding=None): <NEW_LINE> <INDENT> feeder = DataFeeder(self.__data_types__, feeding) <NEW_LINE> batch_size = len(input) <NEW_LINE> def __reader_impl__(): <NEW_LINE> <INDENT> for each_sample in input: <NEW_LINE> <INDENT> yield each_sample <NEW_LINE> <DEDENT> <DEDENT> reader = minibatch.batch(__reader_impl__, batch_size=batch_size) <NEW_LINE> self.__gradient_machine__.start() <NEW_LINE> for data_batch in reader(): <NEW_LINE> <INDENT> yield self.__gradient_machine__.forwardTest(feeder(data_batch)) <NEW_LINE> <DEDENT> self.__gradient_machine__.finish() <NEW_LINE> <DEDENT> def iter_infer_field(self, field, **kwargs): <NEW_LINE> <INDENT> if not isinstance(field, list) and not isinstance(field, tuple): <NEW_LINE> <INDENT> field = [field] <NEW_LINE> <DEDENT> for result in self.iter_infer(**kwargs): <NEW_LINE> <INDENT> for each_result in result: <NEW_LINE> <INDENT> item = [each_result[each_field] for each_field in field] <NEW_LINE> yield item <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def infer(self, field='value', **kwargs): <NEW_LINE> <INDENT> retv = None <NEW_LINE> for result in self.iter_infer_field(field=field, **kwargs): <NEW_LINE> <INDENT> if retv is None: <NEW_LINE> <INDENT> retv = [[] for i in xrange(len(result))] <NEW_LINE> <DEDENT> for i, item in enumerate(result): <NEW_LINE> <INDENT> retv[i].append(item) <NEW_LINE> <DEDENT> <DEDENT> retv = [numpy.concatenate(out) for out in retv] <NEW_LINE> if len(retv) == 1: <NEW_LINE> <INDENT> return retv[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return retv
Inference combines neural network output and parameters together to do inference. :param output_layer: The neural network on which inference should be run. :type output_layer: paddle.v2.config_base.Layer or a sequence of paddle.v2.config_base.Layer :param parameters: The parameters dictionary. :type parameters: paddle.v2.parameters.Parameters
625990845fdd1c0f98e5fac0
class ResourceIdentifier(HeatIdentifier): <NEW_LINE> <INDENT> RESOURCE_NAME = 'resource_name' <NEW_LINE> def __init__(self, tenant, stack_name, stack_id, path, resource_name=None): <NEW_LINE> <INDENT> if resource_name is not None: <NEW_LINE> <INDENT> if '/' in resource_name: <NEW_LINE> <INDENT> raise ValueError(_('Resource name may not contain "/"')) <NEW_LINE> <DEDENT> path = '/'.join([path.rstrip('/'), 'resources', resource_name]) <NEW_LINE> <DEDENT> super(ResourceIdentifier, self).__init__(tenant, stack_name, stack_id, path) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if attr == self.RESOURCE_NAME: <NEW_LINE> <INDENT> return self._path_components()[-1] <NEW_LINE> <DEDENT> return HeatIdentifier.__getattr__(self, attr) <NEW_LINE> <DEDENT> def stack(self): <NEW_LINE> <INDENT> return HeatIdentifier(self.tenant, self.stack_name, self.stack_id, '/'.join(self._path_components()[:-2]))
An identifier for a resource.
625990844527f215b58eb740
class TestIsotropicGaussian(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.model = IsotropicGaussian() <NEW_LINE> [gx, gy] = np.meshgrid(np.arange(35.5, 40., 0.5), np.arange(40.5, 45., 0.5)) <NEW_LINE> ngp = np.shape(gx)[0] * np.shape(gx)[1] <NEW_LINE> gx = np.reshape(gx, [ngp, 1]) <NEW_LINE> gy = np.reshape(gy, [ngp, 1]) <NEW_LINE> depths = 10. * np.ones(ngp) <NEW_LINE> self.data = np.column_stack([gx, gy, depths, np.zeros(ngp, dtype=float)]) <NEW_LINE> <DEDENT> def test_kernel_single_event(self): <NEW_LINE> <INDENT> self.data[50, 3] = 1. <NEW_LINE> config = {'Length_Limit': 3.0, 'BandWidth': 30.0} <NEW_LINE> expected_array = np.genfromtxt(os.path.join(BASE_PATH, TEST_1_VALUE_FILE)) <NEW_LINE> (smoothed_array, sum_data, sum_smooth) = self.model.smooth_data(self.data, config) <NEW_LINE> np.testing.assert_array_almost_equal(expected_array, smoothed_array) <NEW_LINE> self.assertAlmostEqual(sum_data, 1.) <NEW_LINE> self.assertAlmostEqual(sum_data, sum_smooth, 3) <NEW_LINE> <DEDENT> def test_kernel_multiple_event(self): <NEW_LINE> <INDENT> self.data[[5, 30, 65], 3] = 1. <NEW_LINE> config = {'Length_Limit': 3.0, 'BandWidth': 30.0} <NEW_LINE> expected_array = np.genfromtxt(os.path.join(BASE_PATH, TEST_3_VALUE_FILE)) <NEW_LINE> (smoothed_array, sum_data, sum_smooth) = self.model.smooth_data(self.data, config) <NEW_LINE> np.testing.assert_array_almost_equal(expected_array, smoothed_array) <NEW_LINE> self.assertAlmostEqual(sum_data, 3.) <NEW_LINE> self.assertAlmostEqual(sum_data, sum_smooth, 2) <NEW_LINE> <DEDENT> def test_kernel_single_event_3d(self): <NEW_LINE> <INDENT> self.data[50, 3] = 1. <NEW_LINE> self.data[50, 2] = 20. <NEW_LINE> config = {'Length_Limit': 3.0, 'BandWidth': 30.0} <NEW_LINE> expected_array = np.genfromtxt(os.path.join(BASE_PATH, TEST_1_VALUE_3D_FILE)) <NEW_LINE> (smoothed_array, sum_data, sum_smooth) = self.model.smooth_data(self.data, config, is_3d=True) <NEW_LINE> np.testing.assert_array_almost_equal(expected_array, smoothed_array) <NEW_LINE> self.assertAlmostEqual(sum_data, 1.) <NEW_LINE> self.assertAlmostEqual(sum_data, sum_smooth, 2)
Simple tests of the Isotropic Gaussian Kernel (as implemented by Frankel (1995))
62599084656771135c48add1
class Network(object): <NEW_LINE> <INDENT> def __init__(self, neurons, links, num_inputs): <NEW_LINE> <INDENT> if not neurons: <NEW_LINE> <INDENT> neurons = [] <NEW_LINE> <DEDENT> self.neurons = neurons <NEW_LINE> self.synapses = [] <NEW_LINE> self._num_inputs = num_inputs <NEW_LINE> if links is not None: <NEW_LINE> <INDENT> nodes = {} <NEW_LINE> for n in self.neurons: <NEW_LINE> <INDENT> nodes[n.ID] = n <NEW_LINE> <DEDENT> for c in links: <NEW_LINE> <INDENT> self.synapses.append(Synapse(nodes[c[0]], nodes[c[1]], c[2])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def set_integration_step(self, step): <NEW_LINE> <INDENT> for neuron in self.neurons: <NEW_LINE> <INDENT> neuron.set_integration_step(step) <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> for neuron in self.neurons: <NEW_LINE> <INDENT> neuron.output = 0.0 <NEW_LINE> <DEDENT> <DEDENT> def add_neuron(self, neuron): <NEW_LINE> <INDENT> self.neurons.append(neuron) <NEW_LINE> <DEDENT> def add_synapse(self, synapse): <NEW_LINE> <INDENT> self.synapses.append(synapse) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{0:d} nodes and {1:d} synapses'.format(len(self.neurons), len(self.synapses)) <NEW_LINE> <DEDENT> def serial_activate(self, inputs): <NEW_LINE> <INDENT> assert len(inputs) == self._num_inputs, "Wrong number of inputs." <NEW_LINE> input_neurons = [n for n in self.neurons[:self._num_inputs] if n.type == 'INPUT'] <NEW_LINE> for v, n in zip(inputs, input_neurons): <NEW_LINE> <INDENT> n.output = v <NEW_LINE> <DEDENT> net_output = [] <NEW_LINE> for n in self.neurons[self._num_inputs:]: <NEW_LINE> <INDENT> n.output = n.activate() <NEW_LINE> if n.type == 'OUTPUT': <NEW_LINE> <INDENT> net_output.append(n.output) <NEW_LINE> <DEDENT> <DEDENT> return net_output <NEW_LINE> <DEDENT> def parallel_activate(self, inputs=None): <NEW_LINE> <INDENT> if inputs is not None: <NEW_LINE> <INDENT> assert len(inputs) == self._num_inputs, "Wrong number of inputs." <NEW_LINE> input_neurons = [n for n in self.neurons[:self._num_inputs] if n.type == 'INPUT'] <NEW_LINE> for v, n in zip(inputs, input_neurons): <NEW_LINE> <INDENT> n.output = v <NEW_LINE> <DEDENT> <DEDENT> current_state = [] <NEW_LINE> for n in self.neurons: <NEW_LINE> <INDENT> if n.type != 'INPUT': <NEW_LINE> <INDENT> current_state.append(n.activate()) <NEW_LINE> <DEDENT> <DEDENT> net_output = [] <NEW_LINE> for n, state in zip(self.neurons[self._num_inputs:], current_state): <NEW_LINE> <INDENT> n.output = state <NEW_LINE> if n.type == 'OUTPUT': <NEW_LINE> <INDENT> net_output.append(n.output) <NEW_LINE> <DEDENT> <DEDENT> return net_output
A neural network has a list of neurons linked by synapses
6259908471ff763f4b5e92ee
@functools.total_ordering <NEW_LINE> class User(object): <NEW_LINE> <INDENT> def __init__(self, nick, user, host): <NEW_LINE> <INDENT> assert isinstance(nick, Identifier) <NEW_LINE> self.nick = nick <NEW_LINE> self.user = user <NEW_LINE> self.host = host <NEW_LINE> self.channels = {} <NEW_LINE> self.account = None <NEW_LINE> self.away = None <NEW_LINE> <DEDENT> hostmask = property(lambda self: '{}!{}@{}'.format(self.nick, self.user, self.host)) <NEW_LINE> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, User): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return self.nick == other.nick <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, User): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return self.nick < other.nick
A representation of a user Sopel is aware of.
625990845fcc89381b266efe
class OrganisationAdmin(reversion.VersionAdmin): <NEW_LINE> <INDENT> def queryset(self, request): <NEW_LINE> <INDENT> qs = self.model.objects <NEW_LINE> ordering = self.get_ordering(request) <NEW_LINE> if ordering: <NEW_LINE> <INDENT> qs = qs.order_by(*ordering) <NEW_LINE> <DEDENT> return qs
Admin for the organisation model.
625990844a966d76dd5f0a28
class TestV1CephFSVolumeSource(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1CephFSVolumeSource(self): <NEW_LINE> <INDENT> pass
V1CephFSVolumeSource unit test stubs
62599084bf627c535bcb3015
class Solution(object): <NEW_LINE> <INDENT> def isValidSudoku(self, board): <NEW_LINE> <INDENT> if board is None or len(board) == 0 or len(board[0]) == 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> l = len(board) <NEW_LINE> row = [0 for i in xrange(l)] <NEW_LINE> col = [0 for i in xrange(l)] <NEW_LINE> box = [0 for i in xrange(l)] <NEW_LINE> for i in xrange(l): <NEW_LINE> <INDENT> for j in xrange(l): <NEW_LINE> <INDENT> if board[i][j] != '.': <NEW_LINE> <INDENT> num = int(board[i][j]) <NEW_LINE> if row[i] & 1 << num > 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if col[j] & 1 << num > 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> box_ind = i / 3 * 3 + j / 3 <NEW_LINE> if box[box_ind] & 1 << num > 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> row[i] |= 1 << num <NEW_LINE> col[j] |= 1 << num <NEW_LINE> box[box_ind] |= 1 << num <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def isValidSudoku_separate_check(self, board): <NEW_LINE> <INDENT> if board is None or len(board) == 0 or len(board[0]) == 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> row_valid = True <NEW_LINE> col_valid = True <NEW_LINE> box_valid = True <NEW_LINE> l = len(board) <NEW_LINE> for j in xrange(l): <NEW_LINE> <INDENT> nums = set() <NEW_LINE> for i in xrange(l): <NEW_LINE> <INDENT> if board[i][j] != '.': <NEW_LINE> <INDENT> val = int(board[i][j]) <NEW_LINE> if val in nums: <NEW_LINE> <INDENT> row_valid = False <NEW_LINE> break <NEW_LINE> <DEDENT> nums.add(val) <NEW_LINE> <DEDENT> <DEDENT> if not row_valid: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> for i in xrange(l): <NEW_LINE> <INDENT> nums = set() <NEW_LINE> for j in xrange(l): <NEW_LINE> <INDENT> if board[i][j] != '.': <NEW_LINE> <INDENT> val = int(board[i][j]) <NEW_LINE> if val in nums: <NEW_LINE> <INDENT> col_valid = False <NEW_LINE> break <NEW_LINE> <DEDENT> nums.add(val) <NEW_LINE> <DEDENT> <DEDENT> if not col_valid: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> for a in xrange(0, l, 3): <NEW_LINE> <INDENT> for b in xrange(0, l, 3): <NEW_LINE> <INDENT> nums = set() <NEW_LINE> for i in xrange(a, a + 3): <NEW_LINE> <INDENT> for j in xrange(b, b + 3): <NEW_LINE> <INDENT> if board[i][j] != '.': <NEW_LINE> <INDENT> val = int(board[i][j]) <NEW_LINE> if val in nums: <NEW_LINE> <INDENT> box_valid = False <NEW_LINE> break <NEW_LINE> <DEDENT> nums.add(val) <NEW_LINE> <DEDENT> <DEDENT> if not box_valid: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if not box_valid: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if not box_valid: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return row_valid and col_valid and box_valid
Idea: (1) separate checking: - check 3 booleans: row, col, box - have a set to keep track for each row, col, box checking - return 3 booleans AND together - Runtime: O(N^2), Space: O(1) (2) bitmap checking: - 3 int arrays for bitmaps
625990848a349b6b43687da1
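Illustrative sketch (editor's addition, not a dataset row): the bitmask bookkeeping behind isValidSudoku above, on a made-up digit sequence. Each row, column and box keeps one integer, and bit n is set once digit n has been seen.

seen = 0
for digit in (5, 3, 5):            # the second 5 is a duplicate
    mask = 1 << digit
    if seen & mask:                # bit already set -> digit seen before
        print('duplicate digit:', digit)
        break
    seen |= mask                   # mark the digit as seen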
class LocalizedMusicInfo(object): <NEW_LINE> <INDENT> deserialized_types = { 'prompt_name': 'str', 'aliases': 'list[ask_smapi_model.v1.skill.manifest.music_alias.MusicAlias]', 'features': 'list[ask_smapi_model.v1.skill.manifest.music_feature.MusicFeature]', 'wordmark_logos': 'list[ask_smapi_model.v1.skill.manifest.music_wordmark.MusicWordmark]' } <NEW_LINE> attribute_map = { 'prompt_name': 'promptName', 'aliases': 'aliases', 'features': 'features', 'wordmark_logos': 'wordmarkLogos' } <NEW_LINE> supports_multiple_types = False <NEW_LINE> def __init__(self, prompt_name=None, aliases=None, features=None, wordmark_logos=None): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> self.prompt_name = prompt_name <NEW_LINE> self.aliases = aliases <NEW_LINE> self.features = features <NEW_LINE> self.wordmark_logos = wordmark_logos <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, LocalizedMusicInfo): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Defines the structure of localized music information in the skill manifest. :param prompt_name: Name to be used when Alexa renders the music skill name. :type prompt_name: (optional) str :param aliases: Defines the structure of the music prompt name information in the skill manifest. :type aliases: (optional) list[ask_smapi_model.v1.skill.manifest.music_alias.MusicAlias] :param features: :type features: (optional) list[ask_smapi_model.v1.skill.manifest.music_feature.MusicFeature] :param wordmark_logos: :type wordmark_logos: (optional) list[ask_smapi_model.v1.skill.manifest.music_wordmark.MusicWordmark]
62599084656771135c48add2
class PhotovoltaicSystemNumberOfArrays(BSElement): <NEW_LINE> <INDENT> element_type = "xs:integer"
Number of arrays in a photovoltaic system.
625990844c3428357761bdfd
class TSOverScore(object): <NEW_LINE> <INDENT> def __init__(self, bucket_size=100, a=1, b=1): <NEW_LINE> <INDENT> self.bucket_size = bucket_size <NEW_LINE> self.buckets = [{'a': a, 'b': b} for i in range(bucket_size)] <NEW_LINE> self.exp_times_each = [0 for i in range(bucket_size)] <NEW_LINE> self.exp_times = 0 <NEW_LINE> <DEDENT> def thompson_sample(self, pred_ctr, k, max_pos, weight_type='propensity'): <NEW_LINE> <INDENT> assert 10 >= max_pos > k > 1 <NEW_LINE> active_arms = [] <NEW_LINE> for i, score in enumerate(pred_ctr[k-1: max_pos]): <NEW_LINE> <INDENT> assert 0 <= score <= 1 <NEW_LINE> temp = dict() <NEW_LINE> temp['exp_idx'] = i + k - 1 <NEW_LINE> bucket_idx = int(score * self.bucket_size) % self.bucket_size <NEW_LINE> temp['bucket_idx'] = bucket_idx <NEW_LINE> temp['ts_score'] = beta.rvs(self.buckets[bucket_idx]['a'], self.buckets[bucket_idx]['b']) <NEW_LINE> active_arms.append(temp) <NEW_LINE> <DEDENT> arm_chosen = max(active_arms, key=lambda x: x['ts_score']) <NEW_LINE> self.exp_times += 1 <NEW_LINE> self.exp_times_each[arm_chosen['bucket_idx']] += 1 <NEW_LINE> if weight_type == 'propensity': <NEW_LINE> <INDENT> weight = 1.0 * self.exp_times / self.exp_times_each[arm_chosen['bucket_idx']] <NEW_LINE> <DEDENT> elif weight_type == 'multinomial': <NEW_LINE> <INDENT> weight = 1.0 * sum(pred_ctr[k-1: max_pos]) / pred_ctr[arm_chosen['exp_idx']] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> weight = 1 <NEW_LINE> <DEDENT> return arm_chosen['exp_idx'], arm_chosen['bucket_idx'], weight <NEW_LINE> <DEDENT> def batch_update(self, bucket_idx, label): <NEW_LINE> <INDENT> if label == 1: <NEW_LINE> <INDENT> self.buckets[bucket_idx]['a'] += 1 <NEW_LINE> <DEDENT> elif label == 0: <NEW_LINE> <INDENT> self.buckets[bucket_idx]['b'] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("label must be either 0 or 1.") <NEW_LINE> <DEDENT> <DEDENT> def show_explore_times(self): <NEW_LINE> <INDENT> plt.bar(np.arange(self.bucket_size), self.exp_times_each) <NEW_LINE> plt.title("explore times of each bucket(TS over score)") <NEW_LINE> plt.xlabel("bucket index") <NEW_LINE> plt.ylabel("times") <NEW_LINE> plt.show() <NEW_LINE> <DEDENT> def show_distribution(self, bucket_idx): <NEW_LINE> <INDENT> if type(bucket_idx) == int: <NEW_LINE> <INDENT> bucket_idx = [bucket_idx] <NEW_LINE> <DEDENT> legend = [] <NEW_LINE> for i in bucket_idx: <NEW_LINE> <INDENT> a, b = self.buckets[i]['a'], self.buckets[i]['b'] <NEW_LINE> x = np.linspace(0, 1, 100) <NEW_LINE> line, = plt.plot(x, beta.pdf(x, a, b), label="bucket idx=%d" % i) <NEW_LINE> legend.append(line) <NEW_LINE> <DEDENT> plt.title("beta distribution of buckets") <NEW_LINE> plt.legend(handles=legend) <NEW_LINE> plt.show()
Thompson Sampling over scores policy.
6259908460cbc95b06365b0d
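Illustrative sketch (editor's addition, not a dataset row): one explore-and-update cycle with TSOverScore above. The pred_ctr scores and the observed click label are made up; scipy, numpy and matplotlib are assumed to be importable, since the class itself relies on them.

policy = TSOverScore(bucket_size=100)
pred_ctr = [0.31, 0.22, 0.18, 0.12, 0.07, 0.05]    # predicted CTR per position
exp_idx, bucket_idx, weight = policy.thompson_sample(pred_ctr, k=3, max_pos=6)
# ...serve the item at position exp_idx, then observe a click (1) or no click (0)...
policy.batch_update(bucket_idx, label=1)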
class DeleteAddressHandler(AbstractAddressHandler): <NEW_LINE> <INDENT> @tornado.gen.coroutine <NEW_LINE> def post(self): <NEW_LINE> <INDENT> result = SUCCESS <NEW_LINE> try: <NEW_LINE> <INDENT> ownerEmail = self.get_argument("email","") <NEW_LINE> addressID = self.get_argument("addressID", "") <NEW_LINE> self.db.deleteAddress(ownerEmail, addressID) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> result = FAILURE <NEW_LINE> <DEDENT> self.write(result)
-Requests are posted here when a user is creating an account. -All arguments are submitted as strings.
6259908423849d37ff852bfd
class AccountManager(BaseManager): <NEW_LINE> <INDENT> endpoint = '/accounts' <NEW_LINE> @autoparse <NEW_LINE> def all(self, page=None): <NEW_LINE> <INDENT> if page: <NEW_LINE> <INDENT> return self._client.get('/accounts?page=%s' % page) <NEW_LINE> <DEDENT> return self._client.get('/accounts') <NEW_LINE> <DEDENT> @autoparse <NEW_LINE> def active(self): <NEW_LINE> <INDENT> return self._client.get('/accounts?show=active_subscribers') <NEW_LINE> <DEDENT> @autoparse <NEW_LINE> def past_due(self): <NEW_LINE> <INDENT> return self._client.get('/accounts?show=pastdue_subscribers') <NEW_LINE> <DEDENT> @autoparse <NEW_LINE> def non_subscribers(self): <NEW_LINE> <INDENT> return self._client.get('/accounts?show=non_subscribers') <NEW_LINE> <DEDENT> @autoparse <NEW_LINE> def get(self, account_code): <NEW_LINE> <INDENT> return self._client.get('/accounts/%s' % account_code)
This class handles all of the API calls for Account objects. You most likely don't want to instantiate it directly, as it is made available automatically when you create a Recurly API client and is exposed through a client instance's client.accounts member.
62599084a05bb46b3848bec9
class VMWareVlanBridgeDriver(vif.VIFDriver): <NEW_LINE> <INDENT> def plug(self, instance, network, mapping): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ensure_vlan_bridge(self, session, network): <NEW_LINE> <INDENT> vlan_num = network['vlan'] <NEW_LINE> bridge = network['bridge'] <NEW_LINE> vlan_interface = FLAGS.vmwareapi_vlan_interface <NEW_LINE> if not network_utils.check_if_vlan_interface_exists(session, vlan_interface): <NEW_LINE> <INDENT> raise exception.NetworkAdapterNotFound(adapter=vlan_interface) <NEW_LINE> <DEDENT> vswitch_associated = network_utils.get_vswitch_for_vlan_interface( session, vlan_interface) <NEW_LINE> if vswitch_associated is None: <NEW_LINE> <INDENT> raise exception.SwitchNotFoundForNetworkAdapter( adapter=vlan_interface) <NEW_LINE> <DEDENT> network_ref = network_utils.get_network_with_the_name(session, bridge) <NEW_LINE> if network_ref is None: <NEW_LINE> <INDENT> network_utils.create_port_group(session, bridge, vswitch_associated, vlan_num) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _get_pg_info = network_utils.get_vlanid_and_vswitch_for_portgroup <NEW_LINE> pg_vlanid, pg_vswitch = _get_pg_info(session, bridge) <NEW_LINE> if pg_vswitch != vswitch_associated: <NEW_LINE> <INDENT> raise exception.InvalidVLANPortGroup( bridge=bridge, expected=vswitch_associated, actual=pg_vswitch) <NEW_LINE> <DEDENT> if pg_vlanid != vlan_num: <NEW_LINE> <INDENT> raise exception.InvalidVLANTag(bridge=bridge, tag=vlan_num, pgroup=pg_vlanid) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def unplug(self, instance, network, mapping): <NEW_LINE> <INDENT> pass
VIF Driver to set up bridge/VLAN networking using the VMWare API.
6259908499fddb7c1ca63b7c
class SessionFactory(Session): <NEW_LINE> <INDENT> def request( self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, json=None): <NEW_LINE> <INDENT> req = Request( method=method.upper(), url=url, headers=headers, files=files, data=data or {}, json=json, params=params or {}, auth=auth, cookies=cookies, hooks=hooks, ) <NEW_LINE> prep = self.prepare_request(req) <NEW_LINE> proxies = proxies or {} <NEW_LINE> settings = self.merge_environment_settings( prep.url, proxies, stream, verify, cert ) <NEW_LINE> return prep
SessionFactory. Overrides the requests.sessions Session.request method to return the PreparedRequest object instead of attempting to send it. Useful for testing with the django.test client.
6259908463b5f9789fe86cac
class ImageRemoveEvent(Event): <NEW_LINE> <INDENT> pass
This event is emitted when an |image| is removed. :param image: the removed |image|
625990845fc7496912d4900d
class SingleClientWithAKickPerspective(pb.Perspective): <NEW_LINE> <INDENT> client = None <NEW_LINE> def __getstate__(self): <NEW_LINE> <INDENT> state = styles.Versioned.__getstate__(self) <NEW_LINE> try: <NEW_LINE> <INDENT> del state['client'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return state <NEW_LINE> <DEDENT> def attached(self, client, identity): <NEW_LINE> <INDENT> if self.client is not None: <NEW_LINE> <INDENT> self.detached(client, identity) <NEW_LINE> <DEDENT> self.client = client <NEW_LINE> return self <NEW_LINE> <DEDENT> def detached(self, client, identity): <NEW_LINE> <INDENT> self.client = None
One client may attach to me at a time. If a new client requests to be attached to me, any currently connected perspective will be disconnected.
62599084bf627c535bcb3017
class Missile_rtilt(Collider): <NEW_LINE> <INDENT> image = games.load_image("missile_rtilt.bmp") <NEW_LINE> sound = games.load_sound("missile.wav") <NEW_LINE> SPEED = 10 <NEW_LINE> def __init__(self, ship): <NEW_LINE> <INDENT> Missile.sound.play() <NEW_LINE> games.Sprite.__init__(self, image = Missile.image, x = ship.x + 20, bottom = ship.top - 1, dx = Missile_rtilt.SPEED, dy = Missile.SPEED) <NEW_LINE> self.ship = ship <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> Collider.update(self) <NEW_LINE> if self.top < 0: <NEW_LINE> <INDENT> self.destroy() <NEW_LINE> <DEDENT> <DEDENT> def die(self): <NEW_LINE> <INDENT> self.ship.score.value += 30 <NEW_LINE> Collider.die(self)
A missile launched diagonally to the right by the player's ship.
625990845fdd1c0f98e5fac4
class TestPageRepr(TestPageBaseUnicode): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestPageRepr, self).setUp() <NEW_LINE> self._old_encoding = config.console_encoding <NEW_LINE> config.console_encoding = 'utf8' <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> config.console_encoding = self._old_encoding <NEW_LINE> super(TestPageRepr, self).tearDown() <NEW_LINE> <DEDENT> def test_mainpage_type(self): <NEW_LINE> <INDENT> mainpage = self.get_mainpage() <NEW_LINE> self.assertIsInstance(repr(mainpage), str) <NEW_LINE> <DEDENT> def test_unicode_type(self): <NEW_LINE> <INDENT> page = pywikibot.Page(self.get_site(), u'Ō') <NEW_LINE> self.assertIsInstance(repr(page), str) <NEW_LINE> <DEDENT> @unittest.skipIf(not PY2, 'Python 2 specific test') <NEW_LINE> def test_unicode_value(self): <NEW_LINE> <INDENT> page = pywikibot.Page(self.get_site(), u'Ō') <NEW_LINE> self.assertEqual(repr(page), b'Page(\xc5\x8c)') <NEW_LINE> <DEDENT> @unittest.skipIf(not PY2, 'Python 2 specific test') <NEW_LINE> def test_unicode_percent_r_failure(self): <NEW_LINE> <INDENT> page = pywikibot.Page(self.get_site(), u'Ō') <NEW_LINE> self.assertRaisesRegex(UnicodeDecodeError, '', unicode.format, u'{0!r}', page) <NEW_LINE> <DEDENT> @unittest.skipIf(PY2, 'Python 3+ specific test') <NEW_LINE> def test_unicode_value_py3(self): <NEW_LINE> <INDENT> self.assertEqual(repr(self.page), "Page('Ō')") <NEW_LINE> self.assertEqual('%r' % self.page, "Page('Ō')") <NEW_LINE> self.assertEqual('{0!r}'.format(self.page), "Page('Ō')") <NEW_LINE> <DEDENT> @unittest.skipIf(not PY2, 'Python 2 specific test') <NEW_LINE> @unittest.expectedFailure <NEW_LINE> def test_ASCII_comatible(self): <NEW_LINE> <INDENT> page = pywikibot.Page(self.site, 'ä') <NEW_LINE> repr(page).decode('ascii')
Test for Page's repr implementation.
6259908450812a4eaa621967
class Connection(MongoClient): <NEW_LINE> <INDENT> def __init__(self, host=None, port=None, max_pool_size=None, network_timeout=None, document_class=dict, tz_aware=False, _connect=True, **kwargs): <NEW_LINE> <INDENT> if network_timeout is not None: <NEW_LINE> <INDENT> if (not isinstance(network_timeout, (int, float)) or network_timeout <= 0): <NEW_LINE> <INDENT> raise ConfigurationError("network_timeout must " "be a positive integer") <NEW_LINE> <DEDENT> kwargs['socketTimeoutMS'] = network_timeout * 1000 <NEW_LINE> <DEDENT> kwargs['auto_start_request'] = kwargs.get('auto_start_request', True) <NEW_LINE> kwargs['safe'] = kwargs.get('safe', False) <NEW_LINE> super(Connection, self).__init__(host, port, max_pool_size, document_class, tz_aware, _connect, **kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if len(self.nodes) == 1: <NEW_LINE> <INDENT> return "Connection(%r, %r)" % (self.host, self.port) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "Connection(%r)" % ["%s:%d" % n for n in self.nodes] <NEW_LINE> <DEDENT> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> raise TypeError("'Connection' object is not iterable")
Connection to MongoDB.
625990844428ac0f6e65a073
class Scale(object): <NEW_LINE> <INDENT> def __init__(self, n, inters): <NEW_LINE> <INDENT> self.name = n <NEW_LINE> self.intervals = inters <NEW_LINE> <DEDENT> def voice(self, key): <NEW_LINE> <INDENT> chromatic = Scales.make_chromatic(key) <NEW_LINE> steps = [] <NEW_LINE> for i in self.intervals: <NEW_LINE> <INDENT> note = chromatic[i.distance] <NEW_LINE> steps.append({ 'note':note, 'interval':i }) <NEW_LINE> <DEDENT> return steps <NEW_LINE> <DEDENT> def make_mode(self, n, mode): <NEW_LINE> <INDENT> key_dist = self.intervals[mode - 1].distance <NEW_LINE> adj_dist = [] <NEW_LINE> for i in range(len(self.intervals)): <NEW_LINE> <INDENT> step_idx = (i + (mode - 1)) % len(self.intervals) <NEW_LINE> step_dist = self.intervals[step_idx].distance - key_dist <NEW_LINE> if step_dist < 0: <NEW_LINE> <INDENT> step_dist += 12 <NEW_LINE> <DEDENT> adj_dist.append(Intervals.by_distance(step_dist)) <NEW_LINE> <DEDENT> return Scale(n, adj_dist) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> names = [i.short_name for i in self.intervals] <NEW_LINE> return self.name + ': ' + ','.join(names)
Defines a collection of intervals
6259908455399d3f0562805a
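A minimal usage sketch for the Scale entry above, assuming the Scale class is in scope. The Interval stand-in (a namedtuple with distance/short_name attributes) and the interval values are illustrative assumptions, since the real Interval/Intervals/Scales helpers are not shown here; only __str__ is exercised.

from collections import namedtuple

# Hypothetical stand-in for the Interval objects the Scale expects; it only
# provides the attributes the class actually reads (distance, short_name).
Interval = namedtuple('Interval', ['distance', 'short_name'])

major = Scale('Major', [
    Interval(0, '1'), Interval(2, '2'), Interval(4, '3'), Interval(5, '4'),
    Interval(7, '5'), Interval(9, '6'), Interval(11, '7'),
])
print(major)  # -> Major: 1,2,3,4,5,6,7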
class ApplicationViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Application.objects.all() <NEW_LINE> serializer_class = ApplicationSerializer
API endpoint that allows applications to be viewed or edited.
62599084a05bb46b3848beca
class AccessLogReader(Reader): <NEW_LINE> <INDENT> def __init__(self, filename, headers=None, http_ver='1.1', use_cache=True, **kwargs): <NEW_LINE> <INDENT> super(AccessLogReader, self).__init__(filename, use_cache) <NEW_LINE> self.warned = False <NEW_LINE> self.headers = set(headers) if headers else set() <NEW_LINE> self.log = logging.getLogger(__name__) <NEW_LINE> <DEDENT> def warn(self, message): <NEW_LINE> <INDENT> if not self.warned: <NEW_LINE> <INDENT> self.warned = True <NEW_LINE> self.log.warning( "There are some skipped lines. See full log for details.") <NEW_LINE> <DEDENT> self.log.debug(message) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> opener = resource.get_opener(self.filename) <NEW_LINE> with opener(self.use_cache) as ammo_file: <NEW_LINE> <INDENT> info.status.af_size = opener.data_length <NEW_LINE> while True: <NEW_LINE> <INDENT> for line in ammo_file: <NEW_LINE> <INDENT> info.status.af_position = ammo_file.tell() <NEW_LINE> try: <NEW_LINE> <INDENT> request = line.split('"')[1] <NEW_LINE> method, uri, proto = request.split() <NEW_LINE> http_ver = proto.split('/')[1] <NEW_LINE> if method == "GET": <NEW_LINE> <INDENT> yield ( HttpAmmo( uri, headers=self.headers, http_ver=http_ver, ).to_s(), None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.warn( "Skipped line: %s (unsupported method)" % line) <NEW_LINE> <DEDENT> <DEDENT> except (ValueError, IndexError) as e: <NEW_LINE> <INDENT> self.warn("Skipped line: %s (%s)" % (line, e)) <NEW_LINE> <DEDENT> <DEDENT> ammo_file.seek(0) <NEW_LINE> info.status.af_position = 0 <NEW_LINE> info.status.inc_loop_count()
Missiles from access log
6259908499fddb7c1ca63b7d
class TeamRobot(object): <NEW_LINE> <INDENT> swagger_types = { 'year': 'int', 'robot_name': 'str', 'key': 'str', 'team_key': 'str' } <NEW_LINE> attribute_map = { 'year': 'year', 'robot_name': 'robot_name', 'key': 'key', 'team_key': 'team_key' } <NEW_LINE> def __init__(self, year=None, robot_name=None, key=None, team_key=None): <NEW_LINE> <INDENT> self._year = None <NEW_LINE> self._robot_name = None <NEW_LINE> self._key = None <NEW_LINE> self._team_key = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.year = year <NEW_LINE> self.robot_name = robot_name <NEW_LINE> self.key = key <NEW_LINE> self.team_key = team_key <NEW_LINE> <DEDENT> @property <NEW_LINE> def year(self): <NEW_LINE> <INDENT> return self._year <NEW_LINE> <DEDENT> @year.setter <NEW_LINE> def year(self, year): <NEW_LINE> <INDENT> if year is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `year`, must not be `None`") <NEW_LINE> <DEDENT> self._year = year <NEW_LINE> <DEDENT> @property <NEW_LINE> def robot_name(self): <NEW_LINE> <INDENT> return self._robot_name <NEW_LINE> <DEDENT> @robot_name.setter <NEW_LINE> def robot_name(self, robot_name): <NEW_LINE> <INDENT> if robot_name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `robot_name`, must not be `None`") <NEW_LINE> <DEDENT> self._robot_name = robot_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return self._key <NEW_LINE> <DEDENT> @key.setter <NEW_LINE> def key(self, key): <NEW_LINE> <INDENT> if key is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `key`, must not be `None`") <NEW_LINE> <DEDENT> self._key = key <NEW_LINE> <DEDENT> @property <NEW_LINE> def team_key(self): <NEW_LINE> <INDENT> return self._team_key <NEW_LINE> <DEDENT> @team_key.setter <NEW_LINE> def team_key(self, team_key): <NEW_LINE> <INDENT> if team_key is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `team_key`, must not be `None`") <NEW_LINE> <DEDENT> self._team_key = team_key <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TeamRobot): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599084167d2b6e312b8339
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = util.Counter() <NEW_LINE> for p in self.legalPositions: self.beliefs[p] = 1.0 <NEW_LINE> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observe(self, observation, gameState): <NEW_LINE> <INDENT> noisyDistance = observation <NEW_LINE> emissionModel = busters.getObservationDistribution(noisyDistance) <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> for p in self.legalPositions: <NEW_LINE> <INDENT> Dist = util.manhattanDistance(p, pacmanPosition) <NEW_LINE> allPossible[p] = self.beliefs[p] * emissionModel[Dist] <NEW_LINE> <DEDENT> allPossible.normalize() <NEW_LINE> if noisyDistance == None: <NEW_LINE> <INDENT> pos = self.getJailPosition() <NEW_LINE> allPossible[pos] = 1 <NEW_LINE> <DEDENT> self.beliefs = allPossible <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> "*** YOUR CODE HERE ***" <NEW_LINE> allPosible = util.Counter() <NEW_LINE> for old in self.legalPositions: <NEW_LINE> <INDENT> newpd = self.getPositionDistribution(self.setGhostPosition(gameState, old)) <NEW_LINE> for new, probability in newpd.items(): <NEW_LINE> <INDENT> allPosible[new] += probability * self.beliefs[old] <NEW_LINE> <DEDENT> <DEDENT> allPosible.normalize() <NEW_LINE> self.beliefs = allPosible <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs
The exact dynamic inference module should use forward-algorithm updates to compute the exact belief function at each time step.
625990847cff6e4e811b7587
class UserLevel(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_null(self): <NEW_LINE> <INDENT> nullUserLevel_data = 'code=&name=&enable=' <NEW_LINE> nullUserLevel_res = requests.request('POST', insertUserLevel_url, data=nullUserLevel_data, headers=headers) <NEW_LINE> self.assertEqual('SUCCESS',nullUserLevel_res.json()['status'], msg=nullUserLevel_res.json()) <NEW_LINE> <DEDENT> def test_type(self): <NEW_LINE> <INDENT> typeUserLevel_data = 'code=test&name=test&enable=test' <NEW_LINE> typeUserLevel_res = requests.request('POST', insertUserLevel_url, data=typeUserLevel_data, headers=headers) <NEW_LINE> self.assertEqual(200,typeUserLevel_res.status_code) <NEW_LINE> <DEDENT> def test_insert(self): <NEW_LINE> <INDENT> insertUserLevel_data = 'code=123&name=perry&enable=true' <NEW_LINE> insertUserLevel_res = requests.request('POST', insertUserLevel_url, data=insertUserLevel_data, headers=headers) <NEW_LINE> self.assertEqual('SUCCESS',insertUserLevel_res.json()['status']) <NEW_LINE> <DEDENT> def test_search(self): <NEW_LINE> <INDENT> searchUserLevel_data = 'darw=&orderBy=add_time&orderType=desc&queryPage=1&pageSize=10&nameLike=perry&enable=' <NEW_LINE> searchUserLevel_res = requests.request('POST', searchUserLevel_url, data=searchUserLevel_data,headers=headers) <NEW_LINE> self.assertEqual('SUCCESS',searchUserLevel_res.json()['status']) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass
Member level creation (add a new member level).
6259908476e4537e8c3f10c5
class IContactInfo(form.Schema): <NEW_LINE> <INDENT> contactName = schema.TextLine( title=_(u"Contact Name"), description=u"", required=False, ) <NEW_LINE> contactEmail = schema.TextLine( title=_(u"Contact Email"), description=u"", required=False, ) <NEW_LINE> contactPhone = schema.TextLine( title=_(u"Contact Phone"), description=u"", required=False, )
Marker/Form interface for Contact Info
625990847047854f46340efb
class Record(object): <NEW_LINE> <INDENT> def __init__ (self): <NEW_LINE> <INDENT> self.sequences = [] <NEW_LINE> self.version = "" <NEW_LINE> self.database = "" <NEW_LINE> self.diagrams = {} <NEW_LINE> self.alphabet = None <NEW_LINE> self.motifs = [] <NEW_LINE> <DEDENT> def get_motif_by_name (self, name): <NEW_LINE> <INDENT> for m in self.motifs: <NEW_LINE> <INDENT> if m.name == name: <NEW_LINE> <INDENT> return m
The class for holding the results from a MAST run. A MAST.Record holds data about matches between motifs and sequences. The motifs held by the Record are objects of the class MEMEMotif. Methods: get_motif_by_name (motif_name): returns a MEMEMotif with the given name.
625990844a966d76dd5f0a2c
class TransformNode(VLibNode): <NEW_LINE> <INDENT> def __init__(self, func=None, **kwargs): <NEW_LINE> <INDENT> VLibNode.__init__(self, **kwargs) <NEW_LINE> self._func = func <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> done = 0 <NEW_LINE> parent = self.GetParents()[0] <NEW_LINE> args = [] <NEW_LINE> try: <NEW_LINE> <INDENT> for parent in self.GetParents(): <NEW_LINE> <INDENT> args.append(parent.next()) <NEW_LINE> <DEDENT> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> args = tuple(args) <NEW_LINE> if self._func is not None: <NEW_LINE> <INDENT> res = self._func(*args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = args <NEW_LINE> <DEDENT> return res
base class for nodes which filter their input Assumptions: - transform function takes a number of arguments equal to the number of inputs we have. We return whatever it returns - inputs (parents) can be stepped through in lockstep Usage Example: >>> from rdkit.VLib.Supply import SupplyNode >>> def func(a,b): ... return a+b >>> tform = TransformNode(func) >>> suppl1 = SupplyNode(contents=[1,2,3,3]) >>> suppl2 = SupplyNode(contents=[1,2,3,1]) >>> tform.AddParent(suppl1) >>> tform.AddParent(suppl2) >>> v = [x for x in tform] >>> v [2, 4, 6, 4] >>> tform.reset() >>> v = [x for x in tform] >>> v [2, 4, 6, 4] If we don't provide a function, just return the inputs: >>> tform = TransformNode() >>> suppl1 = SupplyNode(contents=[1,2,3,3]) >>> suppl2 = SupplyNode(contents=[1,2,3,1]) >>> tform.AddParent(suppl1) >>> tform.AddParent(suppl2) >>> v = [x for x in tform] >>> v [(1, 1), (2, 2), (3, 3), (3, 1)]
6259908466673b3332c31f46
class OnSessionInitRequest(object): <NEW_LINE> <INDENT> def __init__(self, sess): <NEW_LINE> <INDENT> _check_type(sess, (session.BaseSession, monitored_session.MonitoredSession)) <NEW_LINE> self.session = sess
Request to an on-session-init callback. This callback is invoked during the __init__ call to a debug-wrapper session.
625990843617ad0b5ee07c97
class DistortedSliceModel(object): <NEW_LINE> <INDENT> def __init__(self, flat_slices, slice_lower_edges, slice_upper_edges, normalize_factor=None, auto_normalize_threshold=5000, degree=5): <NEW_LINE> <INDENT> self.slice_model = np.zeros_like(flat_slices) <NEW_LINE> for slice_lower, slice_upper in zip(slice_lower_edges, slice_upper_edges): <NEW_LINE> <INDENT> lower_idx = np.int(np.round(slice_lower)) <NEW_LINE> upper_idx = np.int(np.round(slice_upper)) <NEW_LINE> self.slice_model[lower_idx:upper_idx] = 1.0 <NEW_LINE> <DEDENT> if normalize_factor is None: <NEW_LINE> <INDENT> normalize_factor = np.median( flat_slices[flat_slices > auto_normalize_threshold]) <NEW_LINE> <DEDENT> self.slice_model *= normalize_factor <NEW_LINE> self.y = np.indices(self.slice_model.shape)[0] <NEW_LINE> self.flat_slices = flat_slices <NEW_LINE> self.p_coef = np.ones(degree) * 1e-14 <NEW_LINE> self.p_coef[1] = 1.0 <NEW_LINE> self.polynomial = np.polynomial.Polynomial(self.p_coef, domain=[0, len(self.slice_model)], window=[0, len(self.slice_model)]) <NEW_LINE> <DEDENT> def __call__(self, p_coef): <NEW_LINE> <INDENT> self.polynomial.coef = p_coef <NEW_LINE> y_distorted = self.polynomial(self.y) <NEW_LINE> return np.interp(self.y, y_distorted, self.slice_model) <NEW_LINE> <DEDENT> def fit(self, p_coef): <NEW_LINE> <INDENT> self.p_coef = p_coef <NEW_LINE> return np.sum((self(p_coef) - self.flat_slices)**2)
Class to make a distorted slice model for fitting Parameters ---------- flat_slices: ~np.ndarray slice_lower_edges: ~np.ndarray lower edges of the slices slice_upper_edges: ~np.ndarray upper edges of the slices normalize_factor: ~float
62599084be7bc26dc9252bf9
class CaseHandler(BaseInteractiveCaseHandler): <NEW_LINE> <INDENT> def handle_case(self, i): <NEW_LINE> <INDENT> n = int(self.read()) <NEW_LINE> moves_str = self.read() <NEW_LINE> soln = self.solve(n, moves_str) <NEW_LINE> self.write('Case #{}: {}'.format(i, soln)) <NEW_LINE> <DEDENT> def solve(self, n, moves): <NEW_LINE> <INDENT> soln_chars = [] <NEW_LINE> for char in moves: <NEW_LINE> <INDENT> soln_chars.append('E' if char == 'S' else 'S') <NEW_LINE> <DEDENT> return ''.join(soln_chars)
https://codingcompetitions.withgoogle.com/codejam/round/0000000000051705/00000000000881da
62599084283ffb24f3cf53e7
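The solve() logic in the entry above is easy to restate standalone; the sketch below (with a made-up move string) mirrors it without the interactive BaseInteractiveCaseHandler plumbing, which is not shown here.

def flip_moves(moves):
    # Mirror of CaseHandler.solve: trace the opposite path by swapping every
    # 'S' (south) for 'E' (east) and vice versa, so the two trajectories only
    # ever meet at the corners.
    return ''.join('E' if char == 'S' else 'S' for char in moves)

assert flip_moves('SSSEE') == 'EEESS'  # illustrative input, not from the problem data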
class _BaseImageServiceTests(test.TestCase): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(_BaseImageServiceTests, self).__init__(*args, **kwargs) <NEW_LINE> self.service = None <NEW_LINE> self.context = None <NEW_LINE> <DEDENT> def test_create(self): <NEW_LINE> <INDENT> fixture = self._make_fixture('test image') <NEW_LINE> num_images = len(self.service.index(self.context)) <NEW_LINE> image_id = self.service.create(self.context, fixture)['id'] <NEW_LINE> self.assertNotEquals(None, image_id) <NEW_LINE> self.assertEquals(num_images + 1, len(self.service.index(self.context))) <NEW_LINE> <DEDENT> def test_create_and_show_non_existing_image(self): <NEW_LINE> <INDENT> fixture = self._make_fixture('test image') <NEW_LINE> num_images = len(self.service.index(self.context)) <NEW_LINE> image_id = self.service.create(self.context, fixture)['id'] <NEW_LINE> self.assertNotEquals(None, image_id) <NEW_LINE> self.assertRaises(exception.NotFound, self.service.show, self.context, 'bad image id') <NEW_LINE> <DEDENT> def test_create_and_show_non_existing_image_by_name(self): <NEW_LINE> <INDENT> fixture = self._make_fixture('test image') <NEW_LINE> num_images = len(self.service.index(self.context)) <NEW_LINE> image_id = self.service.create(self.context, fixture)['id'] <NEW_LINE> self.assertNotEquals(None, image_id) <NEW_LINE> self.assertRaises(exception.ImageNotFound, self.service.show_by_name, self.context, 'bad image id') <NEW_LINE> <DEDENT> def test_update(self): <NEW_LINE> <INDENT> fixture = self._make_fixture('test image') <NEW_LINE> image_id = self.service.create(self.context, fixture)['id'] <NEW_LINE> fixture['status'] = 'in progress' <NEW_LINE> self.service.update(self.context, image_id, fixture) <NEW_LINE> new_image_data = self.service.show(self.context, image_id) <NEW_LINE> self.assertEquals('in progress', new_image_data['status']) <NEW_LINE> <DEDENT> def test_delete(self): <NEW_LINE> <INDENT> fixture1 = self._make_fixture('test image 1') <NEW_LINE> fixture2 = self._make_fixture('test image 2') <NEW_LINE> fixtures = [fixture1, fixture2] <NEW_LINE> num_images = len(self.service.index(self.context)) <NEW_LINE> self.assertEquals(0, num_images, str(self.service.index(self.context))) <NEW_LINE> ids = [] <NEW_LINE> for fixture in fixtures: <NEW_LINE> <INDENT> new_id = self.service.create(self.context, fixture)['id'] <NEW_LINE> ids.append(new_id) <NEW_LINE> <DEDENT> num_images = len(self.service.index(self.context)) <NEW_LINE> self.assertEquals(2, num_images, str(self.service.index(self.context))) <NEW_LINE> self.service.delete(self.context, ids[0]) <NEW_LINE> num_images = len(self.service.index(self.context)) <NEW_LINE> self.assertEquals(1, num_images) <NEW_LINE> <DEDENT> def test_index(self): <NEW_LINE> <INDENT> fixture = self._make_fixture('test image') <NEW_LINE> image_id = self.service.create(self.context, fixture)['id'] <NEW_LINE> image_metas = self.service.index(self.context) <NEW_LINE> expected = [{'id': 'DONTCARE', 'name': 'test image'}] <NEW_LINE> self.assertDictListMatch(image_metas, expected) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _make_fixture(name): <NEW_LINE> <INDENT> fixture = {'name': name, 'updated': None, 'created': None, 'status': None, 'is_public': True} <NEW_LINE> return fixture
Tasks to test for all image services
625990845fdd1c0f98e5fac7
class User(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Użytkownika" <NEW_LINE> verbose_name_plural = "Użytkownicy" <NEW_LINE> <DEDENT> login = models.CharField(max_length=30, verbose_name="Nazwa użytkownika") <NEW_LINE> password = models.CharField(max_length=30, verbose_name="Hasło") <NEW_LINE> full_name = models.CharField(max_length=100, verbose_name="Imię i nazwisko") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.full_name
Class for the user sending tests - the teacher. Field descriptions: **login** Login, teacher identifier (30 characters) **password** Password (30 characters) **full_name** First and last name (100 characters)
6259908426068e7796d4e488
class StdErrToHTMLConverter(): <NEW_LINE> <INDENT> formatChars = { '\e[1m': '<strong>', '\e[21m': '</strong>', '\e[2m': '<span class="dark">', '\e[22m': '</span>', '\n': '<br>', '\x1b[34m': '<span class="alert alert-info" style="overflow-wrap: break-word">', '\x1b[35m': '<span class="alert alert-warning" style="overflow-wrap: break-word">', '\x1b[31m': '<span class="alert alert-error" style="overflow-wrap: break-word">', '\x1b[0m': '</span>', 'Warning:': '<strong>Warning:</strong>', 'Info:': '<strong>Info:</strong>', 'Error:': '<strong>Error:</strong>', } <NEW_LINE> def __init__(self, error): <NEW_LINE> <INDENT> if isinstance(error, Exception): <NEW_LINE> <INDENT> self.error_message = str(error) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.error_message = error <NEW_LINE> <DEDENT> <DEDENT> def replace_tjp_ids(self, message): <NEW_LINE> <INDENT> import re <NEW_LINE> pattern = r"Task[\w0-9\._]+[0-9]" <NEW_LINE> all_tjp_ids = re.findall(pattern, message) <NEW_LINE> new_message = message <NEW_LINE> for tjp_id in all_tjp_ids: <NEW_LINE> <INDENT> entity_type_and_id = tjp_id.split('.')[-1] <NEW_LINE> entity_type = entity_type_and_id.split('_')[0] <NEW_LINE> entity_id = entity_type_and_id.split('_')[1] <NEW_LINE> link = '/%(class_name)ss/%(id)s/view' % { 'class_name': entity_type.lower(), 'id': entity_id } <NEW_LINE> name = '%(name)s' % { 'name': entity_type_and_id, } <NEW_LINE> path = '<a href="%(link)s">%(name)s</a>' % { 'link': link, 'name': name } <NEW_LINE> new_message = new_message.replace(tjp_id, path) <NEW_LINE> <DEDENT> return new_message <NEW_LINE> <DEDENT> def html(self, replace_links=False): <NEW_LINE> <INDENT> if isinstance(self.error_message, list): <NEW_LINE> <INDENT> output_buffer = [] <NEW_LINE> for msg in self.error_message: <NEW_LINE> <INDENT> output_buffer.append('%s' % msg.strip()) <NEW_LINE> <DEDENT> str_buffer = '\n'.join(output_buffer) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> str_buffer = self.error_message <NEW_LINE> <DEDENT> if replace_links: <NEW_LINE> <INDENT> str_buffer = self.replace_tjp_ids(str_buffer) <NEW_LINE> <DEDENT> for key in self.formatChars.keys(): <NEW_LINE> <INDENT> str_buffer = str_buffer.replace(key, self.formatChars[key]) <NEW_LINE> <DEDENT> str_buffer = '<p>%s</p>' % str_buffer <NEW_LINE> return str_buffer
Converts stderr, stdout messages of TaskJuggler to html :param error: An exception
625990844c3428357761be01
class BackgroundSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.Background <NEW_LINE> fields = "__all__"
Serializer class for the Background model
6259908460cbc95b06365b0f
class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ["id", "email", "password", "current_password", "is_staff"]
Serializer meta information.
6259908497e22403b383ca3f
class Entries(Base): <NEW_LINE> <INDENT> __tablename__ = 'entries' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> title = Column(Text) <NEW_LINE> body = Column(Text) <NEW_LINE> creation_date = Column(DateTime, default=datetime.datetime.now) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Entries: {}>'.format(self.title) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return {'id': self.id, 'title': self.title, 'body': self.body, 'creation_date': self.creation_date.strftime('%A, %d %B, %Y, %I:%M %p') }
Class for journal entries.
6259908492d797404e389900
class Solution: <NEW_LINE> <INDENT> def findMin(self, nums): <NEW_LINE> <INDENT> start, end = 0, len(nums) - 1 <NEW_LINE> target = nums[len(nums) - 1] <NEW_LINE> while start + 1 < end: <NEW_LINE> <INDENT> mid = start + (end - start) // 2 <NEW_LINE> if nums[mid] < target: <NEW_LINE> <INDENT> end = mid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = mid <NEW_LINE> <DEDENT> <DEDENT> return min(nums[start], nums[end])
@param: nums: a rotated sorted array @return: the minimum number in the array
62599084adb09d7d5dc0c0a2
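A few sanity checks for the rotated-array minimum above, assuming the Solution class from the entry is in scope; the sample arrays are made up.

sol = Solution()
assert sol.findMin([4, 5, 6, 7, 0, 1, 2]) == 0   # rotated: minimum sits right after the pivot
assert sol.findMin([1, 2, 3]) == 1               # not rotated: first element is the minimum
assert sol.findMin([2, 1]) == 1                  # two-element edge case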
class FunctionFieldIdeal_rational(FunctionFieldIdeal): <NEW_LINE> <INDENT> def __init__(self, ring, gen): <NEW_LINE> <INDENT> FunctionFieldIdeal.__init__(self, ring) <NEW_LINE> self._gen = gen <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash( (self._ring, self._gen) ) <NEW_LINE> <DEDENT> def __contains__(self, element): <NEW_LINE> <INDENT> return (element / self._gen) in self._ring <NEW_LINE> <DEDENT> def _richcmp_(self, other, op): <NEW_LINE> <INDENT> return richcmp(self._gen, other._gen, op) <NEW_LINE> <DEDENT> def _add_(self, other): <NEW_LINE> <INDENT> return self._ring.ideal([self._gen, other._gen]) <NEW_LINE> <DEDENT> def _mul_(self, other): <NEW_LINE> <INDENT> return self._ring.ideal([self._gen * other._gen]) <NEW_LINE> <DEDENT> def _acted_upon_(self, other, on_left): <NEW_LINE> <INDENT> return self._ring.ideal([other * self._gen]) <NEW_LINE> <DEDENT> def __invert__(self): <NEW_LINE> <INDENT> return self._ring.ideal([~(self._gen)]) <NEW_LINE> <DEDENT> def denominator(self): <NEW_LINE> <INDENT> return self._gen.denominator() <NEW_LINE> <DEDENT> def is_prime(self): <NEW_LINE> <INDENT> return self._gen.denominator() == 1 and self._gen.numerator().is_prime() <NEW_LINE> <DEDENT> @cached_method <NEW_LINE> def module(self): <NEW_LINE> <INDENT> V, fr, to = self.ring().fraction_field().vector_space() <NEW_LINE> return V.span([to(g) for g in self.gens()], base_ring=self.ring()) <NEW_LINE> <DEDENT> def gen(self): <NEW_LINE> <INDENT> return self._gen <NEW_LINE> <DEDENT> def gens(self): <NEW_LINE> <INDENT> return (self._gen,) <NEW_LINE> <DEDENT> def gens_over_base(self): <NEW_LINE> <INDENT> return (self._gen,) <NEW_LINE> <DEDENT> def valuation(self, ideal): <NEW_LINE> <INDENT> if not self.is_prime(): <NEW_LINE> <INDENT> raise TypeError("not a prime ideal") <NEW_LINE> <DEDENT> O = self.ring() <NEW_LINE> d = ideal.denominator() <NEW_LINE> return self._valuation(d*ideal) - self._valuation(O.ideal(d)) <NEW_LINE> <DEDENT> def _valuation(self, ideal): <NEW_LINE> <INDENT> return ideal.gen().valuation(self.gen()) <NEW_LINE> <DEDENT> def _factor(self): <NEW_LINE> <INDENT> factors = [] <NEW_LINE> for f,m in self._gen.factor(): <NEW_LINE> <INDENT> factors.append( (self.ring().ideal(f), m) ) <NEW_LINE> <DEDENT> return factors
Fractional ideals of the maximal order of a rational function field. INPUT: - ``ring`` -- the maximal order of the rational function field. - ``gen`` -- generator of the ideal, an element of the function field. EXAMPLES:: sage: K.<x> = FunctionField(QQ) sage: O = K.maximal_order() sage: I = O.ideal(1/(x^2+x)); I Ideal (1/(x^2 + x)) of Maximal order of Rational function field in x over Rational Field
625990847cff6e4e811b7589
class TextService(QtCore.QObject): <NEW_LINE> <INDENT> change_img = QtCore.Signal() <NEW_LINE> def __init__(self, text, window, lang_en, def_counter): <NEW_LINE> <INDENT> QtCore.QObject.__init__(self) <NEW_LINE> self.word_list = re.split('\s', text) <NEW_LINE> self.window = window <NEW_LINE> self.sentence_list = regex.split("(?V1)(?<=\.|:|;|-|,|\!)", text) <NEW_LINE> self.sentence_list = self.join_short_sentences() <NEW_LINE> self.keyword_list = [] <NEW_LINE> self.timing_list = [] <NEW_LINE> self.pool = ThreadPool(4) <NEW_LINE> self.keyword_list = self.pool.map(derive_keyword, self.sentence_list) <NEW_LINE> self.pool.close() <NEW_LINE> self.pool.join() <NEW_LINE> self.audio_service = AudioService(window) <NEW_LINE> self.audio_thread = threading.Thread(target=self.audio_service.prepare_voice, args=(self.sentence_list, def_counter)) <NEW_LINE> self.audio_thread.setDaemon(True) <NEW_LINE> self.audio_thread.start() <NEW_LINE> self.image_thread = threading.Thread(target=image_from_keyword_list, args=(self.keyword_list, window, lang_en)) <NEW_LINE> self.image_thread.setDaemon(True) <NEW_LINE> self.image_thread.start() <NEW_LINE> <DEDENT> def start_story(self, wait_seconds=5): <NEW_LINE> <INDENT> self.audio_service.set_clip_callback(self.window.switch_to_next_image) <NEW_LINE> sleep(wait_seconds) <NEW_LINE> self.audio_service.start_audio() <NEW_LINE> <DEDENT> def get_sentence_list(self): <NEW_LINE> <INDENT> return self.sentence_list <NEW_LINE> <DEDENT> def pause_play(self): <NEW_LINE> <INDENT> self.audio_service.pause_play() <NEW_LINE> <DEDENT> def stop_play(self): <NEW_LINE> <INDENT> self.pool.terminate() <NEW_LINE> self.audio_service.stop_play() <NEW_LINE> <DEDENT> def join_short_sentences(self): <NEW_LINE> <INDENT> result_list = [] <NEW_LINE> for sentence in self.sentence_list: <NEW_LINE> <INDENT> if len(sentence.split()) > 4: <NEW_LINE> <INDENT> result_list.append(sentence) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result_list[-1] = result_list[-1] + sentence <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> result_list.append(sentence) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result_list
A TextService which handles all text processing including the fetching of images and voice
62599084e1aae11d1e7cf5b7
class BlobInventoryPolicy(Resource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'system_data': {'readonly': True}, 'last_modified_time': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, 'policy': {'key': 'properties.policy', 'type': 'BlobInventoryPolicySchema'}, } <NEW_LINE> def __init__( self, *, policy: Optional["BlobInventoryPolicySchema"] = None, **kwargs ): <NEW_LINE> <INDENT> super(BlobInventoryPolicy, self).__init__(**kwargs) <NEW_LINE> self.system_data = None <NEW_LINE> self.last_modified_time = None <NEW_LINE> self.policy = policy
The storage account blob inventory policy. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts". :vartype type: str :ivar system_data: Metadata pertaining to creation and last modification of the resource. :vartype system_data: ~azure.mgmt.storage.v2020_08_01_preview.models.SystemData :ivar last_modified_time: Returns the last modified date and time of the blob inventory policy. :vartype last_modified_time: ~datetime.datetime :ivar policy: The storage account blob inventory policy object. It is composed of policy rules. :vartype policy: ~azure.mgmt.storage.v2020_08_01_preview.models.BlobInventoryPolicySchema
62599084aad79263cf430302
class BudgetLevel(Choice): <NEW_LINE> <INDENT> zero_five = "0-500k" <NEW_LINE> five_one = "500k-1M" <NEW_LINE> one = "1M+"
For searching affiliates by budget.
625990843617ad0b5ee07c99
class TestBuyerParticipant(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return BuyerParticipant( company_name = 'Allegro', login = 'User_login', first_name = 'Jan', last_name = 'Nowak', address = allegro_api.models.operation_participant_address.OperationParticipantAddress( street = 'Grunwaldzka 108', city = 'Poznań', post_code = '60-166', ), id = '0' ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return BuyerParticipant( login = 'User_login', first_name = 'Jan', last_name = 'Nowak', id = '0', ) <NEW_LINE> <DEDENT> <DEDENT> def testBuyerParticipant(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
BuyerParticipant unit test stubs
6259908597e22403b383ca41
class Order(BaseModel, db.Model): <NEW_LINE> <INDENT> __tablename__ = "ih_order_info" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey("ih_user_profile.id"), nullable=False) <NEW_LINE> house_id = db.Column(db.Integer, db.ForeignKey("ih_house_info.id"), nullable=False) <NEW_LINE> begin_date = db.Column(db.DateTime, nullable=False) <NEW_LINE> end_date = db.Column(db.DateTime, nullable=False) <NEW_LINE> days = db.Column(db.Integer, nullable=False) <NEW_LINE> house_price = db.Column(db.Integer, nullable=False) <NEW_LINE> amount = db.Column(db.Integer, nullable=False) <NEW_LINE> status = db.Column( db.Enum( "WAIT_ACCEPT", "WAIT_PAYMENT", "PAID", "WAIT_COMMENT", "COMPLETE", "CANCELED", "REJECTED" ), default="WAIT_ACCEPT", index=True) <NEW_LINE> comment = db.Column(db.Text) <NEW_LINE> trade_no=db.Column(db.String(80)) <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> order_dict = { "order_id": self.id, "title": self.house.title, "img_url": QINIU_URL + "/" + self.house.index_image_url if self.house.index_image_url else "", "start_date": self.begin_date.strftime("%Y-%m-%d"), "end_date": self.end_date.strftime("%Y-%m-%d"), "ctime": self.create_time.strftime("%Y-%m-%d %H:%M:%S"), "days": self.days, "amount": self.amount, "status": self.status, "comment": self.comment if self.comment else "" } <NEW_LINE> return order_dict
Order.
6259908592d797404e389901
class Region(object): <NEW_LINE> <INDENT> __slots__ = ('id', 'nodes', 'anchors') <NEW_LINE> def __init__(self, id, *anchors): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.nodes = [] <NEW_LINE> self.anchors = anchors <NEW_LINE> if len(anchors) == 1: <NEW_LINE> <INDENT> raise ValueError('Regions must be defined by at least 2 anchors') <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "RegionID = " + self.id <NEW_LINE> <DEDENT> def __iadd__(self, offset): <NEW_LINE> <INDENT> for i in range(len(self.anchors)): <NEW_LINE> <INDENT> self.anchors[i] += offset <NEW_LINE> <DEDENT> <DEDENT> def __isub__(self, offset): <NEW_LINE> <INDENT> for i in range(len(self.anchors)): <NEW_LINE> <INDENT> self.anchors[i] -= offset <NEW_LINE> <DEDENT> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if len(self.anchors) == len(other.anchors): <NEW_LINE> <INDENT> return self.anchors < other.anchors <NEW_LINE> <DEDENT> return len(self.anchors) < len(other.anchors) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Region) or other is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.anchors == other.anchors <NEW_LINE> <DEDENT> def _get_end(self): <NEW_LINE> <INDENT> return self.anchors[-1] <NEW_LINE> <DEDENT> def _set_end(self, val): <NEW_LINE> <INDENT> self.anchors[-1] = val <NEW_LINE> <DEDENT> end = property(_get_end, _set_end) <NEW_LINE> def _get_start(self): <NEW_LINE> <INDENT> return self.anchors[0] <NEW_LINE> <DEDENT> def _set_start(self, val): <NEW_LINE> <INDENT> self.anchors[0] = val <NEW_LINE> <DEDENT> start = property(_get_start, _set_start)
The area in the text file being annotated. A region is defined by a sequence of anchor values.
6259908544b2445a339b7701
class StartUpPage(Frame): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.username= " " <NEW_LINE> self.password=" " <NEW_LINE> Frame.__init__(self) <NEW_LINE> self.master.title("Start Up Page") <NEW_LINE> self.master.rowconfigure( 0, weight = 1 ) <NEW_LINE> self.master.columnconfigure( 0, weight = 1 ) <NEW_LINE> self.grid( sticky = W+E+N+S ) <NEW_LINE> self.label0=Label(self,text="LOGIN " ) <NEW_LINE> self.label0.grid(row=1,column=6,sticky=W+E+N+S) <NEW_LINE> self.label1=Label(self,text="Username",width=30) <NEW_LINE> self.label1.grid(row=2,column=5,sticky=W) <NEW_LINE> self.label2=Label(self,text="Password",width=30) <NEW_LINE> self.label2.grid(row=3,column=5,sticky=W) <NEW_LINE> self.entry1=Entry(self,name="username",width=40) <NEW_LINE> self.entry1.grid(row=2,column=6) <NEW_LINE> self.entry2=Entry(self,name="password",show="*",width=40) <NEW_LINE> self.entry2.grid(row=3,column=6) <NEW_LINE> self.master.geometry("600x600") <NEW_LINE> self.entry1.bind("<Return>",self.getUserName) <NEW_LINE> self.entry2.bind("<Return>",self.getPassword) <NEW_LINE> self.variable1=BooleanVar() <NEW_LINE> self.variable2=BooleanVar() <NEW_LINE> self.KeepLogIn=Checkbutton(self,text="Remember Login",variable=self.variable1,command=self.SaveLogin) <NEW_LINE> self.ShowPassword=Checkbutton(self,text="Show Password",variable=self.variable2) <NEW_LINE> self.KeepLogIn.grid(row =4) <NEW_LINE> self.ShowPassword.grid(row=5) <NEW_LINE> self.master.rowconfigure(1,weight=1) <NEW_LINE> self.master.columnconfigure(1,weight=1) <NEW_LINE> self.button=Button(self,text="Login",command=self.Login) <NEW_LINE> self.button.grid( row =4 ,column=5,rowspan=2) <NEW_LINE> <DEDENT> def Login(self): <NEW_LINE> <INDENT> if self.doQuery(): <NEW_LINE> <INDENT> self.grid_forget() <NEW_LINE> EnergyMeterControl().mainloop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> showinfo("Wrong UserName or Password Please Login Again") <NEW_LINE> <DEDENT> <DEDENT> def getUserName(self,event): <NEW_LINE> <INDENT> self.username=event.widget.get() <NEW_LINE> showinfo(self.username) <NEW_LINE> <DEDENT> def getPassword(self,event): <NEW_LINE> <INDENT> self.password=event.widget.get() <NEW_LINE> showinfo(self.password) <NEW_LINE> <DEDENT> def doQuery(self): <NEW_LINE> <INDENT> conn=pymysql.connect(host=hostname,user=username,passwd=password,db=database) <NEW_LINE> cur=conn.cursor() <NEW_LINE> cur.execute("SELECT username,password FROM Employees") <NEW_LINE> for usernm,passwrd in cur.fetchall(): <NEW_LINE> <INDENT> if self.username==usernm: <NEW_LINE> <INDENT> if self.password==passwrd: <NEW_LINE> <INDENT> conn.close() <NEW_LINE> return 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> conn.close() <NEW_LINE> return 0 <NEW_LINE> <DEDENT> def SaveLogin(self): <NEW_LINE> <INDENT> self.entry1.insert(INSERT,self.username) <NEW_LINE> self.entry2.insert(INSERT,self.password)
This is the initial start up page
625990857cff6e4e811b758b
class ExitStatus: <NEW_LINE> <INDENT> OK = 0 <NEW_LINE> ERROR = 1 <NEW_LINE> ERROR_TIMEOUT = 2
Exit status code constants.
62599085099cdd3c6367619f
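A minimal sketch of how such status constants are typically consumed by a CLI entry point, assuming the ExitStatus class above is importable; the run() stub and the exception mapping are assumptions, not part of the original module.

import sys

def run():
    pass  # stand-in for the real work of the tool

def main():
    try:
        run()
    except TimeoutError:
        return ExitStatus.ERROR_TIMEOUT
    except Exception:
        return ExitStatus.ERROR
    return ExitStatus.OK

if __name__ == '__main__':
    sys.exit(main())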
class aMSNGroupInputWindow(object): <NEW_LINE> <INDENT> def __init__(self, message, callback, contactviews, title = "aMSN Group Input"): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def set_title(self, title): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> raise NotImplementedError
This interface represents a window used to get a new group.
625990858a349b6b43687da9
class SqlManagedInstanceK8SSpec(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'scheduling': {'key': 'scheduling', 'type': 'K8SScheduling'}, 'replicas': {'key': 'replicas', 'type': 'int'}, } <NEW_LINE> def __init__( self, *, additional_properties: Optional[Dict[str, Any]] = None, scheduling: Optional["K8SScheduling"] = None, replicas: Optional[int] = None, **kwargs ): <NEW_LINE> <INDENT> super(SqlManagedInstanceK8SSpec, self).__init__(**kwargs) <NEW_LINE> self.additional_properties = additional_properties <NEW_LINE> self.scheduling = scheduling <NEW_LINE> self.replicas = replicas
The kubernetes spec information. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, any] :param scheduling: The kubernetes scheduling information. :type scheduling: ~azure_arc_data_management_client.models.K8SScheduling :param replicas: This option specifies the number of SQL Managed Instance replicas that will be deployed in your Kubernetes cluster for high availability purposes. If sku.tier is BusinessCritical, allowed values are '2' or '3' with default of '3'. If sku.tier is GeneralPurpose, replicas must be '1'. :type replicas: int
6259908597e22403b383ca43
class ComponentContext(object): <NEW_LINE> <INDENT> __slots__ = ("factory_context", "name", "properties", "__hidden_properties") <NEW_LINE> def __init__(self, factory_context, name, properties): <NEW_LINE> <INDENT> self.factory_context = factory_context <NEW_LINE> self.name = name <NEW_LINE> properties[constants.IPOPO_INSTANCE_NAME] = name <NEW_LINE> hidden_props_keys = set(properties).intersection( factory_context.hidden_properties ) <NEW_LINE> self.__hidden_properties = factory_context.hidden_properties.copy() <NEW_LINE> self.__hidden_properties.update( { key: value for key, value in properties.items() if key in hidden_props_keys } ) <NEW_LINE> self.properties = factory_context.properties.copy() <NEW_LINE> self.properties.update( { key: value for key, value in properties.items() if key not in hidden_props_keys } ) <NEW_LINE> <DEDENT> def get_bundle_context(self): <NEW_LINE> <INDENT> return self.factory_context.bundle_context <NEW_LINE> <DEDENT> def get_callback(self, event): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.factory_context.callbacks.get(event) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_field_callback(self, field, event): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.factory_context.field_callbacks[field][event] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_factory_name(self): <NEW_LINE> <INDENT> return self.factory_context.name <NEW_LINE> <DEDENT> def get_handler(self, handler_id): <NEW_LINE> <INDENT> return self.factory_context.get_handler(handler_id, None) <NEW_LINE> <DEDENT> def has_hidden_properties(self): <NEW_LINE> <INDENT> return bool(self.__hidden_properties) <NEW_LINE> <DEDENT> def grab_hidden_properties(self): <NEW_LINE> <INDENT> result = self.__hidden_properties.copy() <NEW_LINE> self.__hidden_properties.clear() <NEW_LINE> del self.__hidden_properties <NEW_LINE> return result
Represents the data stored in a component instance
6259908592d797404e389902
class LoadModel: <NEW_LINE> <INDENT> def __init__(self, model_name): <NEW_LINE> <INDENT> self.model_name = model_name <NEW_LINE> <DEDENT> def load_model(self): <NEW_LINE> <INDENT> model_load = joblib.load(os.path.dirname(__file__) + self.model_name) <NEW_LINE> return model_load
Class to load model
6259908599fddb7c1ca63b80
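A hedged usage sketch for LoadModel above; the pickle name and the downstream predict call are assumptions. Note that load_model() concatenates os.path.dirname(__file__) directly with model_name, so the name needs to start with a path separator.

# Assumes LoadModel is importable and an estimator was saved next to the
# module as 'iris_classifier.pkl' with joblib.dump().
loader = LoadModel('/iris_classifier.pkl')   # leading '/' because of the plain string concatenation
model = loader.load_model()
# predictions = model.predict(X_test)        # X_test would come from the caller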
class AuctionAuctionPeriod(Period): <NEW_LINE> <INDENT> @serializable(serialize_when_none=False) <NEW_LINE> def shouldStartAfter(self): <NEW_LINE> <INDENT> if self.endDate: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> auction = self.__parent__ <NEW_LINE> if auction.status not in ['active.tendering', 'active.auction']: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.startDate and get_now() > calc_auction_end_time(NUMBER_OF_STAGES, self.startDate): <NEW_LINE> <INDENT> start_after = calc_auction_end_time(NUMBER_OF_STAGES, self.startDate) <NEW_LINE> <DEDENT> elif auction.enquiryPeriod and auction.enquiryPeriod.endDate: <NEW_LINE> <INDENT> start_after = auction.enquiryPeriod.endDate <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return rounding_shouldStartAfter_after_midnigth(start_after, auction).isoformat() <NEW_LINE> <DEDENT> def validate_startDate(self, data, startDate): <NEW_LINE> <INDENT> auction = get_auction(data['__parent__']) <NEW_LINE> if not auction.revisions and not startDate: <NEW_LINE> <INDENT> raise ValidationError(u'This field is required.')
The auction period.
625990852c8b7c6e89bd5331
class BitFlips(OneOf): <NEW_LINE> <INDENT> def __init__(self, value, bits_range=range(1, 5), fuzzable=True, name=None): <NEW_LINE> <INDENT> field_name = name + '_%d' if name else 'bitflip_%d' <NEW_LINE> fields = [BitFlip(value, i, fuzzable, field_name % i) for i in bits_range] <NEW_LINE> super(BitFlips, self).__init__(fields=fields, fuzzable=fuzzable, name=name)
Perform bit-flip mutations of (N..) sequential bits on the value
625990855fcc89381b266f03
@GlueDevEndpoint.action_registry.register('delete') <NEW_LINE> class DeleteDevEndpoint(BaseAction): <NEW_LINE> <INDENT> schema = type_schema('delete') <NEW_LINE> permissions = ('glue:DeleteDevEndpoint',) <NEW_LINE> def delete_dev_endpoint(self, client, endpoint_set): <NEW_LINE> <INDENT> for e in endpoint_set: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> client.delete_dev_endpoint(EndpointName=e['EndpointName']) <NEW_LINE> <DEDENT> except client.exceptions.AlreadyExistsException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def process(self, resources): <NEW_LINE> <INDENT> futures = [] <NEW_LINE> client = local_session(self.manager.session_factory).client('glue') <NEW_LINE> with self.executor_factory(max_workers=2) as w: <NEW_LINE> <INDENT> for endpoint_set in chunks(resources, size=5): <NEW_LINE> <INDENT> futures.append(w.submit(self.delete_dev_endpoint, client, endpoint_set)) <NEW_LINE> <DEDENT> for f in as_completed(futures): <NEW_LINE> <INDENT> if f.exception(): <NEW_LINE> <INDENT> self.log.error( "Exception deleting glue dev endpoint \n %s", f.exception())
Deletes public Glue Dev Endpoints :example: .. code-block:: yaml policies: - name: delete-public-dev-endpoints resource: glue-dev-endpoint filters: - PublicAddress: present actions: - type: delete
625990853346ee7daa338408
class Meta: <NEW_LINE> <INDENT> verbose_name = u"Login Attempts" <NEW_LINE> verbose_name_plural = u"Login Attempts"
App labels.
6259908526068e7796d4e48e
class EpisodeFinish(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { "reason": {"key": "reason", "type": "str"}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(EpisodeFinish, self).__init__(**kwargs) <NEW_LINE> self.reason = kwargs.get("reason", None)
EpisodeFinish event signalling current episode is finished. :param reason: Reason for episodeFinish. Possible values include: "Invalid", "Unspecified", "LessonChanged", "Terminal", "Interrupted", "InvalidStateValue", "InternalError", "EpisodeComplete", "TrainingComplete". :type reason: str or ~microsoft_bonsai_api.simulator.generated.models.EpisodeFinishReason
625990854527f215b58eb746
class FormulaInvalidOperator(Exception): <NEW_LINE> <INDENT> pass
Raise when an invalid operator is encountered.
6259908523849d37ff852c07
class Keywords(object): <NEW_LINE> <INDENT> def __init__(self, md): <NEW_LINE> <INDENT> self.theme = [] <NEW_LINE> self.place = [] <NEW_LINE> self.temporal = [] <NEW_LINE> for i in md.findall('theme'): <NEW_LINE> <INDENT> theme = {} <NEW_LINE> val = i.find('themekt') <NEW_LINE> theme['themekt'] = util.testXMLValue(val) <NEW_LINE> theme['themekey'] = [] <NEW_LINE> for j in i.findall('themekey'): <NEW_LINE> <INDENT> theme['themekey'].append(util.testXMLValue(j)) <NEW_LINE> <DEDENT> self.theme.append(theme) <NEW_LINE> <DEDENT> for i in md.findall('place'): <NEW_LINE> <INDENT> place = {} <NEW_LINE> val = i.find('placekt') <NEW_LINE> place['placekt'] = util.testXMLValue(val) <NEW_LINE> place['placekey'] = [] <NEW_LINE> for j in i.findall('placekey'): <NEW_LINE> <INDENT> place['placekey'].append(util.testXMLValue(j)) <NEW_LINE> <DEDENT> self.place.append(place) <NEW_LINE> <DEDENT> for i in md.findall('temporal'): <NEW_LINE> <INDENT> temporal = {} <NEW_LINE> val = i.find('tempkt') <NEW_LINE> temporal['tempkt'] = util.testXMLValue(val) <NEW_LINE> temporal['tempkey'] = [] <NEW_LINE> for j in i.findall('tempkey'): <NEW_LINE> <INDENT> temporal['tempkey'].append(util.testXMLValue(j)) <NEW_LINE> <DEDENT> self.temporal.append(temporal)
Process keywords
62599085dc8b845886d55107
class GetValueTests(unittest.TestCase, AssertIsMixin): <NEW_LINE> <INDENT> def assertNotFound(self, item, key): <NEW_LINE> <INDENT> self.assertIs(_get_value(item, key), _NOT_FOUND) <NEW_LINE> <DEDENT> def test_dictionary__key_present(self): <NEW_LINE> <INDENT> item = {"foo": "bar"} <NEW_LINE> self.assertEquals(_get_value(item, "foo"), "bar") <NEW_LINE> <DEDENT> def test_dictionary__callable_not_called(self): <NEW_LINE> <INDENT> def foo_callable(self): <NEW_LINE> <INDENT> return "bar" <NEW_LINE> <DEDENT> item = {"foo": foo_callable} <NEW_LINE> self.assertNotEquals(_get_value(item, "foo"), "bar") <NEW_LINE> self.assertTrue(_get_value(item, "foo") is foo_callable) <NEW_LINE> <DEDENT> def test_dictionary__key_missing(self): <NEW_LINE> <INDENT> item = {} <NEW_LINE> self.assertNotFound(item, "missing") <NEW_LINE> <DEDENT> def test_dictionary__attributes_not_checked(self): <NEW_LINE> <INDENT> item = {} <NEW_LINE> attr_name = "keys" <NEW_LINE> self.assertEquals(getattr(item, attr_name)(), []) <NEW_LINE> self.assertNotFound(item, attr_name) <NEW_LINE> <DEDENT> def test_dictionary__dict_subclass(self): <NEW_LINE> <INDENT> class DictSubclass(dict): pass <NEW_LINE> item = DictSubclass() <NEW_LINE> item["foo"] = "bar" <NEW_LINE> self.assertEquals(_get_value(item, "foo"), "bar") <NEW_LINE> <DEDENT> def test_object__attribute_present(self): <NEW_LINE> <INDENT> item = SimpleObject() <NEW_LINE> self.assertEquals(_get_value(item, "foo"), "bar") <NEW_LINE> <DEDENT> def test_object__attribute_missing(self): <NEW_LINE> <INDENT> item = SimpleObject() <NEW_LINE> self.assertNotFound(item, "missing") <NEW_LINE> <DEDENT> def test_object__attribute_is_callable(self): <NEW_LINE> <INDENT> item = SimpleObject() <NEW_LINE> self.assertEquals(_get_value(item, "foo_callable"), "called...") <NEW_LINE> <DEDENT> def test_object__non_built_in_type(self): <NEW_LINE> <INDENT> item = datetime(2012, 1, 2) <NEW_LINE> self.assertEquals(_get_value(item, "day"), 2) <NEW_LINE> <DEDENT> def test_object__dict_like(self): <NEW_LINE> <INDENT> item = DictLike() <NEW_LINE> self.assertEquals(item["foo"], "bar") <NEW_LINE> self.assertNotFound(item, "foo") <NEW_LINE> <DEDENT> def test_built_in_type__integer(self): <NEW_LINE> <INDENT> class MyInt(int): pass <NEW_LINE> item1 = MyInt(10) <NEW_LINE> item2 = 10 <NEW_LINE> self.assertEquals(item1.real, 10) <NEW_LINE> self.assertEquals(item2.real, 10) <NEW_LINE> self.assertEquals(_get_value(item1, 'real'), 10) <NEW_LINE> self.assertNotFound(item2, 'real') <NEW_LINE> <DEDENT> def test_built_in_type__string(self): <NEW_LINE> <INDENT> class MyStr(str): pass <NEW_LINE> item1 = MyStr('abc') <NEW_LINE> item2 = 'abc' <NEW_LINE> self.assertEquals(item1.upper(), 'ABC') <NEW_LINE> self.assertEquals(item2.upper(), 'ABC') <NEW_LINE> self.assertEquals(_get_value(item1, 'upper'), 'ABC') <NEW_LINE> self.assertNotFound(item2, 'upper') <NEW_LINE> <DEDENT> def test_built_in_type__list(self): <NEW_LINE> <INDENT> class MyList(list): pass <NEW_LINE> item1 = MyList([1, 2, 3]) <NEW_LINE> item2 = [1, 2, 3] <NEW_LINE> self.assertEquals(item1.pop(), 3) <NEW_LINE> self.assertEquals(item2.pop(), 3) <NEW_LINE> self.assertEquals(_get_value(item1, 'pop'), 2) <NEW_LINE> self.assertNotFound(item2, 'pop')
Test context._get_value().
62599085a05bb46b3848bece
@toolbar_pool.register <NEW_LINE> class PersonExtensionToolbar(BaseExtensionToolbar): <NEW_LINE> <INDENT> model = Person
This extension class customizes the toolbar for the person page extension
62599085d486a94d0ba2db04
class RZThetaReactorMeshConverterByRingCompositionAxialBins( _RZThetaReactorMeshConverterByRingComposition, _RZThetaReactorMeshConverterByAxialBins, ): <NEW_LINE> <INDENT> pass
Generate a new mesh based on the radial compositions and axial bins in the core. See Also -------- _RZThetaReactorMeshConverterByRingComposition _RZThetaReactorMeshConverterByAxialBins
62599085a8370b77170f1f1a
class BrennerImageQuality(Filter): <NEW_LINE> <INDENT> def __init__(self, image, options, physical=False, verbal=False): <NEW_LINE> <INDENT> Filter.__init__(self, image, options, physical, verbal) <NEW_LINE> self.data.crop_to_rectangle() <NEW_LINE> <DEDENT> def calculate_brenner_quality(self): <NEW_LINE> <INDENT> data = self.data.get_array() <NEW_LINE> rows = data.shape[0] <NEW_LINE> columns = data.shape[1] - 2 <NEW_LINE> temp = numpy.zeros((rows, columns)) <NEW_LINE> temp[:] = ((data[:, 0:-2] - data[:, 2:]) ** 2) <NEW_LINE> return temp.sum()
Our implementation of the Brenner autofocus metric Brenner, J. F. et al (1976). An automated microscope for cytologic research a preliminary evaluation. Journal of Histochemistry & Cytochemistry, 24(1), 100–111. http://doi.org/10.1177/24.1.1254907
62599085fff4ab517ebcf364
class PdfInfo: <NEW_LINE> <INDENT> def __init__(self, infile): <NEW_LINE> <INDENT> self._infile = infile <NEW_LINE> self._pages = _pdf_get_all_pageinfo(infile) <NEW_LINE> <DEDENT> @property <NEW_LINE> def pages(self): <NEW_LINE> <INDENT> return self._pages <NEW_LINE> <DEDENT> @property <NEW_LINE> def min_version(self): <NEW_LINE> <INDENT> return max(page.min_version for page in self.pages) <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_userunit(self): <NEW_LINE> <INDENT> return any(page.userunit != 1.0 for page in self.pages) <NEW_LINE> <DEDENT> @property <NEW_LINE> def filename(self): <NEW_LINE> <INDENT> if not isinstance(self._infile, (str, Path)): <NEW_LINE> <INDENT> raise NotImplementedError("can't get filename from stream") <NEW_LINE> <DEDENT> return self._infile <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self._pages[item] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._pages) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<PdfInfo('...'), page count={}>".format(len(self))
Get summary information about a PDF
625990857cff6e4e811b758f
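A usage sketch for PdfInfo, assuming the class and its _pdf_get_all_pageinfo helper are importable from the same module; 'input.pdf' is a made-up filename.

info = PdfInfo('input.pdf')
print(len(info), 'pages, minimum PDF version required:', info.min_version)
if info.has_userunit:
    print('at least one page overrides UserUnit')
first_page = info[0]   # per-page objects come from _pdf_get_all_pageinfo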
class DeviceInfo(object): <NEW_LINE> <INDENT> def __init__(self, _fastboot_device_controller, serial_number, location=None, provision_status=ProvisionStatus.IDLE, provision_state=ProvisionState()): <NEW_LINE> <INDENT> self._fastboot_device_controller = _fastboot_device_controller <NEW_LINE> self.serial_number = serial_number <NEW_LINE> self.location = location <NEW_LINE> self.provision_status = provision_status <NEW_LINE> self.provision_state = provision_state <NEW_LINE> self.keys_left = None <NEW_LINE> self.operation_lock = threading.Lock() <NEW_LINE> self.operation = None <NEW_LINE> self.at_attest_uuid = None <NEW_LINE> <DEDENT> def Copy(self): <NEW_LINE> <INDENT> return DeviceInfo(None, self.serial_number, self.location, self.provision_status) <NEW_LINE> <DEDENT> def Reboot(self): <NEW_LINE> <INDENT> return self._fastboot_device_controller.Reboot() <NEW_LINE> <DEDENT> def Oem(self, oem_command, err_to_out=False): <NEW_LINE> <INDENT> return self._fastboot_device_controller.Oem(oem_command, err_to_out) <NEW_LINE> <DEDENT> def Flash(self, partition, file_path): <NEW_LINE> <INDENT> return self._fastboot_device_controller.Flash(partition, file_path) <NEW_LINE> <DEDENT> def Upload(self, file_path): <NEW_LINE> <INDENT> return self._fastboot_device_controller.Upload(file_path) <NEW_LINE> <DEDENT> def Download(self, file_path): <NEW_LINE> <INDENT> return self._fastboot_device_controller.Download(file_path) <NEW_LINE> <DEDENT> def GetVar(self, var): <NEW_LINE> <INDENT> return self._fastboot_device_controller.GetVar(var) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.serial_number == other.serial_number and self.location == other.location and self.provision_status == other.provision_status) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.location: <NEW_LINE> <INDENT> return self.serial_number + ' at location: ' + self.location <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.serial_number
The class to wrap the information about a fastboot device. Attributes: serial_number: The serial number for the device. location: The physical USB location for the device.
625990855fc7496912d49012
@base.Deprecate( is_removed=False, warning=('This command is deprecated. ' 'Please use `gcloud beta ml versions delete` instead.'), error=('This command has been removed. ' 'Please use `gcloud beta ml versions delete` instead.')) <NEW_LINE> @base.ReleaseTracks(base.ReleaseTrack.BETA) <NEW_LINE> class BetaDelete(delete.BetaDelete): <NEW_LINE> <INDENT> pass
Delete an existing Cloud ML version.
62599085bf627c535bcb3021
class KeyMappingCondition(ISingleCsvFilter): <NEW_LINE> <INDENT> def __init__(self, condition: Callable[[KS001], bool]): <NEW_LINE> <INDENT> self.condition = condition <NEW_LINE> <DEDENT> def is_valid(self, path: PathStr, csv_ks001str: KS001Str, csv_ks001: "KS001", data_type: DataTypeStr, index: int) -> bool: <NEW_LINE> <INDENT> return self.condition(csv_ks001) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> pass
Accepts a csv only if the condition is valid for the KS001 structure representing the datasource considered
625990855fdd1c0f98e5face
class PCUProtocolType(Row): <NEW_LINE> <INDENT> table_name = 'pcu_protocol_type' <NEW_LINE> primary_key = 'pcu_protocol_type_id' <NEW_LINE> join_tables = [] <NEW_LINE> fields = { 'pcu_protocol_type_id': Parameter(int, "PCU protocol type identifier"), 'pcu_type_id': Parameter(int, "PCU type identifier"), 'port': Parameter(int, "PCU port"), 'protocol': Parameter(str, "Protocol"), 'supported': Parameter(bool, "Is the port/protocol supported by PLC") } <NEW_LINE> def validate_port(self, port): <NEW_LINE> <INDENT> if not port: <NEW_LINE> <INDENT> raise PLCInvalidArgument("Port must be specified") <NEW_LINE> <DEDENT> return port <NEW_LINE> <DEDENT> def validate_protocol(self, protocol): <NEW_LINE> <INDENT> if not len(protocol): <NEW_LINE> <INDENT> raise PLCInvalidArgument("protocol must be specified") <NEW_LINE> <DEDENT> return protocol
Representation of a row in the pcu_protocol_type table. To use, instantiate with a dict of values.
6259908566673b3332c31f4e
class Boss(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200) <NEW_LINE> respawn_rate = models.IntegerField('respawn rate in seconds') <NEW_LINE> zone = models.ForeignKey(Zone) <NEW_LINE> def next_spawn(self, server): <NEW_LINE> <INDENT> deaths = DeathCount.objects.in_spawn_range(self, server) <NEW_LINE> death = deaths.by_vote() <NEW_LINE> if len(death) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> death = death[0] <NEW_LINE> <DEDENT> return death.died_at.astimezone(server.tz) + timedelta(seconds=self.respawn_rate) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
Object for an individual boss.
62599085283ffb24f3cf53ef
class Clipper2D(object): <NEW_LINE> <INDENT> def difference(self, poly): <NEW_LINE> <INDENT> clipper = self._prepare_clipper(poly) <NEW_LINE> if not clipper: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> differences = clipper.Execute(pc.CT_DIFFERENCE, pc.PFT_NONZERO, pc.PFT_NONZERO) <NEW_LINE> return self._process(differences) <NEW_LINE> <DEDENT> def intersect(self, poly): <NEW_LINE> <INDENT> clipper = self._prepare_clipper(poly) <NEW_LINE> if not clipper: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> intersections = clipper.Execute( pc.CT_INTERSECTION, pc.PFT_NONZERO, pc.PFT_NONZERO ) <NEW_LINE> return self._process(intersections) <NEW_LINE> <DEDENT> def union(self, poly): <NEW_LINE> <INDENT> clipper = self._prepare_clipper(poly) <NEW_LINE> if not clipper: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> unions = clipper.Execute(pc.CT_UNION, pc.PFT_NONZERO, pc.PFT_NONZERO) <NEW_LINE> return self._process(unions) <NEW_LINE> <DEDENT> def _prepare_clipper(self, poly): <NEW_LINE> <INDENT> s1 = pc.scale_to_clipper(self.vertices_list) <NEW_LINE> s2 = pc.scale_to_clipper(poly.vertices_list) <NEW_LINE> clipper = pc.Pyclipper() <NEW_LINE> clipper.AddPath(s1, poly_type=pc.PT_SUBJECT, closed=True) <NEW_LINE> clipper.AddPath(s2, poly_type=pc.PT_CLIP, closed=True) <NEW_LINE> return clipper <NEW_LINE> <DEDENT> def _process(self, results): <NEW_LINE> <INDENT> if not results: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> scaled = [pc.scale_from_clipper(r) for r in results] <NEW_LINE> polys = [self.as_2d(r) for r in scaled] <NEW_LINE> processed = [] <NEW_LINE> for poly in polys: <NEW_LINE> <INDENT> if almostequal(poly.normal_vector, self.normal_vector): <NEW_LINE> <INDENT> processed.append(poly) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> processed.append(poly.invert_orientation()) <NEW_LINE> <DEDENT> <DEDENT> return processed
Mixin that adds 2D clipping operations (difference, intersection and union) to the Polygon2D class.
625990854c3428357761be09
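A usage sketch, assuming a Polygon2D class that mixes in Clipper2D and supplies the attributes the mixin relies on (vertices_list, normal_vector, as_2d() and invert_orientation()), and that pyclipper is installed and imported as pc; the square coordinates are invented for the example.

# Hypothetical usage; Polygon2D is assumed to mix in Clipper2D and to accept
# a list of 2D vertices, and pyclipper must be importable as pc.
a = Polygon2D([(0, 0), (4, 0), (4, 4), (0, 4)])
b = Polygon2D([(2, 2), (6, 2), (6, 6), (2, 6)])

overlap = a.intersect(b)      # list of overlapping pieces, [] if disjoint
remainder = a.difference(b)   # parts of a that b does not cover
combined = a.union(b)         # merged outline(s)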
class RequiredError(BormError): <NEW_LINE> <INDENT> pass
Raised when a required value is missing.
6259908592d797404e389904
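A small illustration of how such an error might be raised; the field name and message are invented for the example.

# Hypothetical validator; the 'name' field and the message are invented.
def validate_record(record):
    if not record.get("name"):
        raise RequiredError("field 'name' is required")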
class VMwareHTTPFile(object): <NEW_LINE> <INDENT> def __init__(self, file_handle): <NEW_LINE> <INDENT> self.eof = False <NEW_LINE> self.file_handle = file_handle <NEW_LINE> <DEDENT> def set_eof(self, eof): <NEW_LINE> <INDENT> self.eof = eof <NEW_LINE> <DEDENT> def get_eof(self): <NEW_LINE> <INDENT> return self.eof <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.file_handle.close() <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> LOG.exception(exc) <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> def _build_vim_cookie_headers(self, vim_cookies): <NEW_LINE> <INDENT> cookie_header = "" <NEW_LINE> for vim_cookie in vim_cookies: <NEW_LINE> <INDENT> cookie_header = vim_cookie.name + "=" + vim_cookie.value <NEW_LINE> break <NEW_LINE> <DEDENT> return cookie_header <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def read(self, chunk_size): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_size(self): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for HTTP file handles; read, write and get_size must be implemented by subclasses.
62599085a05bb46b3848becf
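The class leaves read, write and get_size abstract, so a concrete transfer class has to fill them in. The sketch below is illustrative only and is not the library's actual subclass: it wraps an already-open response object that supports .read() and .close().

# Illustrative sketch, not the library's real subclass: a read-only file over
# a response object that supports .read() and .close().
class ReadOnlyHTTPFile(VMwareHTTPFile):
    def read(self, chunk_size):
        data = self.file_handle.read(chunk_size)
        if not data:
            self.set_eof(True)
        return data

    def write(self, data):
        raise NotImplementedError("read-only handle")

    def get_size(self):
        raise NotImplementedError("size is not tracked in this sketch")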
class ProjectManagerCli(): <NEW_LINE> <INDENT> def get_proj_list(self, root, sort_reverse=False): <NEW_LINE> <INDENT> proj_util = get_proj_util_mod(root) <NEW_LINE> python_path = _python_path <NEW_LINE> cmd = [python_path, "-E", join(root, "apps_dev", "project.py"), "--no-colorama"] <NEW_LINE> cmd.append("list") <NEW_LINE> out = self.exec_proc_sync(cmd, root) <NEW_LINE> if out[0]: <NEW_LINE> <INDENT> print("Project list internal error", file=sys.stderr) <NEW_LINE> return [] <NEW_LINE> <DEDENT> proj_strs = out[1].splitlines() <NEW_LINE> if sort_reverse: <NEW_LINE> <INDENT> proj_strs.sort() <NEW_LINE> proj_strs.reverse() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> proj_strs.sort() <NEW_LINE> <DEDENT> proj_list = [] <NEW_LINE> for s in proj_strs: <NEW_LINE> <INDENT> name = s.split("->")[0].strip(" ") <NEW_LINE> path = s.split("->")[1].strip(" ") <NEW_LINE> path_abs = normpath(join(root, path)) <NEW_LINE> proj_cfg = proj_util.get_proj_cfg(path_abs) <NEW_LINE> proj_list.append({"name": name, "path": path, "config": proj_cfg}) <NEW_LINE> <DEDENT> return proj_list <NEW_LINE> <DEDENT> def exec_proc_sync(self, cmd, root): <NEW_LINE> <INDENT> cwd = os.getcwd() <NEW_LINE> os.chdir(root) <NEW_LINE> try: <NEW_LINE> <INDENT> out = (0, subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)) <NEW_LINE> <DEDENT> except subprocess.CalledProcessError as ex: <NEW_LINE> <INDENT> out = (ex.returncode, ex.output) <NEW_LINE> <DEDENT> os.chdir(cwd) <NEW_LINE> return out <NEW_LINE> <DEDENT> def exec_proc_async_pipe(self, cmd, root): <NEW_LINE> <INDENT> cwd = os.getcwd() <NEW_LINE> os.chdir(root) <NEW_LINE> proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) <NEW_LINE> os.chdir(cwd) <NEW_LINE> console_queue = tornado.queues.Queue() <NEW_LINE> ConsoleHandler.console_queue = console_queue <NEW_LINE> t = threading.Thread(target=self.enqueue_output, args=(proc.stdout, console_queue)) <NEW_LINE> t.daemon = True <NEW_LINE> t.start() <NEW_LINE> return proc <NEW_LINE> <DEDENT> def enqueue_output(self, out, queue): <NEW_LINE> <INDENT> for line in iter(out.readline, ""): <NEW_LINE> <INDENT> queue.put(line) <NEW_LINE> <DEDENT> out.close()
Abstraction layer over the project manager CLI.
6259908571ff763f4b5e92fd
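A usage sketch, assuming root points at an SDK tree containing apps_dev/project.py and that _python_path and get_proj_util_mod are defined elsewhere in the same module, as the snippet implies; the path below is a placeholder.

# Hypothetical usage; '/path/to/sdk' is a placeholder and _python_path /
# get_proj_util_mod are assumed to exist in the surrounding module.
cli = ProjectManagerCli()
projects = cli.get_proj_list(root="/path/to/sdk", sort_reverse=True)
for proj in projects:
    print(proj["name"], "->", proj["path"])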
class DBUtils(): <NEW_LINE> <INDENT> def convertAttributesToDict(self, object): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> adict = dict((key, value) for key, value in object.__dict__.iteritems() if not callable(value) and not key.startswith('_')) <NEW_LINE> return adict <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None
Helper functions for database objects.
62599085fff4ab517ebcf366
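A usage sketch. Note that the snippet calls dict.iteritems(), so it targets Python 2; under Python 3 that call raises AttributeError, the bare except swallows it, and the method returns None. The Row class below is invented for the example.

# Hypothetical usage; Row is invented for the example. Under Python 2 this
# prints {'id': 1, 'name': 'example'}; under Python 3 it prints None because
# iteritems() does not exist and the bare except returns None.
class Row(object):
    def __init__(self):
        self.id = 1
        self.name = "example"
        self._cache = {}          # leading underscore, so it is filtered out

print(DBUtils().convertAttributesToDict(Row()))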
class JobOfferComment(models.Model): <NEW_LINE> <INDENT> text = models.TextField(verbose_name=_('Texto')) <NEW_LINE> comment_type = models.CharField( max_length=32, choices=CommentType.choices, verbose_name=_('Tipo')) <NEW_LINE> created_at = models.DateTimeField( auto_now_add=True, verbose_name=_('Rango salarial') ) <NEW_LINE> created_by = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name=_('Creado por'), related_name='created_joboffer_comments', ) <NEW_LINE> joboffer = models.ForeignKey(JobOffer, on_delete=models.CASCADE) <NEW_LINE> @classmethod <NEW_LINE> def get_options(cls): <NEW_LINE> <INDENT> return cls._meta <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"{self.joboffer.title}: {self.get_comment_type_display()}"
A comment on a JobOffer.
625990854a966d76dd5f0a36
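A creation sketch, assuming Django is configured and that a JobOffer instance and a moderator user already exist; the CommentType member used below is an assumption, since the CommentType choices are defined elsewhere in the module.

# Hypothetical usage; 'offer' and 'moderator' are assumed to exist already and
# CommentType.MODERATION is an assumed member of the CommentType choices.
comment = JobOfferComment.objects.create(
    text="Please add a salary range.",
    comment_type=CommentType.MODERATION,
    created_by=moderator,
    joboffer=offer,
)
print(comment)    # "<offer title>: <comment type label>"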