Columns: code (string, length 4 to 4.48k), docstring (string, length 1 to 6.45k), _id (string, length 24)
class BaseAbstractConv2d(Op): <NEW_LINE> <INDENT> check_broadcast = False <NEW_LINE> __props__ = ('border_mode', 'subsample', 'filter_flip', 'imshp', 'kshp') <NEW_LINE> def __init__(self, imshp=None, kshp=None, border_mode="valid", subsample=(1, 1), filter_flip=True): <NEW_LINE> <INDENT> if isinstance(border_mode, int): <NEW_LINE> <INDENT> border_mode = (border_mode, border_mode) <NEW_LINE> <DEDENT> if isinstance(border_mode, tuple): <NEW_LINE> <INDENT> pad_h, pad_w = map(int, border_mode) <NEW_LINE> border_mode = (pad_h, pad_w) <NEW_LINE> <DEDENT> if not ((isinstance(border_mode, tuple) and min(border_mode) >= 0) or border_mode in ('valid', 'full', 'half')): <NEW_LINE> <INDENT> raise ValueError( 'invalid border_mode {}, which must be either ' '"valid", "full", "half", an integer or a pair of' ' integers'.format(border_mode)) <NEW_LINE> <DEDENT> self.imshp = tuple(imshp) if imshp else None <NEW_LINE> self.kshp = tuple(kshp) if kshp else None <NEW_LINE> self.border_mode = border_mode <NEW_LINE> self.filter_flip = filter_flip <NEW_LINE> if len(subsample) != 2: <NEW_LINE> <INDENT> raise ValueError("subsample must have two elements") <NEW_LINE> <DEDENT> self.subsample = subsample <NEW_LINE> <DEDENT> def flops(self, inp, outp): <NEW_LINE> <INDENT> inputs, filters = inp <NEW_LINE> outputs, = outp <NEW_LINE> assert inputs[1] == filters[1] <NEW_LINE> flops = filters[2] * filters[3] * 2 <NEW_LINE> flops *= outputs[2] * outputs[3] <NEW_LINE> flops *= inputs[1] * filters[0] * inputs[0] <NEW_LINE> return flops
Base class for AbstractConv Define an abstract convolution op that will be replaced with the appropriate implementation :type imshp: None, tuple/list of len 4 of int or Constant variable :param imshp: The shape of the input parameter. Optional, possibly used to choose an optimal implementation. You can give ``None`` for any element of the list to specify that this element is not known at compile time. imshp is defined w.r.t the forward conv. :type kshp: None, tuple/list of len 4 of int or Constant variable :param kshp: The shape of the filters parameter. Optional, possibly used to choose an optimal implementation. You can give ``None`` for any element of the list to specify that this element is not known at compile time. kshp is defined w.r.t the forward conv. :type border_mode: str, int or tuple of two int :param border_mode: Either of the following: * ``'valid'``: apply filter wherever it completely overlaps with the input. Generates output of shape: input shape - filter shape + 1 * ``'full'``: apply filter wherever it partly overlaps with the input. Generates output of shape: input shape + filter shape - 1 * ``'half'``: pad input with a symmetric border of ``filter rows // 2`` rows and ``filter columns // 2`` columns, then perform a valid convolution. For filters with an odd number of rows and columns, this leads to the output shape being equal to the input shape. * ``int``: pad input with a symmetric border of zeros of the given width, then perform a valid convolution. * ``(int1, int2)``: pad input with a symmetric border of ``int1`` rows and ``int2`` columns, then perform a valid convolution. :type subsample: tuple of len 2 :param subsample: factor by which to subsample the output. Also called strides elsewhere. :type filter_flip: bool :param filter_flip: If ``True``, will flip the filter rows and columns before sliding them over the input. This operation is normally referred to as a convolution, and this is the default. If ``False``, the filters are not flipped and the operation is referred to as a cross-correlation.
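A minimal sketch of the output-row arithmetic described above, assuming stride 1 (subsample=(1, 1)); the helper name is hypothetical and not part of the AbstractConv API.
def conv_output_rows(input_rows, filter_rows, border_mode):
    # 'valid': filter must fully overlap the input
    if border_mode == 'valid':
        return input_rows - filter_rows + 1
    # 'full': filter may partly overlap the input
    if border_mode == 'full':
        return input_rows + filter_rows - 1
    # 'half': pad by filter_rows // 2 on each side, then do a valid convolution
    if border_mode == 'half':
        return input_rows + 2 * (filter_rows // 2) - filter_rows + 1
    # int or (int, int): explicit symmetric zero padding, then a valid convolution
    pad = border_mode if isinstance(border_mode, int) else border_mode[0]
    return input_rows + 2 * pad - filter_rows + 1

# e.g. conv_output_rows(32, 5, 'valid') -> 28, conv_output_rows(32, 5, 'half') -> 32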
62599050596a897236129000
class EvtAnimationsLoaded(event.Event): <NEW_LINE> <INDENT> pass
Triggered when animations names have been received from the engine
625990508e71fb1e983bcf69
class ConcreteHandler2(Handler): <NEW_LINE> <INDENT> def check_range(self, request): <NEW_LINE> <INDENT> start, end = self.get_interval_from_db() <NEW_LINE> if start <= request < end: <NEW_LINE> <INDENT> print("request {} handled in handler 2".format(request)) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def get_interval_from_db(): <NEW_LINE> <INDENT> return (20, 30)
... With helper methods.
62599050d7e4931a7ef3d51d
class User(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> username = models.CharField(db_index=True, max_length=255, unique=True) <NEW_LINE> email = models.EmailField(db_index=True) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated_at = models.DateTimeField(auto_now=True) <NEW_LINE> USERNAME_FIELD = "username" <NEW_LINE> REQUIRED_FIELDS = ["email"] <NEW_LINE> objects = UserManager() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> string = ("Username: {}" " E-mail: {}").format(self.username, self.email) <NEW_LINE> return string <NEW_LINE> <DEDENT> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> return self._generate_jwt_token() <NEW_LINE> <DEDENT> def get_full_name(self): <NEW_LINE> <INDENT> return self.username <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.username <NEW_LINE> <DEDENT> def _generate_jwt_token(self): <NEW_LINE> <INDENT> primary_key = self.pk <NEW_LINE> expiration_date = datetime.now() + timedelta(days=60) <NEW_LINE> integer_expiration_date = int(expiration_date.strftime("%s")) <NEW_LINE> jwt_data = { "id": primary_key, "exp": integer_expiration_date } <NEW_LINE> token = jwt.encode(jwt_data, settings.SECRET_KEY, algorithm="HS256") <NEW_LINE> return token.decode('utf-8')
An extension of the User class: https://docs.djangoproject.com/en/1.10/topics/auth/customizing/#django.contrib.auth.models.CustomUser Attributes: username (CharField): Unique username email (EmailField): An e-mail is_active (BooleanField): User information cannot be deleted, only deactivated is_staff (BooleanField): The `is_staff` flag is expected by Django to determine who can and cannot log into the Django admin site. For most users this flag will always be false. is_superuser (BooleanField): The `is_superuser` flag is expected by Django created_at (DateTimeField): A timestamp representing when this object was created. updated_at (DateTimeField): A timestamp representing when this object was last updated.
62599050d53ae8145f919907
class ReceivedBeaconMessages(ReceivedXXXPackets): <NEW_LINE> <INDENT> def __init__(self, period, simulationTime): <NEW_LINE> <INDENT> ReceivedXXXPackets.__init__(self, 'detection.message.BeaconMessage', period, simulationTime)
Total number of received beacon messages
62599050dd821e528d6da37f
class scan: <NEW_LINE> <INDENT> __slots__ = ("func", "iter") <NEW_LINE> def __init__(self, func, iterable): <NEW_LINE> <INDENT> self.func, self.iter = func, iterable <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> acc = empty_acc = _coconut.object() <NEW_LINE> for item in self.iter: <NEW_LINE> <INDENT> if acc is empty_acc: <NEW_LINE> <INDENT> acc = item <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> acc = self.func(acc, item) <NEW_LINE> <DEDENT> yield acc <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return _coconut.len(self.iter) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "scan(" + _coconut.repr(self.iter) + ")" <NEW_LINE> <DEDENT> def __reduce__(self): <NEW_LINE> <INDENT> return (self.__class__, (self.func, self.iter)) <NEW_LINE> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> return self.__class__(self.func, _coconut.copy.copy(self.iter)) <NEW_LINE> <DEDENT> def __fmap__(self, func): <NEW_LINE> <INDENT> return _coconut_map(func, self)
Reduce func over iterable, yielding intermediate results.
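The behaviour described here matches itertools.accumulate from the standard library; a quick stdlib illustration that does not rely on the Coconut runtime the class above depends on:
import operator
from itertools import accumulate

# Running reduction with +: each intermediate sum is yielded in order.
print(list(accumulate([1, 2, 3, 4], operator.add)))  # [1, 3, 6, 10]
# scan(operator.add, [1, 2, 3, 4]) yields the same sequence when run under Coconut.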
6259905055399d3f056279bf
class Block: <NEW_LINE> <INDENT> MISSING = 1 <NEW_LINE> PROCESSING = 2 <NEW_LINE> COMPLETE = 3 <NEW_LINE> def __init__(self, length, offset): <NEW_LINE> <INDENT> self.length = length <NEW_LINE> self.offset = offset <NEW_LINE> self.state = self.MISSING <NEW_LINE> self.data = bytearray() <NEW_LINE> <DEDENT> def fill_block_with_data(self, data: bytes): <NEW_LINE> <INDENT> if len(self.data) == self.length: <NEW_LINE> <INDENT> self.data.extend(data) <NEW_LINE> self.state = self.COMPLETE <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.state = self.MISSING
Integral part of the piece as a whole
6259905024f1403a92686320
class NotebookHandler(PatternMatchingEventHandler): <NEW_LINE> <INDENT> patterns = ["*.ipynb"] <NEW_LINE> def process(self, event): <NEW_LINE> <INDENT> if "untitled" not in event.src_path.lower() and ".~" not in event.src_path: <NEW_LINE> <INDENT> render_notebooks(reload_config=True) <NEW_LINE> <DEDENT> <DEDENT> def on_modified(self, event): <NEW_LINE> <INDENT> self.process(event) <NEW_LINE> <DEDENT> def on_created(self, event): <NEW_LINE> <INDENT> self.process(event)
Handle notebook changes.
62599050dc8b845886d54a64
@inherit_doc <NEW_LINE> class LogisticRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter, HasRegParam): <NEW_LINE> <INDENT> _java_class = "org.apache.spark.ml.classification.LogisticRegression" <NEW_LINE> def _create_model(self, java_model): <NEW_LINE> <INDENT> return LogisticRegressionModel(java_model)
Logistic regression. >>> from pyspark.sql import Row >>> from pyspark.mllib.linalg import Vectors >>> dataset = sqlCtx.inferSchema(sc.parallelize([ Row(label=1.0, features=Vectors.dense(1.0)), Row(label=0.0, features=Vectors.sparse(1, [], []))])) >>> lr = LogisticRegression() .setMaxIter(5) .setRegParam(0.01) >>> model = lr.fit(dataset) >>> test0 = sqlCtx.inferSchema(sc.parallelize([Row(features=Vectors.dense(-1.0))])) >>> print model.transform(test0).head().prediction 0.0 >>> test1 = sqlCtx.inferSchema(sc.parallelize([Row(features=Vectors.sparse(1, [0], [1.0]))])) >>> print model.transform(test1).head().prediction 1.0
62599050498bea3a75a58fc6
class Paragraph(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.school = None <NEW_LINE> self.department = None <NEW_LINE> self.specialized_study = None <NEW_LINE> self.location = None <NEW_LINE> self.year = None <NEW_LINE> self.program_study_type = None <NEW_LINE> self.subject = None <NEW_LINE> self.credit_count = None <NEW_LINE> self.subject_required_type = False <NEW_LINE> self.lecturer = [] <NEW_LINE> self.subject_summary = None
Filter the text paragraphs
62599050f7d966606f749308
class SafeCookieError(Exception): <NEW_LINE> <INDENT> def __init__(self, error_message): <NEW_LINE> <INDENT> super().__init__(error_message) <NEW_LINE> log.error(error_message)
An exception class for safe cookie related errors.
62599050009cb60464d029df
class URLParameterHandler(urllib2.BaseHandler): <NEW_LINE> <INDENT> def __init__(self, url_param): <NEW_LINE> <INDENT> self._url_parameter = url_param <NEW_LINE> <DEDENT> def http_request(self, req): <NEW_LINE> <INDENT> url_instance = URL(req.get_full_url()) <NEW_LINE> url_instance.set_param(self._url_parameter) <NEW_LINE> new_request = HTTPRequest(url_instance, method=req.get_method(), data=req.get_data(), headers=req.get_headers(), origin_req_host=req.get_origin_req_host(), unverifiable=req.is_unverifiable(), retries=req.retries_left, cookies=req.cookies, cache=req.get_from_cache, new_connection=req.new_connection, follow_redirects=req.follow_redirects, use_basic_auth=req.use_basic_auth, use_proxy=req.use_proxy, timeout=req.timeout) <NEW_LINE> return new_request <NEW_LINE> <DEDENT> https_request = http_request
Appends a user configured URL parameter to the request URL. e.g.: http://www.myserver.com/index.html;jsessionid=dd18fa45014ce4fc?id=5 See Section 2.1 URL Syntactic Components of RFC 1808 <scheme>://<net_loc>/<path>;<params>?<query>#<fragment> See Section 3.2.2 of RFC 1738 :author: Kevin Denver ( [email protected] )
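A small stdlib illustration of the URL components referenced above; urllib.parse exposes the ;params segment separately from the query string, which is the piece this handler manipulates:
from urllib.parse import urlparse

parts = urlparse('http://www.myserver.com/index.html;jsessionid=dd18fa45014ce4fc?id=5')
print(parts.params)  # 'jsessionid=dd18fa45014ce4fc'
print(parts.query)   # 'id=5'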
62599050cad5886f8bdc5ad1
class MeasurementTimeseriesTVPResultMixin(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._result_points = [] <NEW_LINE> self._unit_of_measurement = None <NEW_LINE> super(MeasurementTimeseriesTVPResultMixin, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def result_points(self) -> List['TimeValuePair']: <NEW_LINE> <INDENT> return self._result_points <NEW_LINE> <DEDENT> @result_points.setter <NEW_LINE> def result_points(self, value: List['TimeValuePair']): <NEW_LINE> <INDENT> self._result_points = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self) -> str: <NEW_LINE> <INDENT> return self._unit_of_measurement <NEW_LINE> <DEDENT> @unit_of_measurement.setter <NEW_LINE> def unit_of_measurement(self, value: str): <NEW_LINE> <INDENT> self._unit_of_measurement = value
Result Mixin: Measurement Timeseries TimeValuePair
6259905076d4e153a661dccb
class sets(): <NEW_LINE> <INDENT> def __init__( self, log, ra, dec, radius, sourceList, convertToArray=True ): <NEW_LINE> <INDENT> self.log = log <NEW_LINE> log.debug("instansiating a new 'sets' object") <NEW_LINE> self.ra = ra <NEW_LINE> self.dec = dec <NEW_LINE> self.radius = radius <NEW_LINE> self.sourceList = sourceList <NEW_LINE> self.convertToArray = convertToArray <NEW_LINE> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def match( self): <NEW_LINE> <INDENT> return self._extract_all_sets_from_list() <NEW_LINE> <DEDENT> def _extract_all_sets_from_list( self): <NEW_LINE> <INDENT> self.log.info('starting the ``_extract_all_sets_from_list`` method') <NEW_LINE> from HMpTy import HTM <NEW_LINE> mesh = HTM( depth=12, log=self.log ) <NEW_LINE> matchIndices1, matchIndices2, seps = mesh.match( ra1=self.ra, dec1=self.dec, ra2=self.ra, dec2=self.dec, radius=self.radius, maxmatch=0, convertToArray=self.convertToArray ) <NEW_LINE> anchorIndicies = [] <NEW_LINE> childIndicies = [] <NEW_LINE> allMatches = [] <NEW_LINE> thisMatch = None <NEW_LINE> for m1, m2, s in zip(matchIndices1, matchIndices2, seps): <NEW_LINE> <INDENT> if m1 not in anchorIndicies and m1 not in childIndicies: <NEW_LINE> <INDENT> if thisMatch: <NEW_LINE> <INDENT> allMatches.append(thisMatch) <NEW_LINE> <DEDENT> thisMatch = [self.sourceList[m1]] <NEW_LINE> anchorIndicies.append(m1) <NEW_LINE> <DEDENT> if m2 not in anchorIndicies and m2 not in childIndicies: <NEW_LINE> <INDENT> childIndicies.append(m2) <NEW_LINE> thisMatch.append(self.sourceList[m2]) <NEW_LINE> <DEDENT> <DEDENT> if thisMatch: <NEW_LINE> <INDENT> allMatches.append(thisMatch) <NEW_LINE> <DEDENT> self.log.info('completed the ``_extract_all_sets_from_list`` method') <NEW_LINE> return allMatches
*Given a list of coordinates and a crossmatch radius, split the list up into sets of associated locations* **Key Arguments:** - ``log`` -- logger - ``ra`` -- a list of the coordinate right ascensions - ``dec`` -- a list of the coordinate declinations (same length as ``ra``) - ``radius`` -- the radius to crossmatch the list of coordinates against itself (degrees) - ``sourceList`` -- the list of source information to be divided into associated sets (same length as ``ra`` and ``dec``) - ``convertToArray`` -- convert the coordinates into an array. Default *True*. Can bypass the conversion check if you are sure the coordinates are already in a numpy array **Usage:** Given a list of transient metadata (any list, possibly a list of dictionaries) you can divide the list into associated sets of transients by running the following code: .. code-block:: python from HMpTy.htm import sets xmatcher = sets( log=log, ra=raList, dec=decList, radius=10 / (60. * 60.), sourceList=transientList ) allMatches = xmatcher.match ``raList`` and ``decList`` are the coordinates for the sources found in the ``transientList`` and are therefore the same length as the ``transientList`` (it's up to the user to create these lists). This code will group the sources into sets of associated transients which are within a radius of 10 arcsecs from one another. ``allMatches`` is a list of lists, each contained list being an associated group of sources. .. image:: https://i.imgur.com/hHExDqR.png :width: 800px :alt: divide a list of sources into associated sets
62599050cb5e8a47e493cbd9
class nakedTwin(__nakedN): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = "Naked Twin" <NEW_LINE> self.minSize = None <NEW_LINE> self.maxSize = None <NEW_LINE> self.rank = 20 <NEW_LINE> <DEDENT> def solve(self, puzzle): <NEW_LINE> <INDENT> return super(nakedTwin, self).solve(puzzle, 2)
Naked Twin This plugin looks for 2 cells in every intersection that have 2, and only 2 candidates in common. If these are found, these 2 candidates are removed from every other location in the intersection.
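A hypothetical illustration of the naked-twin rule on a single intersection (row, column or box), modelled as a cell -> candidate-set mapping; this is not the plugin's internal representation:
def eliminate_naked_twins(unit):
    # If two cells share exactly the same pair of candidates, that pair
    # cannot appear anywhere else in the intersection.
    for a in unit:
        for b in unit:
            if a != b and len(unit[a]) == 2 and unit[a] == unit[b]:
                for cell in unit:
                    if cell not in (a, b):
                        unit[cell] -= unit[a]
    return unit

unit = {'A1': {2, 3}, 'A2': {2, 3}, 'A3': {1, 2, 3, 4}, 'A4': {3, 5}}
print(eliminate_naked_twins(unit))  # A3 -> {1, 4}, A4 -> {5}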
62599050a79ad1619776b50f
class Condition(_TimeoutGarbageCollector): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Condition, self).__init__() <NEW_LINE> self.io_loop = ioloop.IOLoop.current() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> result = '<%s' % (self.__class__.__name__, ) <NEW_LINE> if self._waiters: <NEW_LINE> <INDENT> result += ' waiters[%s]' % len(self._waiters) <NEW_LINE> <DEDENT> return result + '>' <NEW_LINE> <DEDENT> def wait(self, timeout=None): <NEW_LINE> <INDENT> waiter = Future() <NEW_LINE> self._waiters.append(waiter) <NEW_LINE> if timeout: <NEW_LINE> <INDENT> def on_timeout(): <NEW_LINE> <INDENT> waiter.set_result(False) <NEW_LINE> self._garbage_collect() <NEW_LINE> <DEDENT> io_loop = ioloop.IOLoop.current() <NEW_LINE> timeout_handle = io_loop.add_timeout(timeout, on_timeout) <NEW_LINE> waiter.add_done_callback( lambda _: io_loop.remove_timeout(timeout_handle)) <NEW_LINE> <DEDENT> return waiter <NEW_LINE> <DEDENT> def notify(self, n=1): <NEW_LINE> <INDENT> waiters = [] <NEW_LINE> while n and self._waiters: <NEW_LINE> <INDENT> waiter = self._waiters.popleft() <NEW_LINE> if not waiter.done(): <NEW_LINE> <INDENT> n -= 1 <NEW_LINE> waiters.append(waiter) <NEW_LINE> <DEDENT> <DEDENT> for waiter in waiters: <NEW_LINE> <INDENT> waiter.set_result(True) <NEW_LINE> <DEDENT> <DEDENT> def notify_all(self): <NEW_LINE> <INDENT> self.notify(len(self._waiters))
A condition allows one or more coroutines to wait until notified. Like a standard `threading.Condition`, but does not need an underlying lock that is acquired and released. With a `Condition`, coroutines can wait to be notified by other coroutines: .. testcode:: from tornado import gen from tornado.ioloop import IOLoop from tornado.locks import Condition condition = Condition() @gen.coroutine def waiter(): print("I'll wait right here") yield condition.wait() # Yield a Future. print("I'm done waiting") @gen.coroutine def notifier(): print("About to notify") condition.notify() print("Done notifying") @gen.coroutine def runner(): # Yield two Futures; wait for waiter() and notifier() to finish. yield [waiter(), notifier()] IOLoop.current().run_sync(runner) .. testoutput:: I'll wait right here About to notify Done notifying I'm done waiting `wait` takes an optional ``timeout`` argument, which is either an absolute timestamp:: io_loop = IOLoop.current() # Wait up to 1 second for a notification. yield condition.wait(timeout=io_loop.time() + 1) ...or a `datetime.timedelta` for a timeout relative to the current time:: # Wait up to 1 second. yield condition.wait(timeout=datetime.timedelta(seconds=1)) The method raises `tornado.util.TimeoutError` if there's no notification before the deadline.
6259905045492302aabfd97a
class SimpleApp(wx.App): <NEW_LINE> <INDENT> def __init__(self, xrcfile, main_frame_name): <NEW_LINE> <INDENT> self.__xrcfile = xrcfile <NEW_LINE> self.__main_frame_name = main_frame_name <NEW_LINE> wx.App.__init__(self, redirect=False) <NEW_LINE> <DEDENT> def OnInit(self): <NEW_LINE> <INDENT> self.controls = wxMeta(self.__xrcfile, self.__main_frame_name) <NEW_LINE> result = self.Init() <NEW_LINE> self.controls._frame.Show() <NEW_LINE> return result <NEW_LINE> <DEDENT> def Init(self): <NEW_LINE> <INDENT> pass
Basic application class that the user can derive from for simple applications.
62599050e76e3b2f99fd9ea6
class VodPoliticalReviewSegmentItem(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.StartTimeOffset = None <NEW_LINE> self.EndTimeOffset = None <NEW_LINE> self.Confidence = None <NEW_LINE> self.Suggestion = None <NEW_LINE> self.Name = None <NEW_LINE> self.Label = None <NEW_LINE> self.Url = None <NEW_LINE> self.PicUrlExpireTimeStamp = None <NEW_LINE> self.AreaCoordSet = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.StartTimeOffset = params.get("StartTimeOffset") <NEW_LINE> self.EndTimeOffset = params.get("EndTimeOffset") <NEW_LINE> self.Confidence = params.get("Confidence") <NEW_LINE> self.Suggestion = params.get("Suggestion") <NEW_LINE> self.Name = params.get("Name") <NEW_LINE> self.Label = params.get("Label") <NEW_LINE> self.Url = params.get("Url") <NEW_LINE> self.PicUrlExpireTimeStamp = params.get("PicUrlExpireTimeStamp") <NEW_LINE> self.AreaCoordSet = params.get("AreaCoordSet")
Result type of the politically sensitive information detection task in content audit
625990508e71fb1e983bcf6c
class PostDetailHandler(BlogHandler): <NEW_LINE> <INDENT> @post_exists <NEW_LINE> def get(self, post_id, post): <NEW_LINE> <INDENT> if not self.user: <NEW_LINE> <INDENT> username = "" <NEW_LINE> userSignedIn = "false" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> username = self.user.name <NEW_LINE> userSignedIn = "true" <NEW_LINE> <DEDENT> comments = db.GqlQuery("SELECT * FROM PostComment " + "WHERE post_id = :1 " + "ORDER BY commented_on DESC", int(post_id)) <NEW_LINE> liked_post = False <NEW_LINE> post_likes = db.GqlQuery( "SELECT * FROM PostLike WHERE post_id = :1", int(post_id)) <NEW_LINE> if self.user: <NEW_LINE> <INDENT> for post_like in post_likes: <NEW_LINE> <INDENT> if self.user.name == post_like.username: <NEW_LINE> <INDENT> liked_post = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.render("postdetail.html", userSignedIn=userSignedIn, username=username, post=post, comments=comments, liked_post=liked_post)
Post details request handler. We show post details even when the user has not signed in, but in that case the user cannot alter the post content or write any comment for the post
6259905063b5f9789fe86614
class SlicedClassificationMetrics(Artifact): <NEW_LINE> <INDENT> TYPE_NAME = 'system.SlicedClassificationMetrics' <NEW_LINE> def __init__(self, name: Optional[str] = None, uri: Optional[str] = None, metadata: Optional[Dict] = None): <NEW_LINE> <INDENT> super().__init__(uri=uri, name=name, metadata=metadata) <NEW_LINE> <DEDENT> def _upsert_classification_metrics_for_slice(self, slice: str): <NEW_LINE> <INDENT> if slice not in self._sliced_metrics: <NEW_LINE> <INDENT> self._sliced_metrics[slice] = ClassificationMetrics() <NEW_LINE> <DEDENT> <DEDENT> def _update_metadata(self, slice: str): <NEW_LINE> <INDENT> self.metadata = {} <NEW_LINE> self.metadata['evaluationSlices'] = [] <NEW_LINE> for slice in self._sliced_metrics.keys(): <NEW_LINE> <INDENT> slice_metrics = { 'slice': slice, 'sliceClassificationMetrics': self._sliced_metrics[slice].metadata } <NEW_LINE> self.metadata['evaluationSlices'].append(slice_metrics) <NEW_LINE> <DEDENT> <DEDENT> def log_roc_reading(self, slice: str, threshold: float, tpr: float, fpr: float): <NEW_LINE> <INDENT> self._upsert_classification_metrics_for_slice(slice) <NEW_LINE> self._sliced_metrics[slice].log_roc_reading(threshold, tpr, fpr) <NEW_LINE> self._update_metadata(slice) <NEW_LINE> <DEDENT> def load_roc_readings(self, slice: str, readings: List[List[float]]): <NEW_LINE> <INDENT> self._upsert_classification_metrics_for_slice(slice) <NEW_LINE> self._sliced_metrics[slice].load_roc_readings(readings) <NEW_LINE> self._update_metadata(slice) <NEW_LINE> <DEDENT> def set_confusion_matrix_categories(self, slice: str, categories: List[str]): <NEW_LINE> <INDENT> self._upsert_classification_metrics_for_slice(slice) <NEW_LINE> self._sliced_metrics[slice].set_confusion_matrix_categories(categories) <NEW_LINE> self._update_metadata(slice) <NEW_LINE> <DEDENT> def log_confusion_matrix_row(self, slice: str, row_category: str, row: List[int]): <NEW_LINE> <INDENT> self._upsert_classification_metrics_for_slice(slice) <NEW_LINE> self._sliced_metrics[slice].log_confusion_matrix_row(row_category, row) <NEW_LINE> self._update_metadata(slice) <NEW_LINE> <DEDENT> def log_confusion_matrix_cell(self, slice: str, row_category: str, col_category: str, value: int): <NEW_LINE> <INDENT> self._upsert_classification_metrics_for_slice(slice) <NEW_LINE> self._sliced_metrics[slice].log_confusion_matrix_cell( row_category, col_category, value) <NEW_LINE> self._update_metadata(slice) <NEW_LINE> <DEDENT> def load_confusion_matrix(self, slice: str, categories: List[str], matrix: List[List[int]]): <NEW_LINE> <INDENT> self._upsert_classification_metrics_for_slice(slice) <NEW_LINE> self._sliced_metrics[slice].log_confusion_matrix_cell( categories, matrix) <NEW_LINE> self._update_metadata(slice)
Metrics class representing Sliced Classification Metrics. Similar to ClassificationMetrics, clients using this class are expected to use its log methods to log metrics, the difference being that each log method takes a slice to associate the ClassificationMetrics with.
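A hedged usage sketch of the slice-aware logging described above; it assumes the class and its ClassificationMetrics dependency are importable from the surrounding KFP module, and the slice names are made up for illustration:
# Each log call names the slice it belongs to; _update_metadata then mirrors
# the per-slice ClassificationMetrics into the artifact's metadata.
sliced = SlicedClassificationMetrics()
sliced._sliced_metrics = {}  # normally set up by the framework
sliced.log_roc_reading('overall', threshold=0.5, tpr=0.82, fpr=0.11)
sliced.log_roc_reading('country=US', threshold=0.5, tpr=0.79, fpr=0.13)
print([entry['slice'] for entry in sliced.metadata['evaluationSlices']])
# ['overall', 'country=US']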
625990503c8af77a43b68991
class AbstractBatchSystem(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractclassmethod <NEW_LINE> def supportsHotDeployment(cls): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractclassmethod <NEW_LINE> def supportsWorkerCleanup(cls): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def issueBatchJob(self, command, memory, cores, disk, preemptable): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def killBatchJobs(self, jobIDs): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getIssuedBatchJobIDs(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getRunningBatchJobIDs(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getUpdatedBatchJob(self, maxWait): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def shutdown(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def setEnv(self, name, value=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def getRescueBatchJobFrequency(cls): <NEW_LINE> <INDENT> raise NotImplementedError()
An abstract (as far as Python currently allows) base class to represent the interface the batch system must provide to Toil.
62599050435de62698e9d2a5
class HMCover(homematic.HMDevice, CoverDevice): <NEW_LINE> <INDENT> @property <NEW_LINE> def current_cover_position(self): <NEW_LINE> <INDENT> if self.available: <NEW_LINE> <INDENT> return int(self._hm_get_state() * 100) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def set_cover_position(self, **kwargs): <NEW_LINE> <INDENT> if self.available: <NEW_LINE> <INDENT> if ATTR_POSITION in kwargs: <NEW_LINE> <INDENT> position = float(kwargs[ATTR_POSITION]) <NEW_LINE> position = min(100, max(0, position)) <NEW_LINE> level = position / 100.0 <NEW_LINE> self._hmdevice.set_level(level, self._channel) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def is_closed(self): <NEW_LINE> <INDENT> if self.current_cover_position is not None: <NEW_LINE> <INDENT> if self.current_cover_position > 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def open_cover(self, **kwargs): <NEW_LINE> <INDENT> if self.available: <NEW_LINE> <INDENT> self._hmdevice.move_up(self._channel) <NEW_LINE> <DEDENT> <DEDENT> def close_cover(self, **kwargs): <NEW_LINE> <INDENT> if self.available: <NEW_LINE> <INDENT> self._hmdevice.move_down(self._channel) <NEW_LINE> <DEDENT> <DEDENT> def stop_cover(self, **kwargs): <NEW_LINE> <INDENT> if self.available: <NEW_LINE> <INDENT> self._hmdevice.stop(self._channel) <NEW_LINE> <DEDENT> <DEDENT> def _init_data_struct(self): <NEW_LINE> <INDENT> self._state = "LEVEL" <NEW_LINE> self._data.update({self._state: STATE_UNKNOWN})
Represents a Homematic Cover in Home Assistant.
62599050e76e3b2f99fd9ea7
class catalog_069(models.Model): <NEW_LINE> <INDENT> id = models.IntegerField(primary_key=True) <NEW_LINE> src_word = models.CharField(max_length=50) <NEW_LINE> tar_word = models.CharField(max_length=50) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.src_word + "-->" + self.tar_word
Create a table which contains a catalog id and a word pair. The number of this catalog table is 069. It contains three columns: id (int): catalog id; src_word (char): the source word which needs to be substituted; tar_word (char): the word that is translated from the source word
625990506e29344779b01aeb
class DummyProcess(object): <NEW_LINE> <INDENT> pid = 1 <NEW_LINE> proto = None <NEW_LINE> _terminationDelay = 1 <NEW_LINE> def __init__(self, reactor, executable, args, environment, path, proto, uid=None, gid=None, usePTY=0, childFDs=None): <NEW_LINE> <INDENT> self.proto = proto <NEW_LINE> self._reactor = reactor <NEW_LINE> self._executable = executable <NEW_LINE> self._args = args <NEW_LINE> self._environment = environment <NEW_LINE> self._path = path <NEW_LINE> self._uid = uid <NEW_LINE> self._gid = gid <NEW_LINE> self._usePTY = usePTY <NEW_LINE> self._childFDs = childFDs <NEW_LINE> <DEDENT> def signalProcess(self, signalID): <NEW_LINE> <INDENT> params = { "TERM": (self._terminationDelay, 0), "KILL": (0, 1) } <NEW_LINE> if self.pid is None: <NEW_LINE> <INDENT> raise ProcessExitedAlready() <NEW_LINE> <DEDENT> if signalID in params: <NEW_LINE> <INDENT> delay, status = params[signalID] <NEW_LINE> self._signalHandler = self._reactor.callLater( delay, self.processEnded, status) <NEW_LINE> <DEDENT> <DEDENT> def processEnded(self, status): <NEW_LINE> <INDENT> self.pid = None <NEW_LINE> statusMap = { 0: ProcessDone, 1: ProcessTerminated, } <NEW_LINE> self.proto.processEnded(Failure(statusMap[status](status)))
An incomplete and fake L{IProcessTransport} implementation for testing how L{ProcessMonitor} behaves when its monitored processes exit. @ivar _terminationDelay: the delay in seconds after which the DummyProcess will appear to exit when it receives a TERM signal
6259905026068e7796d4ddeb
class Tag(models.Model): <NEW_LINE> <INDENT> name = models.CharField(_('name'), max_length=50, unique=True, db_index=True) <NEW_LINE> owners = models.ManyToManyField(OWNER_MODEL) <NEW_LINE> objects = TagManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('name',) <NEW_LINE> verbose_name = _('tag') <NEW_LINE> verbose_name_plural = _('tags') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
A tag.
625990507b25080760ed8731
class JavaScriptActionButton(BaseActionButton): <NEW_LINE> <INDENT> def __init__(self, onclick, **kwargs): <NEW_LINE> <INDENT> self.onclick = onclick <NEW_LINE> super(JavaScriptActionButton, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def render(self, request): <NEW_LINE> <INDENT> if not get_missing_permissions(request.user, self.required_permissions): <NEW_LINE> <INDENT> yield '<a %s>' % flatatt_filter({ "href": "#", "class": self.get_computed_class(), "title": self.tooltip, "onclick": mark_safe(self.onclick) if self.onclick else None }) <NEW_LINE> yield self.render_label() <NEW_LINE> yield '</a>'
An action button that uses `onclick` for action dispatch.
62599050baa26c4b54d50752
class distant_light(baseObj): <NEW_LINE> <INDENT> def __init__(self, direction=(0.,0.,0.), color=(1.,1.,1.), frame=None, display=None, **kwargs): <NEW_LINE> <INDENT> super(distant_light, self).__init__(**kwargs) <NEW_LINE> self._direction = vector(direction) if type(direction) in [tuple, list, np.ndarray] else direction <NEW_LINE> self._color = color <NEW_LINE> self._display = display <NEW_LINE> self._frame = frame <NEW_LINE> cmd = {"cmd": "distant_light", "idx": self.idx, "guid": self.guid, "attrs": [{"attr": "direction", "value": self.direction.value}, {"attr": "color", "value": self.color}, {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1} ]} <NEW_LINE> if (canvas.get_selected() != None): <NEW_LINE> <INDENT> canvas.get_selected().lights.append(self) <NEW_LINE> <DEDENT> self.appendcmd(cmd) <NEW_LINE> <DEDENT> @property <NEW_LINE> def direction(self): <NEW_LINE> <INDENT> return self._direction <NEW_LINE> <DEDENT> @direction.setter <NEW_LINE> def pos(self,value): <NEW_LINE> <INDENT> self._direction.value = value <NEW_LINE> self.addattr('direction') <NEW_LINE> <DEDENT> @property <NEW_LINE> def color(self): <NEW_LINE> <INDENT> return self._color <NEW_LINE> <DEDENT> @color.setter <NEW_LINE> def color(self,value): <NEW_LINE> <INDENT> self._color = value <NEW_LINE> self.addattr('color') <NEW_LINE> <DEDENT> @property <NEW_LINE> def display(self): <NEW_LINE> <INDENT> return self._display <NEW_LINE> <DEDENT> @display.setter <NEW_LINE> def display(self,value): <NEW_LINE> <INDENT> self._display = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def frame(self): <NEW_LINE> <INDENT> return self._frame <NEW_LINE> <DEDENT> @frame.setter <NEW_LINE> def frame(self,value): <NEW_LINE> <INDENT> self._frame = value
see lighting documentation at http://vpython.org/contents/docs/lights.html
62599050d6c5a102081e35c0
class SolarEph: <NEW_LINE> <INDENT> def __init__(self, a, e, I, O, w, lM): <NEW_LINE> <INDENT> self.a = (a*u.AU).to('km').value <NEW_LINE> if not isinstance(self.a, float): <NEW_LINE> <INDENT> self.a = self.a.tolist() <NEW_LINE> <DEDENT> self.e = e <NEW_LINE> self.I = I <NEW_LINE> self.O = O <NEW_LINE> self.w = w <NEW_LINE> self.lM = lM <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> for att in self.__dict__: <NEW_LINE> <INDENT> print('%s: %r' % (att, getattr(self, att))) <NEW_LINE> <DEDENT> return 'SolarEph class object attributes'
Solar system ephemerides class This class takes the constants in Appendix D.4 of Vallado as inputs and stores them for use in defining solar system ephemerides at a given time. Args: a (list): semimajor axis list (in AU) e (list): eccentricity list I (list): inclination list O (list): right ascension of the ascending node list w (list): longitude of periapsis list lM (list): mean longitude list Each of these lists has a maximum of 4 elements. The values in these lists are used to propagate the solar system planetary ephemerides for a specific solar system planet. Attributes: a (list): list of semimajor axis (in AU) e (list): list of eccentricity I (list): list of inclination O (list): list of right ascension of the ascending node w (list): list of longitude of periapsis lM (list): list of mean longitude values Each of these lists has a maximum of 4 elements. The values in these lists are used to propagate the solar system planetary ephemerides for a specific solar system planet.
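A hedged usage sketch; the coefficient lists below are rounded, illustrative values in the Vallado Appendix D.4 format (not authoritative data), and astropy.units is assumed to be imported as `u` as in the original module:
mercury_like = SolarEph(
    a=[0.387],                  # semimajor axis terms (AU)
    e=[0.2056, 2.0e-5],         # eccentricity terms
    I=[7.005, -6.0e-3],         # inclination terms (deg)
    O=[48.33, -0.125],          # RAAN terms (deg)
    w=[77.46, 0.159],           # longitude of periapsis terms (deg)
    lM=[252.25, 149472.67],     # mean longitude terms (deg)
)
print(mercury_like)             # prints each stored attribute, then the class label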
62599050b5575c28eb71371d
class BlogEntryVote(db.Model): <NEW_LINE> <INDENT> blog_entry = db.ReferenceProperty(BlogEntry, collection_name="blogentries_votes") <NEW_LINE> user_voted = db.ReferenceProperty(User, collection_name="blogentries_votes") <NEW_LINE> vote_kind = db.StringProperty()
Model for a blog entry vote
625990508e7ae83300eea53a
class Categorical(_BaseCategorical): <NEW_LINE> <INDENT> @lagacy_default_name_arg <NEW_LINE> def __init__(self, logits=None, probs=None, dtype=None, group_event_ndims=None, check_numerics=False, name=None, scope=None): <NEW_LINE> <INDENT> if dtype is None: <NEW_LINE> <INDENT> dtype = tf.int32 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dtype = tf.as_dtype(dtype) <NEW_LINE> <DEDENT> super(Categorical, self).__init__( logits=logits, probs=probs, group_event_ndims=group_event_ndims, check_numerics=check_numerics, name=name, scope=scope, ) <NEW_LINE> self._dtype = dtype <NEW_LINE> <DEDENT> @property <NEW_LINE> def dtype(self): <NEW_LINE> <INDENT> return self._dtype <NEW_LINE> <DEDENT> @property <NEW_LINE> def dynamic_value_shape(self): <NEW_LINE> <INDENT> return tf.constant([], dtype=tf.int32) <NEW_LINE> <DEDENT> @property <NEW_LINE> def static_value_shape(self): <NEW_LINE> <INDENT> return tf.TensorShape([]) <NEW_LINE> <DEDENT> def _sample_n(self, n): <NEW_LINE> <INDENT> return self._sample_n_sparse(n, self.dtype) <NEW_LINE> <DEDENT> def _log_prob_with_logits(self, x): <NEW_LINE> <INDENT> x = tf.convert_to_tensor(x) <NEW_LINE> if not x.dtype.is_integer: <NEW_LINE> <INDENT> x = tf.cast(x, dtype=tf.int32) <NEW_LINE> <DEDENT> if self.logits.get_shape()[:-1] == x.get_shape(): <NEW_LINE> <INDENT> logits = self.logits <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logits = self.logits * tf.ones_like( tf.expand_dims(x, -1), dtype=self.logits.dtype) <NEW_LINE> logits_shape = tf.shape(logits)[:-1] <NEW_LINE> x *= tf.ones(logits_shape, dtype=x.dtype) <NEW_LINE> x.set_shape(tf.TensorShape(logits.get_shape()[:-1])) <NEW_LINE> <DEDENT> return -tf.nn.sparse_softmax_cross_entropy_with_logits( labels=x, logits=logits, ) <NEW_LINE> <DEDENT> def _log_prob_with_probs(self, x): <NEW_LINE> <INDENT> x = tf.one_hot( tf.cast(x, dtype=tf.int32), self.n_categories, dtype=self.param_dtype ) <NEW_LINE> log_p = self._check_numerics( tf.log(self._probs_clipped), 'log(p)' ) <NEW_LINE> return tf.reduce_sum(x * log_p, axis=-1) <NEW_LINE> <DEDENT> def _log_prob(self, x): <NEW_LINE> <INDENT> if self._probs_is_derived: <NEW_LINE> <INDENT> return self._log_prob_with_logits(x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._log_prob_with_probs(x)
Categorical distribution. Parameters ---------- logits : tf.Tensor | np.ndarray A float tensor of shape (..., n_categories), which is the un-normalized log-odds of probabilities of the categories. probs : tf.Tensor | np.ndarray A float tensor of shape (..., n_categories), which is the normalized probabilities of the categories. One and only one of `logits` and `probs` should be specified. The relationship between these two arguments, if given each other, is stated as follows: .. math:: \begin{aligned} \text{logits} &= \log (\text{probs}) \\ \text{probs} &= \text{softmax} (\text{logits}) \end{aligned} dtype : tf.DType | np.dtype | str The data type of samples from the distribution. (default is `tf.int32`) group_event_ndims : int | tf.Tensor If specify, this number of dimensions at the end of `batch_shape` would be considered as a group of events, whose probabilities are to be accounted together. (default None) check_numerics : bool Whether or not to check numerical issues? (default False) name, scope : str Optional name and scope of this normal distribution.
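A standalone numpy check of the logits/probs relationship stated above; it is an illustration only, not part of the Categorical class:
import numpy as np

probs = np.array([0.2, 0.3, 0.5])
logits = np.log(probs)                             # logits = log(probs)
recovered = np.exp(logits) / np.exp(logits).sum()  # probs = softmax(logits)
print(np.allclose(recovered, probs))               # True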
6259905023849d37ff852566
class TestDapver(object): <NEW_LINE> <INDENT> def test_comparsion(self): <NEW_LINE> <INDENT> versions = ['1.0', '1.0.5', '1.1dev', '1.1a', '1.1b', '1.1', '1.1.1', '1.2'] <NEW_LINE> assert versions == sorted(versions, key=cmp_to_key(dapver.compare))
Tests for dap version comparison
62599050d53ae8145f919909
class CFG(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.__dict__.update( **kwargs ) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> header = '{type}: {name}'.format( type=self.__class__.__name__, name=self.name) <NEW_LINE> varlines = ['\t{var:<15}: {value}'.format(var=var, value=value) for var,value in sorted(vars(self.items())) if var is not 'name'] <NEW_LINE> all = [ header ] <NEW_LINE> all.extend(varlines) <NEW_LINE> return '\n'.join( all ) <NEW_LINE> <DEDENT> def clone(self, **kwargs): <NEW_LINE> <INDENT> other = copy.copy(self) <NEW_LINE> for k,v in kwargs.items(): <NEW_LINE> <INDENT> setattr(other, k, v) <NEW_LINE> <DEDENT> return other
Base configuration class. The attributes are used to store parameters of any type
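A hedged usage sketch of the attribute-bag pattern above; it assumes the CFG class (and the `copy` module its clone() relies on) is in scope, and the parameter names are invented:
base = CFG(name='training', learning_rate=0.1, batch_size=32)
variant = base.clone(learning_rate=0.01)           # shallow copy with one override
print(base.learning_rate, variant.learning_rate)   # 0.1 0.01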
6259905071ff763f4b5e8c50
class Question(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> app_label = 'polls' <NEW_LINE> <DEDENT> title = models.CharField(max_length=200) <NEW_LINE> text = models.TextField(blank=True, default='') <NEW_LINE> last_modify_date = models.DateTimeField(default=timezone.now) <NEW_LINE> author = models.ForeignKey(UserProfile, on_delete=models.CASCADE, blank=True, null=True) <NEW_LINE> course = models.ForeignKey('Course', on_delete=models.CASCADE, blank=True, null=True, related_name='questions') <NEW_LINE> tags = models.ManyToManyField('Tag') <NEW_LINE> quizzes = models.ManyToManyField('Quiz', through='QuizQuestion') <NEW_LINE> variables = ArrayField(VariableField(), default=list, blank=True) <NEW_LINE> objects = QuestionManager() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return super().__str__()+' title: '+str(self.title)
This class represents a question; a question should contain at least one response (Response object) and related answers (Answer object) title: string, question title, max size is 200, example: "derivative problem" background: string, question background information weight: int, the total weight of the question, example: 100 last_modify_date: Date author: UserProfile, user who creates this question tag: Tag, the tag this question has quizzes: [Quiz], the quizzes containing this question responses: [Response], the responses contained in this question variables: [Variable]
62599050b57a9660fecd2f25
class RuleCondition(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'odata_type': {'required': True}, } <NEW_LINE> _attribute_map = { 'odata_type': {'key': 'odata\\.type', 'type': 'str'}, 'data_source': {'key': 'dataSource', 'type': 'RuleDataSource'}, } <NEW_LINE> _subtype_map = { 'odata_type': {'Microsoft.Azure.Management.Insights.Models.LocationThresholdRuleCondition': 'LocationThresholdRuleCondition', 'Microsoft.Azure.Management.Insights.Models.ManagementEventRuleCondition': 'ManagementEventRuleCondition', 'Microsoft.Azure.Management.Insights.Models.ThresholdRuleCondition': 'ThresholdRuleCondition'} } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(RuleCondition, self).__init__(**kwargs) <NEW_LINE> self.odata_type = None <NEW_LINE> self.data_source = kwargs.get('data_source', None)
The condition that results in the alert rule being activated. You probably want to use the sub-classes and not this class directly. Known sub-classes are: LocationThresholdRuleCondition, ManagementEventRuleCondition, ThresholdRuleCondition. All required parameters must be populated in order to send to Azure. :param odata_type: Required. specifies the type of condition. This can be one of three types: ManagementEventRuleCondition (occurrences of management events), LocationThresholdRuleCondition (based on the number of failures of a web test), and ThresholdRuleCondition (based on the threshold of a metric).Constant filled by server. :type odata_type: str :param data_source: the resource from which the rule collects its data. For this type dataSource will always be of type RuleMetricDataSource. :type data_source: ~$(python-base-namespace).v2015_04_01.models.RuleDataSource
62599050462c4b4f79dbcea8
class ContactVATCase(TransactionCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(ContactVATCase, self).setUp() <NEW_LINE> self.partner = self.env["res.partner"].create({"name": "something"}) <NEW_LINE> <DEDENT> def test_company(self): <NEW_LINE> <INDENT> self.partner.is_company = True <NEW_LINE> self.partner.vat = "ES00000000T" <NEW_LINE> self.assertEqual(self.partner.best_vat_field()[0], "vat") <NEW_LINE> self.assertEqual(self.partner.best_vat_value()[0], "ES00000000T") <NEW_LINE> <DEDENT> def test_person(self): <NEW_LINE> <INDENT> self.partner.is_company = False <NEW_LINE> self.partner.contact_vat = "ES00000000T" <NEW_LINE> self.assertEqual(self.partner.best_vat_field()[0], "contact_vat") <NEW_LINE> self.assertEqual(self.partner.best_vat_value()[0], "ES00000000T")
Test behavior of handling contact VAT.
6259905082261d6c5273091b
class XMLElementText(AccessorGeneratorBase): <NEW_LINE> <INDENT> required_dargs = ('parent_xpath', 'tag_name') <NEW_LINE> def __init__(self, property_name, libvirtxml, forbidden=None, parent_xpath=None, tag_name=None): <NEW_LINE> <INDENT> super(XMLElementText, self).__init__(property_name, libvirtxml, forbidden, parent_xpath=parent_xpath, tag_name=tag_name) <NEW_LINE> <DEDENT> class Getter(AccessorBase): <NEW_LINE> <INDENT> __slots__ = add_to_slots('parent_xpath', 'tag_name') <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> return self.element_by_parent(self.parent_xpath, self.tag_name, create=False).text <NEW_LINE> <DEDENT> <DEDENT> class Setter(AccessorBase): <NEW_LINE> <INDENT> __slots__ = add_to_slots('parent_xpath', 'tag_name') <NEW_LINE> def __call__(self, value): <NEW_LINE> <INDENT> element = self.element_by_parent(self.parent_xpath, self.tag_name, create=True) <NEW_LINE> element.text = str(value) <NEW_LINE> self.xmltreefile().write() <NEW_LINE> <DEDENT> <DEDENT> class Delter(AccessorBase): <NEW_LINE> <INDENT> __slots__ = add_to_slots('parent_xpath', 'tag_name') <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> element = self.element_by_parent(self.parent_xpath, self.tag_name, create=False) <NEW_LINE> <DEDENT> except xcepts.LibvirtXMLNotFoundError: <NEW_LINE> <INDENT> element = None <NEW_LINE> <DEDENT> if element: <NEW_LINE> <INDENT> self.xmltreefile().remove(element) <NEW_LINE> self.xmltreefile().write()
Class of accessor classes operating on element.text
6259905096565a6dacd2d9dd
class GTKComboTreeBox(BaseComboTreeBox, wx.Panel): <NEW_LINE> <INDENT> def _createPopupFrame(self): <NEW_LINE> <INDENT> return GTKPopupFrame(self) <NEW_LINE> <DEDENT> def _createTextCtrl(self): <NEW_LINE> <INDENT> if self._readOnly: <NEW_LINE> <INDENT> style = wx.TE_READONLY <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> style = 0 <NEW_LINE> <DEDENT> return wx.TextCtrl(self, style=style) <NEW_LINE> <DEDENT> def _createButton(self): <NEW_LINE> <INDENT> bitmap = wx.ArtProvider.GetBitmap(wx.ART_GO_DOWN, client=wx.ART_BUTTON) <NEW_LINE> return wx.BitmapButton(self, bitmap=bitmap) <NEW_LINE> <DEDENT> def _layoutInterior(self): <NEW_LINE> <INDENT> panelSizer = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> panelSizer.Add(self._text, flag=wx.EXPAND, proportion=1) <NEW_LINE> panelSizer.Add(self._button) <NEW_LINE> self.SetSizerAndFit(panelSizer)
The ComboTreeBox widget for wxGTK. This is actually a workaround because on wxGTK there doesn't seem to be a way to intercept mouse events sent to the Combobox. Intercepting those events is necessary to prevent the Combobox from popping up the list and to pop up the tree instead. So, until wxPython makes intercepting those events possible, we build a poor man's Combobox ourselves using a TextCtrl and a BitmapButton.
625990504e4d5625663738b8
class Attribute(Item): <NEW_LINE> <INDENT> def __init__(self, name, points, creator_key_name): <NEW_LINE> <INDENT> content = str(points) <NEW_LINE> Item.__init__(self, name, content, creator_key_name) <NEW_LINE> self.available = points <NEW_LINE> self.total = points <NEW_LINE> <DEDENT> def Initialize(self): <NEW_LINE> <INDENT> points = float(self.content) <NEW_LINE> self.available = points <NEW_LINE> self.total = points
Quantity attributes such as having 10 gallons: non-transferable.
625990507cff6e4e811b6ee5
class DetailView(generic.DetailView): <NEW_LINE> <INDENT> model = Article <NEW_LINE> template_name = 'article/post.html' <NEW_LINE> context_object_name = 'post'
View a single blog post page. try: post = Article.objects.get(id=str(id)) except Article.DoesNotExist: raise Http404 #post.content = post.content.replace(' ', '&nbsp;').replace(' ', '<br>') return render(request, 'article/post.html', {'post': post})
6259905015baa72349463436
class PEGI(RATING_BODY): <NEW_LINE> <INDENT> id = 4 <NEW_LINE> iarc_name = 'PEGI' <NEW_LINE> ratings = (PEGI_3, PEGI_7, PEGI_12, PEGI_16, PEGI_18, PEGI_PARENTAL_GUIDANCE) <NEW_LINE> name = 'PEGI' <NEW_LINE> description = _lazy(u'Europe') <NEW_LINE> full_name = _lazy(u'Pan European Game Information') <NEW_LINE> url = 'http://www.pegi.info'
The European game ratings body (e.g. GBR, Poland, Spain).
625990508e7ae83300eea53c
class Query(Model): <NEW_LINE> <INDENT> _validation = { 'text': {'required': True}, 'display_text': {'readonly': True}, 'web_search_url': {'readonly': True}, 'search_link': {'readonly': True}, 'thumbnail': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'text': {'key': 'text', 'type': 'str'}, 'display_text': {'key': 'displayText', 'type': 'str'}, 'web_search_url': {'key': 'webSearchUrl', 'type': 'str'}, 'search_link': {'key': 'searchLink', 'type': 'str'}, 'thumbnail': {'key': 'thumbnail', 'type': 'ImageObject'}, } <NEW_LINE> def __init__(self, text): <NEW_LINE> <INDENT> super(Query, self).__init__() <NEW_LINE> self.text = text <NEW_LINE> self.display_text = None <NEW_LINE> self.web_search_url = None <NEW_LINE> self.search_link = None <NEW_LINE> self.thumbnail = None
Defines a search query. Variables are only populated by the server, and will be ignored when sending a request. :param text: The query string. Use this string as the query term in a new search request. :type text: str :ivar display_text: The display version of the query term. This version of the query term may contain special characters that highlight the search term found in the query string. The string contains the highlighting characters only if the query enabled hit highlighting :vartype display_text: str :ivar web_search_url: The URL that takes the user to the Bing search results page for the query.Only related search results include this field. :vartype web_search_url: str :ivar search_link: :vartype search_link: str :ivar thumbnail: :vartype thumbnail: ~azure.cognitiveservices.search.websearch.models.ImageObject
625990508e71fb1e983bcf6f
class ProtocolFilesInvalid(ErrorDetails): <NEW_LINE> <INDENT> id: Literal["ProtocolFilesInvalid"] = "ProtocolFilesInvalid" <NEW_LINE> title: str = "Protocol File(s) Invalid"
An error returned when uploaded protocol files are invalid.
62599050cad5886f8bdc5ad3
class Contributor(Resource): <NEW_LINE> <INDENT> pass
Repository Contributor API
62599050cb5e8a47e493cbdb
class Response(object): <NEW_LINE> <INDENT> success_codes = [200, 304, 204] <NEW_LINE> error_codes = [404, 406, 409, 500] <NEW_LINE> def __init__(self, raw_response_data): <NEW_LINE> <INDENT> data = json.loads(raw_response_data) <NEW_LINE> if 'code' not in data.keys(): <NEW_LINE> <INDENT> raise InvalidResponseError('key "code" not in raw response: "%s"' % raw_response_data) <NEW_LINE> <DEDENT> if data['code'] in self.success_codes: <NEW_LINE> <INDENT> self.code = data['code'] <NEW_LINE> self.data = data.get('data', None) <NEW_LINE> self.dirty = data.get('dirty', None) <NEW_LINE> return <NEW_LINE> <DEDENT> if data['code'] in self.error_codes: <NEW_LINE> <INDENT> if data['code'] == 404: <NEW_LINE> <INDENT> raise InvalidRequestError('unknown key') <NEW_LINE> <DEDENT> elif data['code'] == 406: <NEW_LINE> <INDENT> raise InvalidRequestError('invalid value') <NEW_LINE> <DEDENT> elif data['code'] == 409: <NEW_LINE> <INDENT> raise InvalidRequestError('conflicting value') <NEW_LINE> <DEDENT> elif data['code'] == 500: <NEW_LINE> <INDENT> raise InvalidRequestError('storing new value failed') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidResponseError('code "%s" not known' % data['code'])
Provides access to the response data. Raises an exception if the request was not successful (e.g. key not found). Note that we are explicitly ignoring NodeManager error code 501 (unknown action). We are tightly controlling the specified action, so we do not expect to encounter this error. Args: raw_response_data: json-encoded text received by zmq Attributes: code: the response code (matches HTTP response code spec) data: text or dict of response data dirty: boolean that, if True, indicates that the command will take effect only when the component is restarted
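A hedged usage sketch, assuming Response and its exception classes are importable from the module above; the payload is built with the standard json module only to show which attributes a successful response exposes:
import json

payload = json.dumps({"code": 200, "data": {"key": "value"}, "dirty": False})
resp = Response(payload)
print(resp.code, resp.data, resp.dirty)   # 200 {'key': 'value'} False
# A payload such as '{"code": 404}' would raise InvalidRequestError('unknown key').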
62599050b57a9660fecd2f27
class Inspector: <NEW_LINE> <INDENT> TEST404_OK = 0 <NEW_LINE> TEST404_MD5 = 1 <NEW_LINE> TEST404_STRING = 2 <NEW_LINE> TEST404_URL = 3 <NEW_LINE> TEST404_NONE = 4 <NEW_LINE> def __init__(self, target): <NEW_LINE> <INDENT> self.target = target <NEW_LINE> <DEDENT> def _give_it_a_try(self): <NEW_LINE> <INDENT> s = [] <NEW_LINE> for n in range(0, 42): <NEW_LINE> <INDENT> random.seed() <NEW_LINE> s.append(chr(random.randrange(97, 122))) <NEW_LINE> <DEDENT> s = "".join(s) <NEW_LINE> target = self.target + s <NEW_LINE> outputscreen.success("[+] Checking with: {}".format(target)) <NEW_LINE> try: <NEW_LINE> <INDENT> page = requests.get(target, headers=user_agent, verify=False,timeout=5, proxies=conf.proxy_server) <NEW_LINE> content = page.content <NEW_LINE> result = { 'target': urllib.parse.urlparse(target).netloc, 'code': str(page.status_code), 'size': len(content), 'md5': hashlib.md5(content).hexdigest(), 'content': content, 'location': None } <NEW_LINE> if len(page.history) >= 1: <NEW_LINE> <INDENT> result['location'] = page.url <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> result = { 'target': urllib.parse.urlparse(target).netloc, 'code': '', 'size': '', 'md5': '', 'content': '', 'location': None } <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> def check_this(self): <NEW_LINE> <INDENT> first_result = self._give_it_a_try() <NEW_LINE> if first_result['code'] == '404': <NEW_LINE> <INDENT> return '', Inspector.TEST404_OK <NEW_LINE> <DEDENT> elif first_result['code'] == '302' or first_result['location']: <NEW_LINE> <INDENT> location = first_result['location'] <NEW_LINE> return location, Inspector.TEST404_URL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return first_result['md5'], Inspector.TEST404_MD5 <NEW_LINE> <DEDENT> return '', Inspector.TEST404_NONE
This class's mission is to examine the behaviour of the application when a nonexistent page is requested on purpose
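A hedged usage sketch; it assumes the scanner's module-level globals (user_agent, conf, outputscreen) are configured as in the original project, and the target URL is a placeholder:
inspector = Inspector("http://example.com/")
token, kind = inspector.check_this()   # probes a random 42-character path
if kind == Inspector.TEST404_OK:
    print("server returns a real 404 for unknown paths")
elif kind == Inspector.TEST404_URL:
    print("soft 404: unknown paths redirect to", token)
elif kind == Inspector.TEST404_MD5:
    print("soft 404: unknown paths share body MD5", token)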
6259905082261d6c5273091c
class JaikuException(Exception): <NEW_LINE> <INDENT> pass
Base class for Jaiku-related exceptions.
62599050b830903b9686eed0
class UnitKeyParser(KeyParser): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> KeyParser.__init__(self) <NEW_LINE> self.key = key <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> ret = UnitKeyParser(self.key) <NEW_LINE> ret.filt = self.filt <NEW_LINE> return ret <NEW_LINE> <DEDENT> def offer(self, key): <NEW_LINE> <INDENT> if not self.ready: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.ready = False <NEW_LINE> self.complete = (self.key == key) <NEW_LINE> return self.complete <NEW_LINE> <DEDENT> def get_parse(self): <NEW_LINE> <INDENT> if self.complete: <NEW_LINE> <INDENT> return self.filt(self.key) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if len(self.key) == 1 and not self.key in "<>#@?|()": <NEW_LINE> <INDENT> return self.key <NEW_LINE> <DEDENT> return "<" + self.key + ">"
A KeyParser that wants a single key.
62599050ac7a0e7691f73987
class Power(StochasticParameter): <NEW_LINE> <INDENT> def __init__(self, other_param, val, elementwise=False): <NEW_LINE> <INDENT> super(Power, self).__init__() <NEW_LINE> assert isinstance(other_param, StochasticParameter) <NEW_LINE> self.other_param = other_param <NEW_LINE> self.val = handle_continuous_param(val, "val") <NEW_LINE> self.elementwise = elementwise <NEW_LINE> <DEDENT> def _draw_samples(self, size, random_state): <NEW_LINE> <INDENT> seed = random_state.randint(0, 10**6, 1)[0] <NEW_LINE> samples = self.other_param.draw_samples(size, random_state=ia.new_random_state(seed)) <NEW_LINE> samples_dtype = samples.dtype <NEW_LINE> if self.elementwise and not isinstance(self.val, Deterministic): <NEW_LINE> <INDENT> exponents = self.val.draw_samples(size, random_state=ia.new_random_state(seed+1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> exponents = self.val.draw_sample(random_state=ia.new_random_state(seed+1)) <NEW_LINE> <DEDENT> result = np.power(samples.astype(np.complex), exponents).real <NEW_LINE> if result.dtype != samples_dtype: <NEW_LINE> <INDENT> result = result.astype(samples_dtype) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> opstr = str(self.other_param) <NEW_LINE> return "Power(%s, %s, %s)" % (opstr, str(self.val), self.elementwise)
Parameter to exponentiate another parameter's results with. Parameters ---------- other_param : StochasticParameter Other parameter whose sampled values are to be modified. val : number or tuple of two numbers or list of numbers or StochasticParameter Value to exponentiate the other parameter's results with. If this is a StochasticParameter, either a single or multiple values will be sampled and used as the exponents. elementwise : bool, optional(default=False) Controls the sampling behaviour when `val` is a StochasticParameter. If set to False, a single value will be sampled from val and used as the exponent for all values generated by `other_param`. If set to True and `_draw_samples(size=S)` is called, `S` values will be sampled from `val` and used as the exponents for the results of `other_param`. Examples -------- >>> param = Power(Uniform(0.0, 1.0), 2) Converts a uniform range [0.0, 1.0) to a distribution that is peaked towards 1.0.
62599050dc8b845886d54a6a
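The Power entry above hinges on how `elementwise` changes exponent sampling. As a rough, self-contained NumPy sketch of that contract (not the library's implementation; the variable names are illustrative only):

```python
import numpy as np

rng = np.random.default_rng(0)

# Stand-in for other_param.draw_samples(size): five base samples.
samples = rng.uniform(0.0, 1.0, size=5)

# elementwise=False: one exponent drawn from `val` is applied to every sample.
single_exponent = rng.uniform(1.0, 3.0)
shared = np.power(samples.astype(np.complex128), single_exponent).real

# elementwise=True: one exponent per sample is drawn from `val`.
per_sample_exponents = rng.uniform(1.0, 3.0, size=samples.shape)
independent = np.power(samples.astype(np.complex128), per_sample_exponents).real

print(shared)
print(independent)
```

The complex cast mirrors the snippet above, which routes through complex numbers so that negative bases do not produce NaNs before the real part is taken.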
class TestBaseModelMethods(unittest.TestCase): <NEW_LINE> <INDENT> def test_pep8_conformance(self): <NEW_LINE> <INDENT> pep8style = pep8.StyleGuide(quiet=True) <NEW_LINE> result = pep8style.check_files(['models/base_model.py']) <NEW_LINE> self.assertEqual(result.total_errors, 0, "fix pep8") <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del file.json <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_doc(self): <NEW_LINE> <INDENT> self.assertIsNotNone(BaseModel.__doc__) <NEW_LINE> self.assertIsNotNone(BaseModel.__init__.__doc__) <NEW_LINE> self.assertIsNotNone(BaseModel.__str__.__doc__) <NEW_LINE> self.assertIsNotNone(BaseModel.save.__doc__) <NEW_LINE> self.assertIsNotNone(BaseModel.to_dict.__doc__) <NEW_LINE> <DEDENT> def test_to_dict(self): <NEW_LINE> <INDENT> my_model = BaseModel() <NEW_LINE> my_model_json = my_model.to_dict() <NEW_LINE> self.assertIsInstance(my_model_json['created_at'], str) <NEW_LINE> self.assertIsInstance(my_model_json['updated_at'], str) <NEW_LINE> self.assertEqual(my_model.__class__.__name__, 'BaseModel') <NEW_LINE> <DEDENT> def test_save(self): <NEW_LINE> <INDENT> my_model = BaseModel() <NEW_LINE> my_model.save() <NEW_LINE> self.assertTrue(os.path.isfile('file.json')) <NEW_LINE> self.assertNotEqual(my_model.created_at, my_model.updated_at) <NEW_LINE> <DEDENT> def test_hasattribute(self): <NEW_LINE> <INDENT> my_model = BaseModel() <NEW_LINE> self.assertTrue(hasattr(my_model, "__init__")) <NEW_LINE> self.assertTrue(hasattr(my_model, "created_at")) <NEW_LINE> self.assertTrue(hasattr(my_model, "updated_at")) <NEW_LINE> self.assertTrue(hasattr(my_model, "id")) <NEW_LINE> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> my_model = BaseModel() <NEW_LINE> self.assertTrue(isinstance(my_model, BaseModel))
Class with tests for BaseModel methods.
62599050b57a9660fecd2f28
class UserInfoView(APIView): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_permission_from_role(cls, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if request.user: <NEW_LINE> <INDENT> perms_list = [] <NEW_LINE> for item in request.user.roles.values('permissions__method').distinct(): <NEW_LINE> <INDENT> perms_list.append(item['permissions__method']) <NEW_LINE> <DEDENT> return perms_list <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def get__role_name(cls, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if request.user: <NEW_LINE> <INDENT> role_name_list = [] <NEW_LINE> for item in request.user.roles.values('name').distinct(): <NEW_LINE> <INDENT> role_name_list.append(item['name']) <NEW_LINE> <DEDENT> return role_name_list <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get(self, request): <NEW_LINE> <INDENT> if request.user.id is not None: <NEW_LINE> <INDENT> perms = self.get_permission_from_role(request) <NEW_LINE> role_name = self.get__role_name(request) <NEW_LINE> data = { 'id': request.user.id, 'username': request.user.username, 'name': request.user.name, 'avatar': BASE_API + "/be-media/img/db_234.gif", 'email': request.user.email, 'is_active': request.user.is_active, 'createTime': request.user.date_joined, 'roles_name': role_name, 'roles': perms, } <NEW_LINE> return CassResponse(data, status=OK) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return CassResponse('请登录后访问!', status=FORBIDDEN)
Get the current user's information and permissions.
6259905023e79379d538d9a2
class DiscreteSkyMapConfig(CachingSkyMap.ConfigClass): <NEW_LINE> <INDENT> raList = ListField(dtype=float, default=[], doc="Right Ascensions of tracts (ICRS, degrees)") <NEW_LINE> decList = ListField(dtype=float, default=[], doc="Declinations of tracts (ICRS, degrees)") <NEW_LINE> radiusList = ListField(dtype=float, default=[], doc="Radii of tracts (degrees)") <NEW_LINE> def validate(self): <NEW_LINE> <INDENT> super(DiscreteSkyMapConfig, self).validate() <NEW_LINE> if len(self.radiusList) != len(self.raList): <NEW_LINE> <INDENT> raise ValueError("Number of radii (%d) and RAs (%d) do not match" % (len(self.radiusList), len(self.raList))) <NEW_LINE> <DEDENT> if len(self.radiusList) != len(self.decList): <NEW_LINE> <INDENT> raise ValueError("Number of radii (%d) and Decs (%d) do not match" % (len(self.radiusList), len(self.decList)))
Configuration for the DiscreteSkyMap
6259905029b78933be26ab18
class Process: <NEW_LINE> <INDENT> def __init__(self, group=None, target=None, name=None, args=(), kwargs={}): <NEW_LINE> <INDENT> self._popen = None <NEW_LINE> self._parent_pid = os.getpid() <NEW_LINE> self._target = target <NEW_LINE> self._args = tuple(args) <NEW_LINE> self._kwargs = dict(kwargs) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> assert self._popen is None, 'cannot start a process twice' <NEW_LINE> assert self._parent_pid == os.getpid(), 'can only start a process object created by current process' <NEW_LINE> self._popen = Popen(self) <NEW_LINE> <DEDENT> def _bootstrap(self): <NEW_LINE> <INDENT> global _current_process <NEW_LINE> try: <NEW_LINE> <INDENT> self._children = set() <NEW_LINE> self._counter = itertools.count(1) <NEW_LINE> try: <NEW_LINE> <INDENT> sys.stdin.close() <NEW_LINE> sys.stdin = open(os.devnull) <NEW_LINE> <DEDENT> except (OSError, ValueError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> _current_process = self <NEW_LINE> print('child process calling self.run()') <NEW_LINE> try: <NEW_LINE> <INDENT> self.run() <NEW_LINE> exitcode = 0 <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return exitcode <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if self._target: <NEW_LINE> <INDENT> self._target(*self._args, **self._kwargs)
Implement a simplified version of multiprocessing.Process.
625990508e71fb1e983bcf71
class UTC(StaticTzInfo): <NEW_LINE> <INDENT> _zone = 'Etc/UTC' <NEW_LINE> _utcoffset = timedelta(seconds=0) <NEW_LINE> _tzname = 'UTC'
Etc/UTC timezone definition. See datetime.tzinfo for details
62599050287bf620b6273096
class PayInvoiceAsk(ModelView): <NEW_LINE> <INDENT> __name__ = 'account.invoice.pay.ask' <NEW_LINE> type = fields.Selection([ ('writeoff', 'Write-Off'), ('partial', 'Partial Payment'), ], 'Type', required=True) <NEW_LINE> journal_writeoff = fields.Many2One('account.journal', 'Write-Off Journal', domain=[ ('type', '=', 'write-off'), ], states={ 'invisible': Eval('type') != 'writeoff', 'required': Eval('type') == 'writeoff', }, depends=['type']) <NEW_LINE> amount = fields.Numeric('Payment Amount', digits=(16, Eval('currency_digits', 2)), readonly=True, depends=['currency_digits']) <NEW_LINE> currency = fields.Many2One('currency.currency', 'Payment Currency', readonly=True) <NEW_LINE> currency_digits = fields.Integer('Payment Currency Digits', readonly=True) <NEW_LINE> amount_writeoff = fields.Numeric('Write-Off Amount', digits=(16, Eval('currency_digits_writeoff', 2)), readonly=True, depends=['currency_digits_writeoff', 'type'], states={ 'invisible': Eval('type') != 'writeoff', }) <NEW_LINE> currency_writeoff = fields.Many2One('currency.currency', 'Write-Off Currency', readonly=True, states={ 'invisible': Eval('type') != 'writeoff', }, depends=['type']) <NEW_LINE> currency_digits_writeoff = fields.Integer('Write-Off Currency Digits', required=True, readonly=True) <NEW_LINE> lines_to_pay = fields.Many2Many('account.move.line', None, None, 'Lines to Pay', readonly=True) <NEW_LINE> lines = fields.Many2Many('account.move.line', None, None, 'Lines', domain=[ ('id', 'in', Eval('lines_to_pay')), ('reconciliation', '=', None), ], states={ 'invisible': Eval('type') != 'writeoff', }, depends=['lines_to_pay', 'type']) <NEW_LINE> payment_lines = fields.Many2Many('account.move.line', None, None, 'Payment Lines', readonly=True, states={ 'invisible': Eval('type') != 'writeoff', }, depends=['type']) <NEW_LINE> company = fields.Many2One('company.company', 'Company', readonly=True) <NEW_LINE> invoice = fields.Many2One('account.invoice', 'Invoice', readonly=True) <NEW_LINE> @staticmethod <NEW_LINE> def default_type(): <NEW_LINE> <INDENT> return 'partial' <NEW_LINE> <DEDENT> @fields.depends('lines', 'amount', 'currency', 'currency_writeoff', 'invoice', 'payment_lines') <NEW_LINE> def on_change_lines(self): <NEW_LINE> <INDENT> Currency = Pool().get('currency.currency') <NEW_LINE> with Transaction().set_context(date=self.invoice.currency_date): <NEW_LINE> <INDENT> amount = Currency.compute(self.currency, self.amount, self.currency_writeoff) <NEW_LINE> <DEDENT> self.amount_writeoff = Decimal('0.0') <NEW_LINE> for line in self.lines: <NEW_LINE> <INDENT> self.amount_writeoff += line.debit - line.credit <NEW_LINE> <DEDENT> for line in self.payment_lines: <NEW_LINE> <INDENT> self.amount_writeoff += line.debit - line.credit <NEW_LINE> <DEDENT> if self.invoice.type in ('in_invoice', 'out_credit_note'): <NEW_LINE> <INDENT> self.amount_writeoff = - self.amount_writeoff - amount <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.amount_writeoff = self.amount_writeoff - amount
Pay Invoice
62599050cad5886f8bdc5ad4
class JsonResponse: <NEW_LINE> <INDENT> def __init__(self, response_code, filename): <NEW_LINE> <INDENT> self.status_code = response_code <NEW_LINE> self.filename = filename <NEW_LINE> <DEDENT> def json(self): <NEW_LINE> <INDENT> cur_path = os.path.dirname(__file__) <NEW_LINE> abs_file_path = cur_path + "/proofpoint_tests_log/" + self.filename <NEW_LINE> with open(abs_file_path, "rb") as json_file: <NEW_LINE> <INDENT> json_str = json_file.read() <NEW_LINE> json_data = json.loads(json_str) <NEW_LINE> return json_data
Converts JSON data read from a file into a dictionary.
6259905099cbb53fe6832392
class ActingManeuverTable(ArtisticActiveManeuverTable): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> trace.entry() <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> self.unfamiliar = IntVar() <NEW_LINE> self.proficient_language = IntVar() <NEW_LINE> trace.exit() <NEW_LINE> <DEDENT> def setup_maneuver_skill_frames(self, parent_frame): <NEW_LINE> <INDENT> def setup_unfamiliar_frame(): <NEW_LINE> <INDENT> trace.entry() <NEW_LINE> frame_utils.setup_checkbox_frame(parent_frame, UNFAMILIAR_TEXT, self.unfamiliar) <NEW_LINE> trace.exit() <NEW_LINE> <DEDENT> def setup_proficient_language_frame(): <NEW_LINE> <INDENT> trace.entry() <NEW_LINE> frame_utils.setup_checkbox_frame(parent_frame, LANGUAGE_TEXT, self.proficient_language) <NEW_LINE> trace.exit() <NEW_LINE> <DEDENT> trace.entry() <NEW_LINE> frame_utils.destroy_frame_objects(parent_frame) <NEW_LINE> setup_unfamiliar_frame() <NEW_LINE> setup_proficient_language_frame() <NEW_LINE> trace.exit() <NEW_LINE> <DEDENT> def skill_type_bonus(self): <NEW_LINE> <INDENT> trace.entry() <NEW_LINE> bonus = 0 <NEW_LINE> if self.unfamiliar.get() == 1: <NEW_LINE> <INDENT> trace.flow("Unfamiliar with nature of subject") <NEW_LINE> bonus -= UNFAMILIAR_PENALTY <NEW_LINE> <DEDENT> if self.proficient_language.get() == 1: <NEW_LINE> <INDENT> trace.flow("Proficient in language") <NEW_LINE> bonus += LANGUAGE_BONUS <NEW_LINE> <DEDENT> trace.detail("Bonus %d" % bonus) <NEW_LINE> trace.exit() <NEW_LINE> return bonus
Acting static maneuver table. Methods: setup_maneuver_skill_frames(self, parent_frame) skill_type_bonus(self)
6259905045492302aabfd980
class UiucAirfoil: <NEW_LINE> <INDENT> def __init__(self, chord, span, profile): <NEW_LINE> <INDENT> self.chord = chord <NEW_LINE> self.span = span <NEW_LINE> self.profile = profile <NEW_LINE> self.shape = self.make_shape() <NEW_LINE> <DEDENT> def make_shape(self): <NEW_LINE> <INDENT> foil_dat_url = 'http://www.ae.illinois.edu/m-selig/ads/coord_seligFmt/%s.dat' % self.profile <NEW_LINE> f = urllib2.urlopen(foil_dat_url) <NEW_LINE> plan = gp_Pln(gp_Pnt(0., 0., 0.), gp_Dir(0., 0., 1.)) <NEW_LINE> section_pts_2d = [] <NEW_LINE> for line in f.readlines()[1:]: <NEW_LINE> <INDENT> line = line.lstrip().rstrip().replace(' ', ' ').replace(' ', ' ').replace(' ', ' ') <NEW_LINE> data = line.split(' ') <NEW_LINE> if len(data) == 2: <NEW_LINE> <INDENT> section_pts_2d.append(gp_Pnt2d(float(data[0])*self.chord, float(data[1])*self.chord)) <NEW_LINE> <DEDENT> <DEDENT> spline_2d = Geom2dAPI_PointsToBSpline(point2d_list_to_TColgp_Array1OfPnt2d(section_pts_2d), len(section_pts_2d)-1, len(section_pts_2d)) <NEW_LINE> spline = geomapi.To3d(spline_2d.Curve(), plan) <NEW_LINE> try: <NEW_LINE> <INDENT> trailing_edge = make_edge(gp_Pnt(section_pts_2d[0].X(), section_pts_2d[0].Y(), 0.0), gp_Pnt(section_pts_2d[-1].X(), section_pts_2d[-1].Y(), 0.0)) <NEW_LINE> face = BRepBuilderAPI_MakeFace(make_wire([make_edge(spline), trailing_edge])) <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> face = BRepBuilderAPI_MakeFace(make_wire(make_edge(spline))) <NEW_LINE> <DEDENT> return BRepPrimAPI_MakePrism(face.Face(), gp_Vec(gp_Pnt(0., 0., 0.), gp_Pnt(0., 0., self.span))).Shape()
Airfoil with a section from the UIUC database
62599050e76e3b2f99fd9eac
class EnergyBounds(Energy): <NEW_LINE> <INDENT> @property <NEW_LINE> def nbins(self): <NEW_LINE> <INDENT> return self.size - 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def log_centers(self): <NEW_LINE> <INDENT> center = np.sqrt(self[:-1] * self[1:]) <NEW_LINE> return center.view(Energy) <NEW_LINE> <DEDENT> @property <NEW_LINE> def upper_bounds(self): <NEW_LINE> <INDENT> return self[1:] <NEW_LINE> <DEDENT> @property <NEW_LINE> def lower_bounds(self): <NEW_LINE> <INDENT> return self[:-1] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def equal_log_spacing(cls, emin, emax, nbins, unit=None): <NEW_LINE> <INDENT> return super(EnergyBounds, cls).equal_log_spacing( emin, emax, nbins + 1, unit) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_fits(cls, hdu, unit=None): <NEW_LINE> <INDENT> if hdu.name != 'EBOUNDS': <NEW_LINE> <INDENT> log.warn('This does not seem like an EBOUNDS extension. Are you sure?') <NEW_LINE> <DEDENT> return super(EnergyBounds, cls).from_fits(cls, hdu, unit) <NEW_LINE> <DEDENT> def to_fits(self, **kwargs): <NEW_LINE> <INDENT> col1 = fits.Column(name='Energy', format='D', array=self.value) <NEW_LINE> cols = fits.ColDefs([col1]) <NEW_LINE> hdu = fits.BinTableHDU.from_columns(cols) <NEW_LINE> hdu.name = 'EBOUNDS' <NEW_LINE> hdu.header['TUNIT1'] = "{0}".format(self.unit.to_str('fits')) <NEW_LINE> return hdu
EnergyBounds array. This is a `~gammapy.spectrum.energy.Energy` sub-class that adds convenience methods to handle common tasks for energy bin edges arrays, like FITS I/O or generating arrays of bin centers. See :ref:`energy_handling_gammapy` for further information. Parameters ---------- energy : `~numpy.array`, scalar, `~astropy.units.Quantity` EnergyBounds unit : `~astropy.units.UnitBase`, str The unit of the values specified for the energy. This may be any string that `~astropy.units.Unit` understands, but it is better to give an actual unit object.
6259905076e4537e8c3f0a33
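The EnergyBounds entry above centres on deriving bin quantities from an array of N+1 edges. A minimal NumPy sketch of the `log_centers` relation (geometric mean of adjacent edges), without gammapy or astropy units:

```python
import numpy as np

# Five edges define four bins; the log center of a bin is the geometric
# mean of its lower and upper edge, as in EnergyBounds.log_centers.
edges = np.logspace(0, 2, 5)            # e.g. 1, 3.16, 10, 31.6, 100
log_centers = np.sqrt(edges[:-1] * edges[1:])

print("lower bounds:", edges[:-1])
print("upper bounds:", edges[1:])
print("log centers :", log_centers)
```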
class Page: <NEW_LINE> <INDENT> def __init__(self, title): <NEW_LINE> <INDENT> self.lines = list() <NEW_LINE> self.title = title <NEW_LINE> self.cur_line = 0 <NEW_LINE> self.eop = False <NEW_LINE> <DEDENT> def add_line(self, line): <NEW_LINE> <INDENT> if line: <NEW_LINE> <INDENT> self.lines = line <NEW_LINE> <DEDENT> <DEDENT> def next_line(self): <NEW_LINE> <INDENT> line = self.lines[self.cur_line] <NEW_LINE> self.cur_line += 1 <NEW_LINE> if self.cur_line >= len(self.lines): <NEW_LINE> <INDENT> self.eop = True <NEW_LINE> <DEDENT> return line <NEW_LINE> <DEDENT> def is_eop(self): <NEW_LINE> <INDENT> return self.eop <NEW_LINE> <DEDENT> def reset_eop(self): <NEW_LINE> <INDENT> self.cur_line = 0 <NEW_LINE> self.eop = False <NEW_LINE> <DEDENT> def get_lines(self): <NEW_LINE> <INDENT> return self.lines <NEW_LINE> <DEDENT> def get_title(self): <NEW_LINE> <INDENT> return self.title
Represents a page (aka 'slide') in TPP. A page consists of a title and one or more lines.
62599050dd821e528d6da387
class QNetwork: <NEW_LINE> <INDENT> def __init__(self, input_dim=21, lr=0.00001): <NEW_LINE> <INDENT> model = Sequential() <NEW_LINE> model.add(BatchNormalization(input_shape=(input_dim,))) <NEW_LINE> model.add(Dense(50, input_dim=input_dim, W_regularizer=l2(0.01), init='normal', activation='relu')) <NEW_LINE> model.add(BatchNormalization()) <NEW_LINE> model.add(Dense(10, init='normal', W_regularizer=l2(0.01), activation='relu')) <NEW_LINE> model.add(BatchNormalization()) <NEW_LINE> model.add(Dense(10, init='normal', W_regularizer=l2(0.01), activation='relu')) <NEW_LINE> model.add(BatchNormalization()) <NEW_LINE> model.add(Dense(10, init='normal', W_regularizer=l2(0.01), activation='relu')) <NEW_LINE> model.add(BatchNormalization()) <NEW_LINE> model.add(Dense(2, init='normal', W_regularizer=l2(0.01), activation='linear')) <NEW_LINE> adam_opt = adam(lr=lr) <NEW_LINE> model.compile(loss='mean_squared_error', optimizer=adam_opt) <NEW_LINE> self.model = model <NEW_LINE> <DEDENT> def train_batch(self, training_data_x, training_data_y): <NEW_LINE> <INDENT> return self.model.train_on_batch(training_data_x, training_data_y) <NEW_LINE> <DEDENT> def run_forward(self, training_data_x): <NEW_LINE> <INDENT> return self.model.predict_on_batch(training_data_x) <NEW_LINE> <DEDENT> def save_model(self, location): <NEW_LINE> <INDENT> self.model.save(location) <NEW_LINE> <DEDENT> def new_model(self, new_model): <NEW_LINE> <INDENT> self.model = new_model
Holds sequential model for Q learning.
625990508e71fb1e983bcf72
class IncomeBreakdownType(ModelSimple): <NEW_LINE> <INDENT> allowed_values = { ('value',): { 'None': None, 'BONUS': "bonus", 'OVERTIME': "overtime", 'REGULAR': "regular", 'NULL': "null", }, } <NEW_LINE> validations = { } <NEW_LINE> additional_properties_type = None <NEW_LINE> _nullable = True <NEW_LINE> @cached_property <NEW_LINE> def openapi_types(): <NEW_LINE> <INDENT> return { 'value': (str,), } <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def discriminator(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> attribute_map = {} <NEW_LINE> _composed_schemas = None <NEW_LINE> required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) <NEW_LINE> @convert_js_args_to_python_args <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _path_to_item = kwargs.pop('_path_to_item', ()) <NEW_LINE> if 'value' in kwargs: <NEW_LINE> <INDENT> value = kwargs.pop('value') <NEW_LINE> <DEDENT> elif args: <NEW_LINE> <INDENT> args = list(args) <NEW_LINE> value = args.pop(0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ApiTypeError( "value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> _check_type = kwargs.pop('_check_type', True) <NEW_LINE> _spec_property_naming = kwargs.pop('_spec_property_naming', False) <NEW_LINE> _configuration = kwargs.pop('_configuration', None) <NEW_LINE> _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) <NEW_LINE> if args: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> self._data_store = {} <NEW_LINE> self._check_type = _check_type <NEW_LINE> self._spec_property_naming = _spec_property_naming <NEW_LINE> self._path_to_item = _path_to_item <NEW_LINE> self._configuration = _configuration <NEW_LINE> self._visited_composed_classes = _visited_composed_classes + (self.__class__,) <NEW_LINE> self.value = value <NEW_LINE> if kwargs: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % ( kwargs, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), )
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values.
625990503cc13d1c6d466be6
class MaxPooling2D(_Pooling2D): <NEW_LINE> <INDENT> def __init__(self, pool_size, strides, padding='valid', data_format='channels_last', name=None, **kwargs): <NEW_LINE> <INDENT> super(MaxPooling2D, self).__init__( nn.max_pool, pool_size=pool_size, strides=strides, padding=padding, data_format=data_format, name=name, **kwargs)
Max pooling layer for 2D inputs (e.g. images). Arguments: pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width) specifying the size of the pooling window. Can be a single integer to specify the same value for all spatial dimensions. strides: An integer or tuple/list of 2 integers, specifying the strides of the pooling operation. Can be a single integer to specify the same value for all spatial dimensions. padding: A string. The padding method, either 'valid' or 'same'. Case-insensitive. data_format: A string. The ordering of the dimensions in the inputs. `channels_last` (default) and `channels_first` are supported. `channels_last` corresponds to inputs with shape `(batch, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch, channels, height, width)`. name: A string, the name of the layer.
6259905073bcbd0ca4bcb737
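The MaxPooling2D entry above is mostly about the meaning of `pool_size`, `strides` and `padding`. The following naive, single-channel reference (plain NumPy, not the TensorFlow layer) shows what 'valid' max pooling computes; `max_pool_2d` is a name invented for this sketch:

```python
import numpy as np

def max_pool_2d(x, pool_size, strides):
    """Naive 'valid' max pooling over a single-channel 2D array."""
    ph, pw = pool_size
    sh, sw = strides
    out_h = (x.shape[0] - ph) // sh + 1
    out_w = (x.shape[1] - pw) // sw + 1
    out = np.empty((out_h, out_w), dtype=x.dtype)
    for i in range(out_h):
        for j in range(out_w):
            out[i, j] = x[i * sh:i * sh + ph, j * sw:j * sw + pw].max()
    return out

x = np.arange(16).reshape(4, 4)
print(max_pool_2d(x, pool_size=(2, 2), strides=(2, 2)))
# [[ 5  7]
#  [13 15]]
```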
@register <NEW_LINE> class Adam(Optimizer): <NEW_LINE> <INDENT> def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, **kwargs): <NEW_LINE> <INDENT> super(Adam, self).__init__(learning_rate=learning_rate, **kwargs) <NEW_LINE> self.beta1 = beta1 <NEW_LINE> self.beta2 = beta2 <NEW_LINE> self.epsilon = epsilon <NEW_LINE> <DEDENT> def create_state(self, index, weight): <NEW_LINE> <INDENT> return (zeros(weight.shape, weight.context, dtype=weight.dtype, stype=weight.stype), zeros(weight.shape, weight.context, dtype=weight.dtype, stype=weight.stype)) <NEW_LINE> <DEDENT> def update(self, index, weight, grad, state): <NEW_LINE> <INDENT> assert(isinstance(weight, NDArray)) <NEW_LINE> assert(isinstance(grad, NDArray)) <NEW_LINE> self._update_count(index) <NEW_LINE> lr = self._get_lr(index) <NEW_LINE> wd = self._get_wd(index) <NEW_LINE> t = self._index_update_count[index] <NEW_LINE> coef1 = 1. - self.beta1**t <NEW_LINE> coef2 = 1. - self.beta2**t <NEW_LINE> lr *= math.sqrt(coef2)/coef1 <NEW_LINE> kwargs = {'beta1': self.beta1, 'beta2': self.beta2, 'epsilon': self.epsilon, 'rescale_grad': self.rescale_grad} <NEW_LINE> if self.clip_gradient: <NEW_LINE> <INDENT> kwargs['clip_gradient'] = self.clip_gradient <NEW_LINE> <DEDENT> mean, var = state <NEW_LINE> adam_update(weight, grad, mean, var, out=weight, lr=lr, wd=wd, **kwargs)
The Adam optimizer. This class implements the optimizer described in *Adam: A Method for Stochastic Optimization*, available at http://arxiv.org/abs/1412.6980. The optimizer updates the weight by:: rescaled_grad = clip(grad * rescale_grad + wd * weight, clip_gradient) m = beta1 * m + (1 - beta1) * rescaled_grad v = beta2 * v + (1 - beta2) * (rescaled_grad**2) w = w - learning_rate * m / (sqrt(v) + epsilon) If the storage types of weight, state and grad are all ``row_sparse``, **sparse updates** are applied by:: for row in grad.indices: rescaled_grad[row] = clip(grad[row] * rescale_grad + wd * weight[row], clip_gradient) m[row] = beta1 * m[row] + (1 - beta1) * rescaled_grad[row] v[row] = beta2 * v[row] + (1 - beta2) * (rescaled_grad[row]**2) w[row] = w[row] - learning_rate * m[row] / (sqrt(v[row]) + epsilon) The sparse update only updates the mean and var for the weights whose row_sparse gradient indices appear in the current batch, rather than updating it for all indices. Compared with the original update, it can provide large improvements in model training throughput for some applications. However, it provides slightly different semantics than the original update, and may lead to different empirical results. This optimizer accepts the following parameters in addition to those accepted by :class:`.Optimizer`. For details of the update algorithm, see :class:`~mxnet.ndarray.adam_update`. Parameters ---------- beta1 : float, optional Exponential decay rate for the first moment estimates. beta2 : float, optional Exponential decay rate for the second moment estimates. epsilon : float, optional Small value to avoid division by 0.
6259905123e79379d538d9a4
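The Adam entry above states the dense update rule explicitly, so it can be checked against a few lines of plain NumPy. The sketch below follows those equations, including the lr * sqrt(1 - beta2^t) / (1 - beta1^t) rescaling visible in update(), and deliberately leaves out gradient clipping and the sparse row_sparse path; `adam_step` is a name used only here:

```python
import numpy as np

def adam_step(w, grad, m, v, t, lr=0.001, beta1=0.9, beta2=0.999,
              epsilon=1e-8, wd=0.0, rescale_grad=1.0):
    """One dense Adam update, mirroring the equations in the docstring."""
    g = grad * rescale_grad + wd * w              # rescaled gradient (no clipping here)
    m = beta1 * m + (1 - beta1) * g               # first moment estimate
    v = beta2 * v + (1 - beta2) * g * g           # second moment estimate
    lr_t = lr * np.sqrt(1 - beta2 ** t) / (1 - beta1 ** t)   # bias-corrected step size
    w = w - lr_t * m / (np.sqrt(v) + epsilon)
    return w, m, v

w = np.array([1.0, -2.0])
m = np.zeros_like(w)
v = np.zeros_like(w)
for t in range(1, 4):                             # three toy steps with a fixed gradient
    w, m, v = adam_step(w, np.array([0.1, -0.3]), m, v, t)
print(w)
```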
class LoginView(TemplateView): <NEW_LINE> <INDENT> template_name = "accounts/login.html"
view to render login template
625990518da39b475be04694
class Atm: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "atmosphere"
Singleton class representing the number of dimensions of the Atmosphere.
6259905129b78933be26ab19
class TestImageListResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testImageListResponse(self): <NEW_LINE> <INDENT> model = idcheckio_python_client.models.image_list_response.ImageListResponse()
ImageListResponse unit test stubs
62599051287bf620b6273098
class OrderedGroup(Group): <NEW_LINE> <INDENT> def __init__(self, order, parent=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.order = order <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if isinstance(other, OrderedGroup): <NEW_LINE> <INDENT> return self.order < other.order <NEW_LINE> <DEDENT> return super().__lt__(other) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.__class__ is other.__class__ and self.order == other.order and self.parent == other.parent) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash((self.order, self.parent)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '%s(%d)' % (self.__class__.__name__, self.order)
A group with partial order. Ordered groups with a common parent are rendered in ascending order of their ``order`` field. This is a useful way to render multiple layers of a scene within a single batch.
625990512ae34c7f260ac596
class TwoProvidersTwoLabelsOneShared(tests.AllocatorTestCase): <NEW_LINE> <INDENT> scenarios = [ ('one_node', dict(provider1=10, provider2=10, label1=1, label2=1, results=[1, 1, 0])), ('two_nodes', dict(provider1=10, provider2=10, label1=2, label2=2, results=[2, 1, 1])), ('three_nodes', dict(provider1=10, provider2=10, label1=3, label2=3, results=[3, 2, 1])), ('four_nodes', dict(provider1=10, provider2=10, label1=4, label2=4, results=[4, 2, 2])), ('four_nodes_at_quota', dict(provider1=4, provider2=4, label1=4, label2=4, results=[4, 0, 4])), ('four_nodes_over_quota', dict(provider1=2, provider2=2, label1=4, label2=4, results=[2, 0, 2])), ] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(TwoProvidersTwoLabelsOneShared, self).setUp() <NEW_LINE> ap1 = allocation.AllocationProvider('provider1', self.provider1) <NEW_LINE> ap2 = allocation.AllocationProvider('provider2', self.provider1) <NEW_LINE> at1 = allocation.AllocationTarget('target1') <NEW_LINE> ar1 = allocation.AllocationRequest('label1', self.label1) <NEW_LINE> ar2 = allocation.AllocationRequest('label2', self.label2) <NEW_LINE> ar1.addTarget(at1, 0) <NEW_LINE> ar2.addTarget(at1, 0) <NEW_LINE> self.agt.append(ar1.addProvider(ap1, at1, 0)[1]) <NEW_LINE> self.agt.append(ar2.addProvider(ap1, at1, 0)[1]) <NEW_LINE> self.agt.append(ar2.addProvider(ap2, at1, 0)[1]) <NEW_LINE> ap1.makeGrants() <NEW_LINE> ap2.makeGrants()
One label is served by both providers, the other can only come from one. This tests that the allocator uses the diverse provider to supply the label that can come from either while reserving nodes from the more restricted provider for the label that can only be supplied by it. label1 is supplied by provider1 and provider2. label2 is supplied only by provider2. Result AGTs are: * label1 from provider1 * label2 from provider1 * label2 from provider2
62599051e64d504609df9e25
class AWSCommonTest(unittest.TestCase): <NEW_LINE> <INDENT> @typing.no_type_check <NEW_LINE> def testCreateTags(self): <NEW_LINE> <INDENT> tag_specifications = common.CreateTags(common.VOLUME, {'Name': 'fake-name'}) <NEW_LINE> self.assertEqual('volume', tag_specifications['ResourceType']) <NEW_LINE> self.assertEqual(1, len(tag_specifications['Tags'])) <NEW_LINE> self.assertEqual('Name', tag_specifications['Tags'][0]['Key']) <NEW_LINE> self.assertEqual('fake-name', tag_specifications['Tags'][0]['Value']) <NEW_LINE> tag_specifications = common.CreateTags( common.VOLUME, {'Name': 'fake-name', 'FakeTag': 'fake-tag'}) <NEW_LINE> self.assertEqual(2, len(tag_specifications['Tags'])) <NEW_LINE> self.assertEqual('FakeTag', tag_specifications['Tags'][1]['Key']) <NEW_LINE> self.assertEqual('fake-tag', tag_specifications['Tags'][1]['Value']) <NEW_LINE> <DEDENT> @typing.no_type_check <NEW_LINE> def testGetInstanceTypeByCPU(self): <NEW_LINE> <INDENT> self.assertEqual('m4.large', common.GetInstanceTypeByCPU(2)) <NEW_LINE> self.assertEqual('m4.16xlarge', common.GetInstanceTypeByCPU(64)) <NEW_LINE> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> common.GetInstanceTypeByCPU(0) <NEW_LINE> <DEDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> common.GetInstanceTypeByCPU(256)
Test the common.py public methods
6259905130c21e258be99cb4
class SoftLink(linkextension.SoftLink, Link): <NEW_LINE> <INDENT> _c_classid = 'SOFTLINK' <NEW_LINE> _c_classId = previous_api_property('_c_classid') <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> target = self.target <NEW_LINE> if not self.target.startswith('/'): <NEW_LINE> <INDENT> target = self._v_parent._g_join(self.target) <NEW_LINE> <DEDENT> return self._v_file._get_node(target) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> classname = self.__class__.__name__ <NEW_LINE> target = self.target <NEW_LINE> if not self.target.startswith('/'): <NEW_LINE> <INDENT> target = self._v_parent._g_join(self.target) <NEW_LINE> <DEDENT> if target in self._v_file: <NEW_LINE> <INDENT> dangling = "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dangling = " (dangling)" <NEW_LINE> <DEDENT> return "%s (%s) -> %s%s" % (self._v_pathname, classname, self.target, dangling)
Represents a soft link (aka symbolic link). A soft link is a reference to another node in the *same* file hierarchy. Getting access to the pointed node (this action is called *dereferencing*) is done via the __call__ special method (see below).
62599051462c4b4f79dbceae
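The SoftLink entry above boils down to "calling the link dereferences it". A toy illustration of that __call__ pattern, with a plain dict standing in for the file's node lookup (nothing here is the PyTables API):

```python
class MiniSoftLink:
    """Toy soft link: __call__ returns the node the link points to."""

    def __init__(self, node_map, target):
        self.node_map = node_map   # stands in for the file's node registry
        self.target = target

    def __call__(self):
        # Dereference; raises KeyError if the link is dangling.
        return self.node_map[self.target]

nodes = {"/detector/data": [1, 2, 3]}
link = MiniSoftLink(nodes, "/detector/data")
print(link())   # [1, 2, 3]
```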
class SusceptibleInfectionsTracker: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @covidsim.models.hookimpl <NEW_LINE> def track_infection_event(infecting_node, exposed_node, day, results, graph): <NEW_LINE> <INDENT> if 'daily_susceptible_infections' not in results: <NEW_LINE> <INDENT> results['daily_susceptible_infections'] = [[0, 0]] * (day + 1) <NEW_LINE> <DEDENT> while len(results['daily_susceptible_infections']) < day + 1: <NEW_LINE> <INDENT> results['daily_susceptible_infections'] += [[0, 0]] <NEW_LINE> <DEDENT> results['daily_susceptible_infections'][day][0] += exposed_node['susceptibility'] <NEW_LINE> results['daily_susceptible_infections'][day][1] += 1 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @covidsim.models.hookimpl <NEW_LINE> def finalize_results(initial_results, final_results, params): <NEW_LINE> <INDENT> si = initial_results['daily_susceptible_infections'] <NEW_LINE> if len(si) < params['days_to_run']: <NEW_LINE> <INDENT> si += [[0, 0]] * (params['days_to_run'] - len(si)) <NEW_LINE> <DEDENT> if len(si) > params['days_to_run']: <NEW_LINE> <INDENT> si = si[:params['days_to_run']] <NEW_LINE> <DEDENT> final_results['daily_susceptible_infections'] = [x[0] / x[1] if x[1] > 0 else 0 for x in si]
Tracks, for each day, how susceptible on average the individuals who became infected that day were.
625990510a50d4780f706814
class Car(): <NEW_LINE> <INDENT> price_per_raise = 1.0 <NEW_LINE> def __init__(self, company, details): <NEW_LINE> <INDENT> self._company = company <NEW_LINE> self._details = details <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'str : {} - {}'.format(self._company,self._details) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'repr : {} - {}'.format(self._company,self._details) <NEW_LINE> <DEDENT> def detail_info(self): <NEW_LINE> <INDENT> print('Current ID : {}'.format(id(self))) <NEW_LINE> print('Car Detail Info : {} {}'.format(self._company, self._details.get('price'))) <NEW_LINE> <DEDENT> def get_price(self): <NEW_LINE> <INDENT> return 'Before Car Price -> company {}, price : {}'.format(self._company, self._details) <NEW_LINE> <DEDENT> def get_price_culc(self): <NEW_LINE> <INDENT> return 'After Car Price -> company {}, price : {}'.format(self._company, self._details.get('price') * Car.price_per_raise) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def rasie_price(cls, per): <NEW_LINE> <INDENT> if per <=1: <NEW_LINE> <INDENT> print('1 gt') <NEW_LINE> return <NEW_LINE> <DEDENT> cls.price_per_raise = per <NEW_LINE> print('Raised Price!') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_bmw(inst): <NEW_LINE> <INDENT> if inst._company == 'Bmw': <NEW_LINE> <INDENT> return 'Ok! car is {}'.format(inst._company) <NEW_LINE> <DEDENT> return 'Sorry~'
Car Class Author : bskim Date : 2019.11.09 Description : Class, Static, Instance, Method
625990514428ac0f6e6599e2
class SHA1Hasher(interface.BaseHasher): <NEW_LINE> <INDENT> NAME = u'sha1' <NEW_LINE> DESCRIPTION = u'Calculates a SHA-1 digest hash over input data.' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(SHA1Hasher, self).__init__() <NEW_LINE> self._sha1_context = hashlib.sha1() <NEW_LINE> <DEDENT> def Update(self, data): <NEW_LINE> <INDENT> self._sha1_context.update(data) <NEW_LINE> <DEDENT> def GetStringDigest(self): <NEW_LINE> <INDENT> return u'{0:s}'.format(self._sha1_context.hexdigest()) <NEW_LINE> <DEDENT> def GetBinaryDigest(self): <NEW_LINE> <INDENT> return self._sha1_context.digest()
This class provides SHA-1 hashing functionality.
6259905163d6d428bbee3c7b
class TestAliasContext(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testAliasContext(self): <NEW_LINE> <INDENT> model = swagger_client.models.alias_context.AliasContext()
AliasContext unit test stubs
62599051b830903b9686eed2
class QtLineCompleter(RawWidget): <NEW_LINE> <INDENT> text = d_(Str()) <NEW_LINE> entries = d_(List()) <NEW_LINE> entries_updater = d_(Callable()) <NEW_LINE> delimiters = d_(Tuple(Str(), ('{','}'))) <NEW_LINE> hug_width = 'ignore' <NEW_LINE> _no_update = Bool(False) <NEW_LINE> def create_widget(self, parent): <NEW_LINE> <INDENT> widget = CompleterLineEdit(parent, self.delimiters, self.entries, self.entries_updater) <NEW_LINE> widget.setText(self.text) <NEW_LINE> widget.textEdited.connect(self.update_object) <NEW_LINE> return widget <NEW_LINE> <DEDENT> def update_object ( self ): <NEW_LINE> <INDENT> if (not self._no_update) and self.activated : <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self.get_widget().text() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = self.get_widget().toPlainText() <NEW_LINE> <DEDENT> self._no_update = True <NEW_LINE> self.text = value <NEW_LINE> self._no_update = False <NEW_LINE> <DEDENT> <DEDENT> @observe('text') <NEW_LINE> def update_widget (self, change): <NEW_LINE> <INDENT> if (not self._no_update) and self.get_widget() : <NEW_LINE> <INDENT> self._no_update = True <NEW_LINE> self.get_widget().setText(change['value']) <NEW_LINE> self._no_update = False
Simple style text editor, which displays a text field.
62599051435de62698e9d2ad
class SelectorCV(ModelSelector): <NEW_LINE> <INDENT> def select(self): <NEW_LINE> <INDENT> warnings.filterwarnings("ignore", category=DeprecationWarning) <NEW_LINE> best_logL = float("-inf") <NEW_LINE> for component in range(self.min_n_components, self.max_n_components+1): <NEW_LINE> <INDENT> if len(self.sequences) <= 1: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> split_method = KFold(n_splits = min(len(self.sequences), 3)) <NEW_LINE> for train_idx, test_idx in split_method.split(self.sequences): <NEW_LINE> <INDENT> trainX, trainLength = combine_sequences(train_idx, self.sequences) <NEW_LINE> testX, testLength = combine_sequences(test_idx, self.sequences) <NEW_LINE> try: <NEW_LINE> <INDENT> model = GaussianHMM(n_components = component, covariance_type="diag", n_iter = 1000, random_state = self.random_state, verbose=False).fit(trainX, trainLength) <NEW_LINE> logL = model.score(testX, testLength) <NEW_LINE> if logL > best_logL: <NEW_LINE> <INDENT> best_logL = logL <NEW_LINE> best_num_components = component <NEW_LINE> return self.base_model(best_num_components) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return self.base_model(self.n_constant)
Select the best model based on the average log likelihood of cross-validation folds.
62599051379a373c97d9a4da
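The SelectorCV entry above describes the intended criterion: the candidate with the best average held-out log likelihood over the K folds wins. A generic sketch of that selection loop, with a placeholder fit_and_score instead of hmmlearn's GaussianHMM, might look like this:

```python
from statistics import mean

def select_by_cv(candidate_params, folds, fit_and_score):
    """Return the candidate whose mean held-out score across folds is best.

    fit_and_score(param, train_idx, test_idx) -> float is a placeholder for
    "fit a model with `param` components on the training split and return
    the log likelihood of the test split".
    """
    best_param, best_score = None, float("-inf")
    for param in candidate_params:
        scores = [fit_and_score(param, train, test) for train, test in folds]
        avg = mean(scores) if scores else float("-inf")
        if avg > best_score:
            best_param, best_score = param, avg
    return best_param

# Tiny fake scorer so the sketch runs; it simply prefers 3 components.
folds = [(None, None)] * 3
print(select_by_cv(range(2, 6), folds, lambda p, train, test: -abs(p - 3)))  # 3
```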
class Parser(object): <NEW_LINE> <INDENT> def __init__(self, max_deep=0): <NEW_LINE> <INDENT> self._max_deep = max_deep <NEW_LINE> return <NEW_LINE> <DEDENT> def working(self, priority: int, url: str, keys: dict, deep: int, content: object) -> (int, list, list): <NEW_LINE> <INDENT> logging.debug("%s start: %s", self.__class__.__name__, CONFIG_PARSE_MESSAGE % (priority, keys, deep, url)) <NEW_LINE> try: <NEW_LINE> <INDENT> parse_result, url_list, save_list = self.htm_parse(priority, url, keys, deep, content) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> parse_result, url_list, save_list = -1, [], [] <NEW_LINE> logging.error("%s error: %s, %s", self.__class__.__name__, extract_error_info(), CONFIG_PARSE_MESSAGE % (priority, keys, deep, url)) <NEW_LINE> <DEDENT> logging.debug("%s end: parse_result=%s, len(url_list)=%s, len(save_list)=%s, url=%s", self.__class__.__name__, parse_result, len(url_list), len(save_list), url) <NEW_LINE> return parse_result, url_list, save_list <NEW_LINE> <DEDENT> def htm_parse(self, priority: int, url: str, keys: dict, deep: int, content: object) -> (int, list, list): <NEW_LINE> <INDENT> status_code, url_now, html_text = content <NEW_LINE> url_list = [] <NEW_LINE> if (self._max_deep < 0) or (deep < self._max_deep): <NEW_LINE> <INDENT> tmp_list = re.findall(r"<a.+?href=\"(?P<url>.{5,}?)\".*?>", html_text, flags=re.IGNORECASE) <NEW_LINE> url_list = [(_url, keys, priority+1) for _url in [get_url_legal(href, url) for href in tmp_list]] <NEW_LINE> <DEDENT> title = re.search(r"<title>(?P<title>.+?)</title>", html_text, flags=re.IGNORECASE) <NEW_LINE> save_list = [(url, title.group("title").strip(), datetime.datetime.now()), ] if title else [] <NEW_LINE> return 1, url_list, save_list
Parser class; it must include the function working().
6259905194891a1f408ba14c
class GetContextExtractorsTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def request_factory(self, settings={}): <NEW_LINE> <INDENT> request = testing.DummyRequest() <NEW_LINE> request.registry.settings = settings <NEW_LINE> return request <NEW_LINE> <DEDENT> def test_default_configuration(self): <NEW_LINE> <INDENT> request = self.request_factory(settings={}) <NEW_LINE> extractors = contextextractors.get_context_extractors(request) <NEW_LINE> self.assertEqual(extractors, contextextractors.CONTEXT_EXTRACTORS) <NEW_LINE> <DEDENT> def test_custom_configuration(self): <NEW_LINE> <INDENT> settings = {contextextractors.EXTRACTORS_SETTING: '123456'} <NEW_LINE> request = self.request_factory(settings) <NEW_LINE> extractors = contextextractors.get_context_extractors(request) <NEW_LINE> self.assertEqual(extractors, '123456')
Test contextextractors.get_context_extractors().
62599051507cdc57c63a624f
class InitialConditions(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.pressure = 0.0 <NEW_LINE> self.velocity = [0.0001, 0.0001, 0.0001] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ', '.join("%s: %s" % item for item in vars(self).items())
The InitialConditions class is used to set fluid simulation initial conditions.
625990510c0af96317c577b8
class CodeReviewCommentContextMenu(ContextMenu): <NEW_LINE> <INDENT> usedfor = ICodeReviewComment <NEW_LINE> links = ['reply'] <NEW_LINE> def reply(self): <NEW_LINE> <INDENT> enabled = self.context.branch_merge_proposal.isMergable() <NEW_LINE> return Link('+reply', 'Reply', icon='add', enabled=enabled)
Context menu for branches.
625990514e696a045264e878
class Enemy(GameSprites): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__('./images/enemy1.png') <NEW_LINE> self.rect.bottom = 0 <NEW_LINE> max_x = SCREEN_RECT.width - self.rect.width <NEW_LINE> self.rect.x = random.randint(0, max_x) <NEW_LINE> self.speed = random.randint(1, 3) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> super().update() <NEW_LINE> if self.rect.y >= SCREEN_RECT.height: <NEW_LINE> <INDENT> self.kill()
Enemy aircraft sprite.
625990512ae34c7f260ac597
class LevelArea: <NEW_LINE> <INDENT> course: LevelCourse <NEW_LINE> layer_0: typing.List[LevelObject] <NEW_LINE> layer_1: typing.List[LevelObject] <NEW_LINE> layer_2: typing.List[LevelObject] <NEW_LINE> def __init__(self, course=None, layer_0=None, layer_1=None, layer_2=None): <NEW_LINE> <INDENT> self.course = course <NEW_LINE> self.layer_0 = layer_0 if layer_0 else [] <NEW_LINE> self.layer_1 = layer_1 if layer_1 else [] <NEW_LINE> self.layer_2 = layer_2 if layer_2 else [] <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.course, name) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, game, course_file, layer_0, layer_1, layer_2): <NEW_LINE> <INDENT> return cls.load_with_course_and_bgdat_loaders(game, course_file, layer_0, layer_1, layer_2, LevelCourse.load, load_bgdat) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load_with_course_and_bgdat_loaders(cls, game, course_file, layer_0, layer_1, layer_2, course_loader, bgdat_loader): <NEW_LINE> <INDENT> self = cls() <NEW_LINE> self.course = course_loader(game, course_file) <NEW_LINE> if layer_0: <NEW_LINE> <INDENT> self.layer_0 = bgdat_loader(game, layer_0) <NEW_LINE> <DEDENT> if layer_1: <NEW_LINE> <INDENT> self.layer_1 = bgdat_loader(game, layer_1) <NEW_LINE> <DEDENT> if layer_2: <NEW_LINE> <INDENT> self.layer_2 = bgdat_loader(game, layer_2) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def save(self, game): <NEW_LINE> <INDENT> return self.save_with_bgdat_saver(game, save_bgdat) <NEW_LINE> <DEDENT> def save_with_bgdat_saver(self, game, bgdat_saver): <NEW_LINE> <INDENT> layer_0 = layer_1 = layer_2 = None <NEW_LINE> if self.layer_0: <NEW_LINE> <INDENT> layer_0 = bgdat_saver(game, self.layer_0) <NEW_LINE> <DEDENT> if self.layer_1: <NEW_LINE> <INDENT> layer_1 = bgdat_saver(game, self.layer_1) <NEW_LINE> <DEDENT> if self.layer_2: <NEW_LINE> <INDENT> layer_2 = bgdat_saver(game, self.layer_2) <NEW_LINE> <DEDENT> self.course.prepare_for_saving(game) <NEW_LINE> return self.course.save(game), layer_0, layer_1, layer_2
An area of a level, made up of a course and up to three layers of background objects.
62599051d7e4931a7ef3d529
class SparQLClient: <NEW_LINE> <INDENT> def __init__(self, endpoint_url): <NEW_LINE> <INDENT> self.endpoint = SPARQLWrapper(endpoint_url) <NEW_LINE> self.endpoint_url = endpoint_url <NEW_LINE> self.endpoint.setTimeout(10*60) <NEW_LINE> <DEDENT> def addLocalFileToEndpoint(self, tfile, tgraph=default): <NEW_LINE> <INDENT> cmd = "s-post {} {} {}".format(self.endpoint_url, tgraph, tfile) <NEW_LINE> self.cmdline = cmd <NEW_LINE> os.system(cmd) <NEW_LINE> <DEDENT> def removeLocalFileFromEndpoint(self, tfile, tgraph=default): <NEW_LINE> <INDENT> cmd = "s-delete {} {} {}".format(self.endpoint_url, tgraph, tfile) <NEW_LINE> os.system(cmd) <NEW_LINE> <DEDENT> def restablishConnection(self, endpoint_url=None): <NEW_LINE> <INDENT> if not endpoint_url: <NEW_LINE> <INDENT> endpoint_url = self.endpoint_url <NEW_LINE> <DEDENT> self.endpoint = SPARQLWrapper(endpoint_url) <NEW_LINE> self.endpoint_url = endpoint_url <NEW_LINE> self.endpoint.method = 'POST' <NEW_LINE> self.endpoint.setReturnFormat(JSON)
Maintains a connection to a Fuseki endpoint through rdflib.
62599051287bf620b627309a
class LoginViewSet(viewsets.ViewSet): <NEW_LINE> <INDENT> serializer_class = AuthTokenSerializer <NEW_LINE> def create(self,request): <NEW_LINE> <INDENT> return ObtainAuthToken().post(request)
Checks email and password and returns an auth token.
6259905145492302aabfd984
class SyntaxBuildableNode: <NEW_LINE> <INDENT> def __init__(self, node): <NEW_LINE> <INDENT> if isinstance(node, SyntaxBuildableType): <NEW_LINE> <INDENT> assert node.base_name() not in SYNTAX_BASE_KINDS, "Syntax base kinds are not represented by Nodes" <NEW_LINE> self.node = create_node_map()[node.base_name()] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.node = node <NEW_LINE> <DEDENT> <DEDENT> def children(self): <NEW_LINE> <INDENT> return [SyntaxBuildableChild(child) for child in self.node.children] <NEW_LINE> <DEDENT> def documentation(self): <NEW_LINE> <INDENT> if not self.node.description and self.node.is_syntax_collection(): <NEW_LINE> <INDENT> return '`%s` represents a collection of `%s`s.' % (self.node.syntax_kind, self.collection_element_type().buildable()) <NEW_LINE> <DEDENT> return flat_documentation(self.node.description) <NEW_LINE> <DEDENT> def type(self): <NEW_LINE> <INDENT> return SyntaxBuildableType(self.node.syntax_kind) <NEW_LINE> <DEDENT> def base_type(self): <NEW_LINE> <INDENT> return SyntaxBuildableType(self.node.base_kind) <NEW_LINE> <DEDENT> def collection_element_type(self): <NEW_LINE> <INDENT> assert self.node.is_syntax_collection() <NEW_LINE> return SyntaxBuildableType(self.node.collection_element) <NEW_LINE> <DEDENT> def elements_separated_by_newline(self): <NEW_LINE> <INDENT> assert self.node.is_syntax_collection() <NEW_LINE> return self.node.elements_separated_by_newline <NEW_LINE> <DEDENT> def single_non_defaulted_child(self): <NEW_LINE> <INDENT> non_defaulted_params = [child for child in self.children() if not child.type().default_initialization()] <NEW_LINE> assert len(non_defaulted_params) == 1 <NEW_LINE> return non_defaulted_params[0]
Wrapper around the `Node` type defined in `gyb_syntax_support` to provide functionality specific to SwiftSyntaxBuilder.
6259905107d97122c4218155
class PasswordCriteria(Choreography): <NEW_LINE> <INDENT> def __init__(self, temboo_session): <NEW_LINE> <INDENT> Choreography.__init__(self, temboo_session, '/Library/DevShortcuts/Validation/PasswordCriteria') <NEW_LINE> <DEDENT> def new_input_set(self): <NEW_LINE> <INDENT> return PasswordCriteriaInputSet() <NEW_LINE> <DEDENT> def _make_result_set(self, result, path): <NEW_LINE> <INDENT> return PasswordCriteriaResultSet(result, path) <NEW_LINE> <DEDENT> def _make_execution(self, session, exec_id, path): <NEW_LINE> <INDENT> return PasswordCriteriaChoreographyExecution(session, exec_id, path)
Create a new instance of the PasswordCriteria Choreography. A TembooSession object, containing a valid set of Temboo credentials, must be supplied.
6259905182261d6c5273091f
class BetterLogReport(LogReport): <NEW_LINE> <INDENT> def __call__(self, trainer): <NEW_LINE> <INDENT> keys = self._keys <NEW_LINE> observation = trainer.observation <NEW_LINE> summary = self._summary <NEW_LINE> if keys is None: <NEW_LINE> <INDENT> summary.add(observation) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> summary.add({k: observation[k] for k in keys if k in observation}) <NEW_LINE> <DEDENT> if self._trigger(trainer): <NEW_LINE> <INDENT> stats = self._summary.compute_mean() <NEW_LINE> stats_cpu = {} <NEW_LINE> for name, value in six.iteritems(stats): <NEW_LINE> <INDENT> if isinstance(value, np.ndarray): <NEW_LINE> <INDENT> value[np.isnan(value)] = 0. <NEW_LINE> stats_cpu[name] = value.tolist() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if np.isnan(value): value = 0. <NEW_LINE> stats_cpu[name] = float(value) <NEW_LINE> <DEDENT> <DEDENT> updater = trainer.updater <NEW_LINE> stats_cpu['epoch'] = updater.epoch <NEW_LINE> stats_cpu['iteration'] = updater.iteration <NEW_LINE> stats_cpu['n_examples'] = getattr(updater, 'n_examples', 0) <NEW_LINE> stats_cpu['elapsed_time'] = trainer.elapsed_time <NEW_LINE> if self._postprocess is not None: <NEW_LINE> <INDENT> self._postprocess(stats_cpu) <NEW_LINE> <DEDENT> self._log.append(stats_cpu) <NEW_LINE> if self._log_name is not None: <NEW_LINE> <INDENT> log_name = self._log_name.format(**stats_cpu) <NEW_LINE> fd, path = tempfile.mkstemp(prefix=log_name, dir=trainer.out) <NEW_LINE> with os.fdopen(fd, 'w') as f: <NEW_LINE> <INDENT> json.dump(self._log, f, indent=4) <NEW_LINE> <DEDENT> new_path = os.path.join(trainer.out, log_name) <NEW_LINE> shutil.move(path, new_path) <NEW_LINE> <DEDENT> self._init_summary() <NEW_LINE> <DEDENT> <DEDENT> def _init_summary(self): <NEW_LINE> <INDENT> self._summary = DictArraySummary()
Subclass of LogReport that handles reporting of numpy arrays.
6259905110dbd63aa1c7208f
class DescribeAccountAllGrantPrivilegesRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ClusterId = None <NEW_LINE> self.Account = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ClusterId = params.get("ClusterId") <NEW_LINE> if params.get("Account") is not None: <NEW_LINE> <INDENT> self.Account = InputAccount() <NEW_LINE> self.Account._deserialize(params.get("Account")) <NEW_LINE> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
DescribeAccountAllGrantPrivileges request parameter structure.
62599051379a373c97d9a4dc
class Mapping(models.Model): <NEW_LINE> <INDENT> RELATION_TYPE_CROSSDB = "crossdb" <NEW_LINE> RELATION_TYPE_ORTHOLOG = "ortholog" <NEW_LINE> RELATION_TYPE_TRANSCRIPT = "transcript" <NEW_LINE> RELATION_TYPE_EXON = "exon" <NEW_LINE> RELATION_TYPE_CHOICES = ( (RELATION_TYPE_CROSSDB, "Crossdb"), (RELATION_TYPE_ORTHOLOG, "Ortholog"), (RELATION_TYPE_TRANSCRIPT, "Transcript"), (RELATION_TYPE_EXON, "Exon"), ) <NEW_LINE> relation_type = models.CharField(max_length=20, choices=RELATION_TYPE_CHOICES) <NEW_LINE> source_db = models.CharField(max_length=20) <NEW_LINE> source_id = models.CharField(max_length=50) <NEW_LINE> source_species = models.CharField(max_length=50) <NEW_LINE> target_db = models.CharField(max_length=20) <NEW_LINE> target_id = models.CharField(max_length=50) <NEW_LINE> target_species = models.CharField(max_length=50) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> constraints = [ models.UniqueConstraint( fields=[ "source_db", "source_id", "source_species", "target_db", "target_id", "target_species", "relation_type", ], name="uniq_mapping_source_target_type", ), ] <NEW_LINE> indexes = [ models.Index( name="idx_feature_source_target", fields=[ "source_db", "source_id", "source_species", "target_db", "target_species", ], ), models.Index( name="idx_feature_target", fields=["target_db", "target_id", "target_species"], ), ] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{src_db}: {src_id} ({src_species}) -> {dst_db}: {dst_id} ({dst_species})".format( src_db=self.source_db, src_id=self.source_id, src_species=self.source_species, dst_db=self.target_db, dst_id=self.target_id, dst_species=self.target_species, )
Describes a mapping between features from different sources.
6259905155399d3f056279cb
class OAuthSignIn(object): <NEW_LINE> <INDENT> providers = None <NEW_LINE> def __init__(self, provider_name): <NEW_LINE> <INDENT> self.provider_name = provider_name <NEW_LINE> credentials = current_app.config['OAUTH_CREDENTIALS'][provider_name] <NEW_LINE> self.consumer_id = credentials['id'] <NEW_LINE> self.consumer_secret = credentials['secret'] <NEW_LINE> <DEDENT> def authorize(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def callback(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_callback_url(self): <NEW_LINE> <INDENT> return url_for('oauth_callback', provider=self.provider_name, _external=True) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_provider(self, provider_name): <NEW_LINE> <INDENT> if self.providers is None: <NEW_LINE> <INDENT> self.providers = {} <NEW_LINE> for provider_class in self.__subclasses__(): <NEW_LINE> <INDENT> provider = provider_class() <NEW_LINE> self.providers[provider.provider_name] = provider <NEW_LINE> <DEDENT> <DEDENT> return self.providers[provider_name]
Defines the structure that the subclasses implementing each provider must follow.
625990517cff6e4e811b6eed
class IOThread(object): <NEW_LINE> <INDENT> def __init__(self, input_file, output_file): <NEW_LINE> <INDENT> self.input_file = input_file <NEW_LINE> self.output_file = output_file <NEW_LINE> self._running = False <NEW_LINE> self.got_exception = False <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.done = event.Event() <NEW_LINE> def _inner(): <NEW_LINE> <INDENT> self._running = True <NEW_LINE> while self._running: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = self.input_file.read(read_write_util.READ_CHUNKSIZE) <NEW_LINE> if not data: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> self.done.send(True) <NEW_LINE> <DEDENT> self.output_file.write(data) <NEW_LINE> if hasattr(self.input_file, "update_progress"): <NEW_LINE> <INDENT> self.input_file.update_progress() <NEW_LINE> <DEDENT> if hasattr(self.output_file, "update_progress"): <NEW_LINE> <INDENT> self.output_file.update_progress() <NEW_LINE> <DEDENT> greenthread.sleep(IO_THREAD_SLEEP_TIME) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> LOG.exception(exc) <NEW_LINE> self.done.send_exception(exc) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> greenthread.spawn(_inner) <NEW_LINE> return self.done <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self._running = False <NEW_LINE> <DEDENT> def wait(self): <NEW_LINE> <INDENT> return self.done.wait()
Class that reads chunks from the input file and writes them to the output file till the transfer is completely done.
62599051d99f1b3c44d06b4c
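The IOThread entry above is, at its core, a chunked copy loop run on a green thread. The copy itself can be shown without eventlet; READ_CHUNKSIZE below is a local stand-in for the value IOThread pulls from read_write_util:

```python
import io

READ_CHUNKSIZE = 64 * 1024   # stand-in value; the real constant lives in read_write_util

def copy_chunked(src, dst, chunksize=READ_CHUNKSIZE):
    """Copy src to dst chunk by chunk, stopping on the first empty read."""
    while True:
        data = src.read(chunksize)
        if not data:
            break
        dst.write(data)

src = io.BytesIO(b"x" * 200_000)
dst = io.BytesIO()
copy_chunked(src, dst)
print(len(dst.getvalue()))   # 200000
```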
class PostgresSearch(object): <NEW_LINE> <INDENT> def filter_by(self, query, terms): <NEW_LINE> <INDENT> q = query <NEW_LINE> q = q.filter(model.package_search_table.c.package_id==model.Package.id) <NEW_LINE> q = q.filter('package_search.search_vector ' '@@ plainto_tsquery(:terms)') <NEW_LINE> q = q.params(terms=terms) <NEW_LINE> q = q.add_column(sa.func.ts_rank_cd('package_search.search_vector', sa.func.plainto_tsquery(terms))) <NEW_LINE> return q <NEW_LINE> <DEDENT> def order_by(self, query): <NEW_LINE> <INDENT> return query.order_by('ts_rank_cd_1') <NEW_LINE> <DEDENT> def search(self, terms): <NEW_LINE> <INDENT> import ckan.model as model <NEW_LINE> q = self.filter_by(model.Session.query(model.Package), terms) <NEW_LINE> q = self.order_by(q) <NEW_LINE> q = q.distinct() <NEW_LINE> results = [pkg_tuple[0].name for pkg_tuple in q.all()] <NEW_LINE> return {'results':results, 'count':q.count()}
Demo of how postgres search works.
625990517b25080760ed8736
class Home(Resource): <NEW_LINE> <INDENT> def __init__(self, resource): <NEW_LINE> <INDENT> Resource.__init__(self, resource.raw_resource()) <NEW_LINE> <DEDENT> def get_home_entry_link(self, link_rel): <NEW_LINE> <INDENT> for key in self.get('resources'): <NEW_LINE> <INDENT> if key == link_rel.rel: <NEW_LINE> <INDENT> return model.RestLink.Link(key, self.get('resources').get(key).get('href')) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_product_info_link(self): <NEW_LINE> <INDENT> return self.get_home_entry_link(model.RestLink.REL_ABOUT) <NEW_LINE> <DEDENT> def get_home_entry_methods(self, rel): <NEW_LINE> <INDENT> for key in self.get('resources'): <NEW_LINE> <INDENT> if key == rel.link_rel: <NEW_LINE> <INDENT> return self.get('resources').get(key).get('hints').get('allow') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_home_entry_media_types(self, rel): <NEW_LINE> <INDENT> for key in self.get('resources'): <NEW_LINE> <INDENT> if key == rel.link_rel: <NEW_LINE> <INDENT> return self.get('resources').get(key).get('hints').get('representations') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Home(%r)' % self._raw_resource_ <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return super(Home, self).__str__()
This is the model of the home resource.
6259905126068e7796d4ddf5
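A hedged sketch of how the Home wrapper might be used; it assumes some_resource is a Resource instance whose raw representation carries the 'resources' map the methods above expect.

    # Hypothetical sketch: wrap a fetched home document and read an entry link.
    home = Home(some_resource)                 # some_resource is an assumed Resource
    about_link = home.get_product_info_link()  # model.RestLink.Link for the "about" relation
    if about_link is not None:
        print(about_link)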
class GistAddFileCommand(GistListCommandBase, sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def is_enabled(self): <NEW_LINE> <INDENT> return self.view.settings().get('gist_url') is None <NEW_LINE> <DEDENT> def handle_gist(self, gist): <NEW_LINE> <INDENT> @catch_errors <NEW_LINE> def on_filename(filename): <NEW_LINE> <INDENT> if filename: <NEW_LINE> <INDENT> text = self.view.substr(sublime.Region(0, self.view.size())) <NEW_LINE> changes = {filename: {'content': text}} <NEW_LINE> new_gist = update_gist(gist['url'], changes) <NEW_LINE> gistify_view(self.view, new_gist, filename) <NEW_LINE> sublime.status_message("File added to Gist") <NEW_LINE> <DEDENT> <DEDENT> filename = os.path.basename( self.view.file_name() if self.view.file_name() else '' ) <NEW_LINE> self.view.window().show_input_panel( 'File Name:', filename, on_filename, None, None ) <NEW_LINE> <DEDENT> def get_window(self): <NEW_LINE> <INDENT> return self.view.window()
Adds the contents of the current view as a file to an existing gist.
6259905199cbb53fe6832397
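Because this is a Sublime Text TextCommand, it is normally invoked through its derived command name; a minimal sketch, assuming Sublime's default class-to-command naming, is:

    # Hypothetical sketch: run the command on the active view (e.g. from the console).
    # Sublime derives "gist_add_file" from the GistAddFileCommand class name.
    view.run_command("gist_add_file")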
class CacheSearchResultIter(object): <NEW_LINE> <INDENT> def __init__(self, xapids, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> self.it = enumerate(xapids) <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> rank, xapid = self.it.next() <NEW_LINE> msetitem = CacheMSetItem(self.context.conn, rank, xapid) <NEW_LINE> return SearchResult(msetitem, self.context)
An iterator over a set of results from a search.
62599051e5267d203ee6cd9c
class Pin(PinAPI): <NEW_LINE> <INDENT> __trigger__ = EDGE <NEW_LINE> def __init__(self, bank, index, soc_pin_number, direction=In, interrupt=None, pull=None): <NEW_LINE> <INDENT> super(Pin, self).__init__(None, index) <NEW_LINE> self._soc_pin_number = soc_pin_number <NEW_LINE> self._file = None <NEW_LINE> self._direction = direction <NEW_LINE> self._interrupt = interrupt <NEW_LINE> self._pull = pull <NEW_LINE> <DEDENT> @property <NEW_LINE> def soc_pin_number(self): <NEW_LINE> <INDENT> return self._soc_pin_number <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> gpio_admin("export", self.soc_pin_number, self._pull) <NEW_LINE> self._file = open(self._pin_path("value"), "r+") <NEW_LINE> self._write("direction", self._direction) <NEW_LINE> if self._direction == In: <NEW_LINE> <INDENT> self._write("edge", self._interrupt if self._interrupt is not None else "none") <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> if not self.closed: <NEW_LINE> <INDENT> if self.direction == Out: <NEW_LINE> <INDENT> self.value = 0 <NEW_LINE> <DEDENT> self._file.close() <NEW_LINE> self._file = None <NEW_LINE> self._write("direction", In) <NEW_LINE> self._write("edge", "none") <NEW_LINE> gpio_admin("unexport", self.soc_pin_number) <NEW_LINE> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> self._check_open() <NEW_LINE> self._file.seek(0) <NEW_LINE> v = self._file.read() <NEW_LINE> return int(v) if v else 0 <NEW_LINE> <DEDENT> def set(self, new_value): <NEW_LINE> <INDENT> self._check_open() <NEW_LINE> if self._direction != Out: <NEW_LINE> <INDENT> raise ValueError("not an output pin") <NEW_LINE> <DEDENT> self._file.seek(0) <NEW_LINE> self._file.write(str(int(new_value))) <NEW_LINE> self._file.flush() <NEW_LINE> <DEDENT> @property <NEW_LINE> def direction(self): <NEW_LINE> <INDENT> return self._direction <NEW_LINE> <DEDENT> @direction.setter <NEW_LINE> def direction(self, new_value): <NEW_LINE> <INDENT> self._write("direction", new_value) <NEW_LINE> self._direction = new_value <NEW_LINE> <DEDENT> @property <NEW_LINE> def interrupt(self): <NEW_LINE> <INDENT> return self._interrupt <NEW_LINE> <DEDENT> @interrupt.setter <NEW_LINE> def interrupt(self, new_value): <NEW_LINE> <INDENT> self._write("edge", new_value) <NEW_LINE> self._interrupt = new_value <NEW_LINE> <DEDENT> @property <NEW_LINE> def pull(self): <NEW_LINE> <INDENT> return self._pull <NEW_LINE> <DEDENT> def fileno(self): <NEW_LINE> <INDENT> return self._file.fileno() <NEW_LINE> <DEDENT> @property <NEW_LINE> def closed(self): <NEW_LINE> <INDENT> return self._file is None or self._file.closed <NEW_LINE> <DEDENT> def _check_open(self): <NEW_LINE> <INDENT> if self.closed: <NEW_LINE> <INDENT> raise IOError(str(self) + " is closed") <NEW_LINE> <DEDENT> <DEDENT> def _write(self, filename, value): <NEW_LINE> <INDENT> with open(self._pin_path(filename), "w+") as f: <NEW_LINE> <INDENT> f.write(value) <NEW_LINE> <DEDENT> <DEDENT> def _pin_path(self, filename=""): <NEW_LINE> <INDENT> return "/sys/devices/virtual/gpio/gpio%i/%s" % (self.soc_pin_number, filename) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__module__ + "." + str(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{type}({index})".format( type=self.__class__.__name__, index=self.index)
Controls a single GPIO pin through the Linux sysfs interface.
62599051462c4b4f79dbceb2
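A minimal sketch of driving the Pin class, assuming the module-level Out constant and the gpio_admin helper it references are available; the pin numbers are invented.

    # Hypothetical sketch: export a pin, toggle it, then release it.
    pin = Pin(bank=None, index=7, soc_pin_number=4, direction=Out)
    pin.open()        # exports the pin via gpio-admin and opens its sysfs value file
    try:
        pin.set(1)    # drive the output high
        pin.set(0)    # drive it low again
    finally:
        pin.close()   # resets direction/edge and unexports the pin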
class TestExportTaskException(TestCase): <NEW_LINE> <INDENT> fixtures = ("osm_provider.json", "datamodel_presets.json") <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> group, created = Group.objects.get_or_create(name="TestDefaultExportExtentGroup") <NEW_LINE> with patch("eventkit_cloud.jobs.signals.Group") as mock_group: <NEW_LINE> <INDENT> mock_group.objects.get.return_value = group <NEW_LINE> self.user = User.objects.create_user( username="demo", email="[email protected]", password="demo", is_active=True ) <NEW_LINE> <DEDENT> self.export_provider = DataProvider.objects.get(slug="osm-generic") <NEW_LINE> bbox = Polygon.from_bbox((-10.85, 6.25, -10.62, 6.40)) <NEW_LINE> tags = DatamodelPreset.objects.get(name="hdm").json_tags <NEW_LINE> self.assertEqual(259, len(tags)) <NEW_LINE> the_geom = GEOSGeometry(bbox, srid=4326) <NEW_LINE> self.job = Job.objects.create( name="TestJob", description="Test description", user=self.user, the_geom=the_geom, json_tags=tags ) <NEW_LINE> self.run = ExportRun.objects.create(job=self.job, user=self.user) <NEW_LINE> <DEDENT> def test_clone(self): <NEW_LINE> <INDENT> run = ExportRun.objects.first() <NEW_LINE> task_uid = str(uuid.uuid4()) <NEW_LINE> data_provider_task_record = DataProviderTaskRecord.objects.create(run=run) <NEW_LINE> export_task_record = ExportTaskRecord.objects.create( export_provider_task=data_provider_task_record, uid=task_uid ) <NEW_LINE> export_task_exception = ExportTaskException.objects.create(task=export_task_record, exception="TestException") <NEW_LINE> old_export_task_exception = ExportTaskException.objects.get(id=export_task_exception.id) <NEW_LINE> new_export_task_exception = export_task_exception.clone() <NEW_LINE> self.assertNotEqual(old_export_task_exception, new_export_task_exception) <NEW_LINE> self.assertNotEqual(old_export_task_exception.id, new_export_task_exception.id) <NEW_LINE> self.assertEqual(old_export_task_exception.exception, new_export_task_exception.exception)
Test cases for the ExportTaskException model.
62599051a79ad1619776b515
@attr.s <NEW_LINE> class Sender(protocols.Filter[bytes, bytes]): <NEW_LINE> <INDENT> _logger = logging.getLogger(".".join((__name__, "Sender"))) <NEW_LINE> _buffer: channels.DequeChannel[bytes] = attr.ib( factory=channels.DequeChannel ) <NEW_LINE> def input(self, event: Optional[bytes]) -> None: <NEW_LINE> <INDENT> if event: <NEW_LINE> <INDENT> self._buffer.input(event) <NEW_LINE> <DEDENT> <DEDENT> def output(self) -> Optional[bytes]: <NEW_LINE> <INDENT> event = self._buffer.output() <NEW_LINE> if event is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> crc_element = CRCElement(payload=event) <NEW_LINE> message = crc_element.get_body() <NEW_LINE> return message
Generates a CRC for incoming bytes, attaches it to the message, and returns the result.
6259905176e4537e8c3f0a39
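A rough usage sketch for the Sender filter, assuming CRCElement and the channels module come from the same package; the payload bytes are arbitrary.

    # Hypothetical sketch: push a payload through the filter and read the framed output.
    sender = Sender()
    sender.input(b"\x01\x02\x03")   # arbitrary payload, buffered internally
    framed = sender.output()        # payload with its CRC attached by CRCElement
    if framed is not None:
        print(framed.hex())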
class PasswordField(TextField): <NEW_LINE> <INDENT> widget = widgets.PasswordInput()
A StringField, except renders an ``<input type="password">``. Also, whatever value is accepted by this field is not rendered back to the browser like normal fields.
6259905163b5f9789fe86620
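For context, a password field is typically used inside a form; the sketch below assumes the stock WTForms Form and validators API rather than anything specific to this codebase.

    # Hypothetical sketch: a login form with a password input (stock WTForms assumed).
    from wtforms import Form, StringField, PasswordField, validators

    class LoginForm(Form):
        username = StringField("Username", [validators.DataRequired()])
        password = PasswordField("Password", [validators.DataRequired()])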
class TestLookaside(Base): <NEW_LINE> <INDENT> expected_title = "git.lookaside.new" <NEW_LINE> expected_subti = 'jnovy uploaded pst-diffraction.doc.tar.xz for texlive' <NEW_LINE> expected_icon = "https://apps.fedoraproject.org/img/icons/git-logo.png" <NEW_LINE> expected_secondary_icon = "https://seccdn.libravatar.org/avatar/" + "e0e8e0c4d995109cdac8ae4eb5766a73cf09c7a8d2d8bac57f761e6223ca094b?s=64&" + "d=retro" <NEW_LINE> expected_link = 'http://pkgs.fedoraproject.org/lookaside/pkgs/' + 'texlive/pst-diffraction.doc.tar.xz/' + 'dacad985394b3977f9dcf0c75f51a357/' + 'pst-diffraction.doc.tar.xz' <NEW_LINE> expected_long_form = 'dacad985394b3977f9dcf0c75f51a357 pst-diffraction.doc.tar.xz' <NEW_LINE> expected_usernames = set(['jnovy']) <NEW_LINE> expected_packages = set(['texlive']) <NEW_LINE> expected_objects = set(['texlive/pst-diffraction.doc.tar.xz']) <NEW_LINE> msg = { "i": 1, "timestamp": 1349197866.215465, "topic": "org.fedoraproject.prod.git.lookaside.new", "msg": { "agent": "jnovy", "md5sum": "dacad985394b3977f9dcf0c75f51a357", "name": "texlive", "filename": "pst-diffraction.doc.tar.xz" } }
Messages like this one are published when **new sources** are uploaded to the "lookaside cache".
625990518e71fb1e983bcf78
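To see what this test exercises, the sample message can be rendered with the fedmsg.meta helpers; a hedged sketch, assuming the standard fedmsg API and an installed set of metadata processors:

    # Hypothetical sketch: render the lookaside message the way the test expects.
    import fedmsg.config
    import fedmsg.meta

    config = fedmsg.config.load_config()
    fedmsg.meta.make_processors(**config)
    msg = TestLookaside.msg                          # the sample dict defined above
    print(fedmsg.meta.msg2subtitle(msg, **config))   # should match expected_subti
    print(fedmsg.meta.msg2link(msg, **config))       # should match expected_link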