code (string, lengths 4–4.48k) | docstring (string, lengths 1–6.45k) | _id (string, length 24)
---|---|---|
class KerisWirelessWired(KerisWireless): <NEW_LINE> <INDENT> product_id = 0x195E | Keris Wireless in wired mode. | 625990832c8b7c6e89bd530a |
class MissingDataWarning(Warning): <NEW_LINE> <INDENT> pass | Warns when prerequisite data/files are not available. | 6259908363b5f9789fe86c8d |
class UserDefinedType(TypeEngine): <NEW_LINE> <INDENT> __visit_name__ = "user_defined" <NEW_LINE> def _adapt_expression(self, op, othertype): <NEW_LINE> <INDENT> return self.adapt_operator(op), self <NEW_LINE> <DEDENT> def adapt_operator(self, op): <NEW_LINE> <INDENT> return op | Base for user defined types.
This should be the base of new types. Note that
for most cases, :class:`TypeDecorator` is probably
more appropriate::
import sqlalchemy.types as types
class MyType(types.UserDefinedType):
def __init__(self, precision = 8):
self.precision = precision
def get_col_spec(self):
return "MYTYPE(%s)" % self.precision
def bind_processor(self, dialect):
def process(value):
return value
return process
def result_processor(self, dialect, coltype):
def process(value):
return value
return process
Once the type is made, it's immediately usable::
table = Table('foo', meta,
Column('id', Integer, primary_key=True),
Column('data', MyType(16))
) | 62599083796e427e5385029f |
class StackedInlineWithGeneric(BaseGenericModelAdmin, admin.StackedInline): <NEW_LINE> <INDENT> pass | Normal stacked inline with a generic relation | 62599083283ffb24f3cf53c5 |
class MonitorData(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.StartTime = None <NEW_LINE> self.EndTime = None <NEW_LINE> self.Data = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.StartTime = params.get("StartTime") <NEW_LINE> self.EndTime = params.get("EndTime") <NEW_LINE> self.Data = params.get("Data") | Monitoring data
| 62599083091ae35668706765 |
class HookMap(dict): <NEW_LINE> <INDENT> def __new__(cls, points=None): <NEW_LINE> <INDENT> d = dict.__new__(cls) <NEW_LINE> for p in points or []: <NEW_LINE> <INDENT> d[p] = [] <NEW_LINE> <DEDENT> return d <NEW_LINE> <DEDENT> def __init__(self, *a, **kw): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def attach(self, point, callback, failsafe=None, priority=None, **kwargs): <NEW_LINE> <INDENT> self[point].append(Hook(callback, failsafe, priority, **kwargs)) <NEW_LINE> <DEDENT> def run(self, point): <NEW_LINE> <INDENT> exc = None <NEW_LINE> hooks = self[point] <NEW_LINE> hooks.sort() <NEW_LINE> for hook in hooks: <NEW_LINE> <INDENT> if exc is None or hook.failsafe: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> hook() <NEW_LINE> <DEDENT> except (KeyboardInterrupt, SystemExit): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except (cherrypy.HTTPError, cherrypy.HTTPRedirect, cherrypy.InternalRedirect): <NEW_LINE> <INDENT> exc = sys.exc_info()[1] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> exc = sys.exc_info()[1] <NEW_LINE> cherrypy.log(traceback=True, severity=40) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if exc: <NEW_LINE> <INDENT> raise exc <NEW_LINE> <DEDENT> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> newmap = self.__class__() <NEW_LINE> for k, v in self.items(): <NEW_LINE> <INDENT> newmap[k] = v[:] <NEW_LINE> <DEDENT> return newmap <NEW_LINE> <DEDENT> copy = __copy__ <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> cls = self.__class__ <NEW_LINE> return '%s.%s(points=%r)' % ( cls.__module__, cls.__name__, list(self) ) | A map of call points to lists of callbacks (Hook objects). | 62599083099cdd3c6367618c |
class OrientationClusterer: <NEW_LINE> <INDENT> time_stamp_format = '%H:%M:%S %m/%d/%Y' <NEW_LINE> def __init__(self, gmm, cluster_on=[""]): <NEW_LINE> <INDENT> self.gmm = gmm <NEW_LINE> self.accelerometer_data = self.get_accelerometer_data() <NEW_LINE> self.predictions = None <NEW_LINE> self.data_array = self.to_numpy() <NEW_LINE> self.is_fitted = False <NEW_LINE> self.is_predicted = False <NEW_LINE> <DEDENT> @cache_dict("accelerometer_data.json") <NEW_LINE> def get_accelerometer_data(self): <NEW_LINE> <INDENT> all_data = sorted(LogEntry.select(), key=lambda row: datetime.strptime(row.timestamp, self.time_stamp_format)) <NEW_LINE> accelerometer_data = [{"timestamp": row.timestamp, "light": row.light_reading, "proximity": row.proximity_reading, "x": row.x_reading, "y": row.y_reading, "z": row.z_reading} for row in all_data] <NEW_LINE> return accelerometer_data <NEW_LINE> <DEDENT> def fit(self): <NEW_LINE> <INDENT> if self.is_fitted: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.gmm.fit(self.data_array) <NEW_LINE> self.is_fitted = True <NEW_LINE> <DEDENT> def to_numpy(self): <NEW_LINE> <INDENT> fields = [ "x", "y", "z", "proximity" ] <NEW_LINE> return np.array([[row[field] for field in fields] for row in self.accelerometer_data]) <NEW_LINE> <DEDENT> def predict(self, data=None): <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> data = self.data_array <NEW_LINE> <DEDENT> self.predictions = self.gmm.predict(data) <NEW_LINE> return self.predictions <NEW_LINE> <DEDENT> def classify(self): <NEW_LINE> <INDENT> self.fit() <NEW_LINE> ids_to_names = {name: set() for name in REFERENCE_DATA} <NEW_LINE> for name, reference_points in REFERENCE_DATA.iteritems(): <NEW_LINE> <INDENT> for point in reference_points: <NEW_LINE> <INDENT> prediction = self.predict([point]) <NEW_LINE> ids_to_names[name] = ids_to_names[name].union(prediction) <NEW_LINE> <DEDENT> <DEDENT> return ids_to_names | Class for clustering accelerometer data and retrieving data | 62599083aad79263cf4302e0 |
class ContractReceiveRouteNew(StateChange): <NEW_LINE> <INDENT> def __init__( self, token_network_identifier: typing.TokenNetworkID, participant1: typing.Address, participant2: typing.Address, ): <NEW_LINE> <INDENT> if not isinstance(participant1, typing.T_Address): <NEW_LINE> <INDENT> raise ValueError('participant1 must be of type address') <NEW_LINE> <DEDENT> if not isinstance(participant2, typing.T_Address): <NEW_LINE> <INDENT> raise ValueError('participant2 must be of type address') <NEW_LINE> <DEDENT> self.token_network_identifier = token_network_identifier <NEW_LINE> self.participant1 = participant1 <NEW_LINE> self.participant2 = participant2 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<ContractReceiveRouteNew token_network:{} node1:{} node2:{}>'.format( pex(self.token_network_identifier), pex(self.participant1), pex(self.participant2), ) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return ( isinstance(other, ContractReceiveRouteNew) and self.token_network_identifier == other.token_network_identifier and self.participant1 == other.participant1 and self.participant2 == other.participant2 ) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) | New channel was created and this node is NOT a participant. | 62599083283ffb24f3cf53c6 |
class AnonPermissionOnly(permissions.BasePermission): <NEW_LINE> <INDENT> message = "You are already authenticated. Please log out to try again" <NEW_LINE> def has_permission(self, request, view): <NEW_LINE> <INDENT> return not request.user.is_authenticated() | Non-authenticated users only | 625990837b180e01f3e49df7 |
class DeleteDisk(DiskCommand): <NEW_LINE> <INDENT> positional_args = '<disk-name-1> ... <disk-name-n>' <NEW_LINE> safety_prompt = 'Delete disk' <NEW_LINE> def __init__(self, name, flag_values): <NEW_LINE> <INDENT> super(DeleteDisk, self).__init__(name, flag_values) <NEW_LINE> <DEDENT> def Handle(self, *disk_names): <NEW_LINE> <INDENT> self._AutoDetectZone() <NEW_LINE> requests = [] <NEW_LINE> for disk_name in disk_names: <NEW_LINE> <INDENT> requests.append(self.api.disks.delete( **self._PrepareRequestArgs(disk_name))) <NEW_LINE> <DEDENT> results, exceptions = self.ExecuteRequests(requests) <NEW_LINE> return (self.MakeListResult(results, 'operationList'), exceptions) | Delete one or more persistent disks.
Specify multiple disks as space-separated entries. If multiple disk names
are specified, the disks will be deleted in parallel. | 625990834527f215b58eb732 |
class Ship(object): <NEW_LINE> <INDENT> def __init__(self, ai_settings, screen): <NEW_LINE> <INDENT> self.screen = screen <NEW_LINE> self.ai_settings = ai_settings <NEW_LINE> self.image = pygame.image.load('images/ship.png') <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.screen_rect = screen.get_rect() <NEW_LINE> self.rect.centerx = self.screen_rect.centerx <NEW_LINE> self.rect.bottom = self.screen_rect.bottom <NEW_LINE> self.center = float(self.rect.centerx) <NEW_LINE> self.moving_right = False <NEW_LINE> self.moving_left = False <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self.moving_right and self.rect.right < self.screen_rect.right: <NEW_LINE> <INDENT> self.center += self.ai_settings.ship_speed_factor <NEW_LINE> <DEDENT> if self.moving_left and self.rect.left > 0: <NEW_LINE> <INDENT> self.center -= self.ai_settings.ship_speed_factor <NEW_LINE> <DEDENT> self.rect.centerx = self.center <NEW_LINE> <DEDENT> def blitme(self): <NEW_LINE> <INDENT> self.screen.blit(self.image, self.rect) <NEW_LINE> <DEDENT> def center_ship(self): <NEW_LINE> <INDENT> self.center = self.screen_rect.centerx | docstring | 625990834428ac0f6e65a054 |
class SubmissionErrorRecastMiddleware(object): <NEW_LINE> <INDENT> def process_exception(self, request, exception): <NEW_LINE> <INDENT> if isinstance(exception, SubmissionError) and not hasattr(SubmissionError, "response_data"): <NEW_LINE> <INDENT> raise StructuredException(code="SUBMISSION_ERROR", message=exception.message) | When this middleware sees a SubmissionError,
it adds a response_data field to it.
We do this instead of "monkey-patching" a response_data
property into SubmissionError. | 62599083d8ef3951e32c8bf1 |
class DelSubscriptionEventSubscriber(EventSubscriber): <NEW_LINE> <INDENT> event_id = DEL_SUBSCRIPTION_EVENT_ID | Event Notification Subscriber for Subscription Modifications.
The "origin" parameter in this class' initializer should be the dispatcher resource id (UUID). | 6259908392d797404e3898ef |
class ActiveBannerManager(models.Manager): <NEW_LINE> <INDENT> def get_query_set(self, *args, **kwargs): <NEW_LINE> <INDENT> now = datetime.now() <NEW_LINE> return super(ActiveBannerManager, self). get_query_set(*args, **kwargs).filter(is_active=True) .filter(Q(end_date__gte=now) | Q(end_date__isnull=True)) .filter(Q(start_date__lte=now) | Q(start_date__isnull=True)) | Queryset with the active banner (active=True) and valid start and end dates
(according to the number of days defined by the subscription). | 625990837cff6e4e811b7567 |
class FileParentIsNotInRevisionAncestryScenario(BrokenRepoScenario): <NEW_LINE> <INDENT> def all_versions_after_reconcile(self): <NEW_LINE> <INDENT> return (b'rev1a', b'rev2') <NEW_LINE> <DEDENT> def populated_parents(self): <NEW_LINE> <INDENT> return ( ((), b'rev1a'), ((), b'rev1b'), ((b'rev1a', b'rev1b'), b'rev2')) <NEW_LINE> <DEDENT> def corrected_parents(self): <NEW_LINE> <INDENT> return ( ((), b'rev1a'), (None, b'rev1b'), ((b'rev1a',), b'rev2')) <NEW_LINE> <DEDENT> def check_regexes(self, repo): <NEW_LINE> <INDENT> return [r"\* a-file-id version rev2 has parents \(rev1a, rev1b\) " r"but should have \(rev1a\)", "1 unreferenced text versions", ] <NEW_LINE> <DEDENT> def populate_repository(self, repo): <NEW_LINE> <INDENT> inv = self.make_one_file_inventory( repo, b'rev1a', [], root_revision=b'rev1a') <NEW_LINE> self.add_revision(repo, b'rev1a', inv, []) <NEW_LINE> inv = self.make_one_file_inventory( repo, b'rev1b', [], root_revision=b'rev1b') <NEW_LINE> repo.add_inventory(b'rev1b', inv, []) <NEW_LINE> inv = self.make_one_file_inventory( repo, b'rev2', [b'rev1a', b'rev1b']) <NEW_LINE> self.add_revision(repo, b'rev2', inv, [b'rev1a']) <NEW_LINE> self.versioned_root = repo.supports_rich_root() <NEW_LINE> <DEDENT> def repository_text_key_references(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> if self.versioned_root: <NEW_LINE> <INDENT> result.update({(b'TREE_ROOT', b'rev1a'): True, (b'TREE_ROOT', b'rev2'): True}) <NEW_LINE> <DEDENT> result.update({(b'a-file-id', b'rev1a'): True, (b'a-file-id', b'rev2'): True}) <NEW_LINE> return result <NEW_LINE> <DEDENT> def repository_text_keys(self): <NEW_LINE> <INDENT> return {(b'a-file-id', b'rev1a'): [NULL_REVISION], (b'a-file-id', b'rev2'): [(b'a-file-id', b'rev1a')]} <NEW_LINE> <DEDENT> def versioned_repository_text_keys(self): <NEW_LINE> <INDENT> return {(b'TREE_ROOT', b'rev1a'): [NULL_REVISION], (b'TREE_ROOT', b'rev2'): [(b'TREE_ROOT', b'rev1a')]} | A scenario where a revision 'rev2' has 'a-file' with a
parent 'rev1b' that is not in the revision ancestry.
Reconcile should remove 'rev1b' from the parents list of 'a-file' in
'rev2', preserving 'rev1a' as a parent. | 625990837d847024c075df04 |
class HideReferers(Transform): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.regexp = re.compile(r'href="(http[^"]+)"') <NEW_LINE> <DEDENT> def replace(self, match): <NEW_LINE> <INDENT> url = match.group(1) <NEW_LINE> scheme, host, path, parameters, query, fragment = urlparse.urlparse(url) <NEW_LINE> url = 'http://www.google.com/url?sa=D&q=' + urllib.quote(url) <NEW_LINE> return 'href="%s"' % (url,) | A transform that hides referers for external hyperlinks. | 6259908363b5f9789fe86c8f |
class RemoveLabelTransform(AbstractTransform): <NEW_LINE> <INDENT> def __init__(self, remove_label, replace_with=0, input_key="seg", output_key="seg"): <NEW_LINE> <INDENT> self.output_key = output_key <NEW_LINE> self.input_key = input_key <NEW_LINE> self.replace_with = replace_with <NEW_LINE> self.remove_label = remove_label <NEW_LINE> <DEDENT> def __call__(self, **data_dict): <NEW_LINE> <INDENT> seg = data_dict[self.input_key] <NEW_LINE> seg[seg == self.remove_label] = self.replace_with <NEW_LINE> data_dict[self.output_key] = seg <NEW_LINE> return data_dict | Replaces all pixels in data_dict[input_key] that have value remove_label with replace_with and saves the result to
data_dict[output_key] | 62599083283ffb24f3cf53c7 |
class TestStripping(InvenioTestCase): <NEW_LINE> <INDENT> if UNIDECODE_AVAILABLE: <NEW_LINE> <INDENT> def test_text_to_ascii(self): <NEW_LINE> <INDENT> self.assertEqual(translate_to_ascii( ["á í Ú", "H\xc3\xb6hne", "Åge Øst Vær", "normal"]), ["a i U", "Hohne", "Age Ost Vaer", "normal"] ) <NEW_LINE> self.assertEqual(translate_to_ascii("àèéìòù"), ["aeeiou"]) <NEW_LINE> self.assertEqual(translate_to_ascii(None), None) <NEW_LINE> self.assertEqual(translate_to_ascii([]), []) <NEW_LINE> self.assertEqual(translate_to_ascii([None]), [None]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_strip_accents(self): <NEW_LINE> <INDENT> self.assertEqual("memememe", strip_accents('mémêmëmè')) <NEW_LINE> self.assertEqual("MEMEMEME", strip_accents('MÉMÊMËMÈ')) <NEW_LINE> self.assertEqual("oe", strip_accents('œ')) <NEW_LINE> self.assertEqual("OE", strip_accents('Œ')) | Test for stripping functions like accents and control characters. | 62599083aad79263cf4302e1 |
class Method(_TwincatProjectSubItem, _POUMember): <NEW_LINE> <INDENT> Implementation: list <NEW_LINE> Declaration: list <NEW_LINE> @property <NEW_LINE> def source_code(self): <NEW_LINE> <INDENT> return "\n".join( ( self.declaration, self.implementation, "END_METHOD", ) ) | [TcPOU] Code declaration for function block methods. | 62599083aad79263cf4302e2 |
class Solution: <NEW_LINE> <INDENT> def ladderLength(self, beginWord: str, endWord: str, wordList: List[str]) -> int: <NEW_LINE> <INDENT> self.len_word = len(beginWord) <NEW_LINE> self.deq = deque() <NEW_LINE> self.history = set() <NEW_LINE> self.wordList = wordList <NEW_LINE> self.add2deq(beginWord, 1) <NEW_LINE> while len(self.deq) > 0: <NEW_LINE> <INDENT> word, cnt = self.deq.popleft() <NEW_LINE> if word == endWord: <NEW_LINE> <INDENT> return cnt <NEW_LINE> <DEDENT> self.add2deq(word, cnt) <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> def compare(self, word_1, word_2): <NEW_LINE> <INDENT> cnt = 0 <NEW_LINE> for i in range(self.len_word): <NEW_LINE> <INDENT> if word_1[i] != word_2[i]: <NEW_LINE> <INDENT> cnt += 1 <NEW_LINE> if cnt > 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True if cnt == 1 else False <NEW_LINE> <DEDENT> def add2deq(self, word, cnt): <NEW_LINE> <INDENT> for w in self.wordList: <NEW_LINE> <INDENT> if w not in self.history and self.compare(word, w): <NEW_LINE> <INDENT> self.deq.append((w, cnt + 1)) <NEW_LINE> self.history.add(w) | bfs | 62599083283ffb24f3cf53c8 |
class Store(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.data = {} <NEW_LINE> self.document = {} <NEW_LINE> <DEDENT> def configure(self, document): <NEW_LINE> <INDENT> self.document = document <NEW_LINE> <DEDENT> def count_matrixes(self): <NEW_LINE> <INDENT> return len(self.data) <NEW_LINE> <DEDENT> def count_stages(self, matrix_name): <NEW_LINE> <INDENT> return len(self.data[matrix_name]) if matrix_name in self.data else 0 <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.data = {} <NEW_LINE> <DEDENT> def get_stage(self, matrix_name, stage_name): <NEW_LINE> <INDENT> found_stage = None <NEW_LINE> if matrix_name in self.data: <NEW_LINE> <INDENT> result = Select(self.data[matrix_name]).where( lambda entry: entry.stage == stage_name).build() <NEW_LINE> found_stage = result[0] if len(result) > 0 else None <NEW_LINE> <DEDENT> return found_stage <NEW_LINE> <DEDENT> def get_duration(self, matrix_name): <NEW_LINE> <INDENT> duration = 0.0 <NEW_LINE> if matrix_name in self.data: <NEW_LINE> <INDENT> duration = sum([stage.duration() for stage in self.data[matrix_name]]) <NEW_LINE> <DEDENT> return duration <NEW_LINE> <DEDENT> def update(self, item): <NEW_LINE> <INDENT> if item.matrix not in self.data: <NEW_LINE> <INDENT> self.data[item.matrix] = [] <NEW_LINE> <DEDENT> result = Select(self.data[item.matrix]).where( lambda entry: entry.stage == item.stage).build() <NEW_LINE> if len(result) > 0: <NEW_LINE> <INDENT> stage = result[0] <NEW_LINE> stage.status = item.status <NEW_LINE> stage.add(item.timestamp, item.information) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> stage = CollectorStage(stage=item.stage, status=item.status) <NEW_LINE> stage.add(item.timestamp, item.information) <NEW_LINE> self.data[item.matrix].append(stage) | Central collection of pipeline process data.
Attributes:
data (dict): the key is the matrix name and each value represents a list of stages. | 6259908355399d3f0562803c |
class BaseSinkAdapter(BaseFalcon): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> engines = { 'key': 'url' } <NEW_LINE> @abstractmethod <NEW_LINE> def __call__(self, req, resp, **kwargs): <NEW_LINE> <INDENT> pass | Consider this:
I want to get 3 results, but I only want the client to request 1 URL.
A sink adapter can help: it translates 1 URL into 3 URLs via the engines dict,
requests them, and returns the combined results. | 6259908350812a4eaa621958 |
class TestThreadLeakDetection(tests.TestCase): <NEW_LINE> <INDENT> class LeakRecordingResult(tests.ExtendedTestResult): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> tests.ExtendedTestResult.__init__(self, StringIO(), 0, 1) <NEW_LINE> self.leaks = [] <NEW_LINE> <DEDENT> def _report_thread_leak(self, test, leaks, alive): <NEW_LINE> <INDENT> self.leaks.append((test, leaks)) <NEW_LINE> <DEDENT> <DEDENT> def test_testcase_without_addCleanups(self): <NEW_LINE> <INDENT> class Test(unittest.TestCase): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> result = self.LeakRecordingResult() <NEW_LINE> test = Test() <NEW_LINE> result.startTestRun() <NEW_LINE> test.run(result) <NEW_LINE> result.stopTestRun() <NEW_LINE> self.assertEqual(result._tests_leaking_threads_count, 0) <NEW_LINE> self.assertEqual(result.leaks, []) <NEW_LINE> <DEDENT> def test_thread_leak(self): <NEW_LINE> <INDENT> event = threading.Event() <NEW_LINE> thread = threading.Thread(name="Leaker", target=event.wait) <NEW_LINE> class Test(tests.TestCase): <NEW_LINE> <INDENT> def test_leak(self): <NEW_LINE> <INDENT> thread.start() <NEW_LINE> <DEDENT> <DEDENT> result = self.LeakRecordingResult() <NEW_LINE> test = Test("test_leak") <NEW_LINE> self.addCleanup(thread.join) <NEW_LINE> self.addCleanup(event.set) <NEW_LINE> result.startTestRun() <NEW_LINE> test.run(result) <NEW_LINE> result.stopTestRun() <NEW_LINE> self.assertEqual(result._tests_leaking_threads_count, 1) <NEW_LINE> self.assertEqual(result._first_thread_leaker_id, test.id()) <NEW_LINE> self.assertEqual(result.leaks, [(test, set([thread]))]) <NEW_LINE> self.assertContainsString(result.stream.getvalue(), "leaking threads") <NEW_LINE> <DEDENT> def test_multiple_leaks(self): <NEW_LINE> <INDENT> event = threading.Event() <NEW_LINE> thread_a = threading.Thread(name="LeakerA", target=event.wait) <NEW_LINE> thread_b = threading.Thread(name="LeakerB", target=event.wait) <NEW_LINE> thread_c = threading.Thread(name="LeakerC", target=event.wait) <NEW_LINE> class Test(tests.TestCase): <NEW_LINE> <INDENT> def test_first_leak(self): <NEW_LINE> <INDENT> thread_b.start() <NEW_LINE> <DEDENT> def test_second_no_leak(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_third_leak(self): <NEW_LINE> <INDENT> thread_c.start() <NEW_LINE> thread_a.start() <NEW_LINE> <DEDENT> <DEDENT> result = self.LeakRecordingResult() <NEW_LINE> first_test = Test("test_first_leak") <NEW_LINE> third_test = Test("test_third_leak") <NEW_LINE> self.addCleanup(thread_a.join) <NEW_LINE> self.addCleanup(thread_b.join) <NEW_LINE> self.addCleanup(thread_c.join) <NEW_LINE> self.addCleanup(event.set) <NEW_LINE> result.startTestRun() <NEW_LINE> unittest.TestSuite( [first_test, Test("test_second_no_leak"), third_test] ).run(result) <NEW_LINE> result.stopTestRun() <NEW_LINE> self.assertEqual(result._tests_leaking_threads_count, 2) <NEW_LINE> self.assertEqual(result._first_thread_leaker_id, first_test.id()) <NEW_LINE> self.assertEqual(result.leaks, [ (first_test, set([thread_b])), (third_test, set([thread_a, thread_c]))]) <NEW_LINE> self.assertContainsString(result.stream.getvalue(), "leaking threads") | Ensure when tests leak threads we detect and report it | 625990837c178a314d78e97e |
class EventLinks(flourish.page.RefineLinksViewlet): <NEW_LINE> <INDENT> pass | Manager for Action links in event views. | 6259908376e4537e8c3f10a7 |
class UsefulnessFactory(factory.django.DjangoModelFactory): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Usefulness <NEW_LINE> <DEDENT> name = factory.Sequence(lambda n: 'usefulness%d' % n) <NEW_LINE> notes = factory.Faker('paragraph', nb_sentences=1) | Usefulness factory | 62599084283ffb24f3cf53c9 |
class WorkflowTriggerHistoryListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[WorkflowTriggerHistory]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(WorkflowTriggerHistoryListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None) | The list of workflow trigger histories.
:param value: A list of workflow trigger histories.
:type value: list[~azure.mgmt.logic.models.WorkflowTriggerHistory]
:param next_link: The URL to get the next set of results.
:type next_link: str | 625990845fcc89381b266ef1 |
class MeterStatDict(UserDict): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> UserDict.__init__(self) <NEW_LINE> self._m1 = EWMA.oneMinute() <NEW_LINE> self._m5 = EWMA.fiveMinute() <NEW_LINE> self._m15 = EWMA.fifteenMinute() <NEW_LINE> self._meters = (self._m1, self._m5, self._m15) <NEW_LINE> TICKERS.append(self.tick) <NEW_LINE> self['unit'] = 'per second' <NEW_LINE> self['count'] = 0 <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> if item in self: <NEW_LINE> <INDENT> return UserDict.__getitem__(self, item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> <DEDENT> def tick(self): <NEW_LINE> <INDENT> for m in self._meters: <NEW_LINE> <INDENT> m.tick() <NEW_LINE> <DEDENT> self['m1'] = self._m1.rate <NEW_LINE> self['m5'] = self._m5.rate <NEW_LINE> self['m15'] = self._m15.rate <NEW_LINE> <DEDENT> def mark(self, value=1): <NEW_LINE> <INDENT> self['count'] += value <NEW_LINE> for m in self._meters: <NEW_LINE> <INDENT> m.update(value) | Stores the meters for MeterStat. Expects to be ticked every 5 seconds. | 62599084283ffb24f3cf53ca |
class JobOffer(models.Model): <NEW_LINE> <INDENT> position = models.CharField(max_length=160) <NEW_LINE> description = MarkdownxField(blank=True, null=True) <NEW_LINE> slug = AutoSlugField(populate_from='position', unique=True) <NEW_LINE> entity = models.CharField(max_length=100) <NEW_LINE> location = models.CharField(max_length=100) <NEW_LINE> published = models.BooleanField(default=True) <NEW_LINE> date_created = models.DateTimeField(auto_now_add=True) <NEW_LINE> gps_lat = models.DecimalField(null=True, blank=True, max_digits=9, decimal_places=6) <NEW_LINE> gps_lon = models.DecimalField(null=True, blank=True, max_digits=9, decimal_places=6) <NEW_LINE> files = GenericRelation(FileItem) <NEW_LINE> entity.help_text = 'Institution (univ., agency or company) that offers the job' <NEW_LINE> published.help_text = 'Should the event be displayed publicly?' <NEW_LINE> gps_lat.help_text = 'GPS Latitute (for mapping purposes)' <NEW_LINE> gps_lon.help_text = 'GPS Longitude (for mapping purposes)' <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{self.position} at {self.entity} in {self.location}' | Job offer item
position -
entity - entity offering the position
location - (blank) where the job is offered
published - boolean for the job to be listed on the website
date_created - (auto) date of inserting in the DB
gps_lat - GPS coordinates (for mapping purposes)
gps_lon - GPS coordinates (for mapping purposes)
files - List of attached FileItem | 62599084ad47b63b2c5a937b |
class Lps141(base): <NEW_LINE> <INDENT> __tablename__ = 'lps141' <NEW_LINE> year = Column(CHAR,primary_key=True) <NEW_LINE> mq = Column(VARCHAR,primary_key=True) <NEW_LINE> kisc_code = Column(VARCHAR,primary_key=True) <NEW_LINE> ca_gva_index = Column(DOUBLE) <NEW_LINE> def __init__(self, year, mq, kisc_code, ca_gva_index): <NEW_LINE> <INDENT> self.year = year <NEW_LINE> self.mq = mq <NEW_LINE> self.kisc_code = kisc_code <NEW_LINE> self.ca_gva_index = ca_gva_index | Value Add Index | 625990845fdd1c0f98e5faa9 |
class Test01_ReconnectLDAPObject(Test00_SimpleLDAPObject): <NEW_LINE> <INDENT> ldap_object_class = ReconnectLDAPObject <NEW_LINE> @requires_sasl() <NEW_LINE> @requires_ldapi() <NEW_LINE> def test101_reconnect_sasl_external(self): <NEW_LINE> <INDENT> l = self.ldap_object_class(self.server.ldapi_uri) <NEW_LINE> l.sasl_external_bind_s() <NEW_LINE> authz_id = l.whoami_s() <NEW_LINE> self.assertEqual(authz_id, 'dn:'+self.server.root_dn.lower()) <NEW_LINE> self.server.restart() <NEW_LINE> self.assertEqual(l.whoami_s(), authz_id) <NEW_LINE> <DEDENT> def test102_reconnect_simple_bind(self): <NEW_LINE> <INDENT> l = self.ldap_object_class(self.server.ldap_uri) <NEW_LINE> bind_dn = 'cn=user1,'+self.server.suffix <NEW_LINE> l.simple_bind_s(bind_dn, 'user1_pw') <NEW_LINE> self.assertEqual(l.whoami_s(), 'dn:'+bind_dn) <NEW_LINE> self.server.restart() <NEW_LINE> self.assertEqual(l.whoami_s(), 'dn:'+bind_dn) <NEW_LINE> <DEDENT> def test103_reconnect_get_state(self): <NEW_LINE> <INDENT> l1 = self.ldap_object_class(self.server.ldap_uri) <NEW_LINE> bind_dn = 'cn=user1,'+self.server.suffix <NEW_LINE> l1.simple_bind_s(bind_dn, 'user1_pw') <NEW_LINE> self.assertEqual(l1.whoami_s(), 'dn:'+bind_dn) <NEW_LINE> self.assertEqual( l1.__getstate__(), { str('_last_bind'): ( 'simple_bind_s', (bind_dn, 'user1_pw'), {} ), str('_options'): [(17, 3)], str('_reconnects_done'): 0, str('_retry_delay'): 60.0, str('_retry_max'): 1, str('_start_tls'): 0, str('_trace_level'): 0, str('_trace_stack_limit'): 5, str('_uri'): self.server.ldap_uri, str('bytes_mode'): l1.bytes_mode, str('bytes_mode_hardfail'): l1.bytes_mode_hardfail, str('timeout'): -1, }, ) <NEW_LINE> <DEDENT> def test104_reconnect_restore(self): <NEW_LINE> <INDENT> l1 = self.ldap_object_class(self.server.ldap_uri) <NEW_LINE> bind_dn = 'cn=user1,'+self.server.suffix <NEW_LINE> l1.simple_bind_s(bind_dn, 'user1_pw') <NEW_LINE> self.assertEqual(l1.whoami_s(), 'dn:'+bind_dn) <NEW_LINE> l1_state = pickle.dumps(l1) <NEW_LINE> del l1 <NEW_LINE> l2 = pickle.loads(l1_state) <NEW_LINE> self.assertEqual(l2.whoami_s(), 'dn:'+bind_dn) | test ReconnectLDAPObject by restarting slapd | 62599084d8ef3951e32c8bf3 |
class PretrainedModelLoader: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._valid_models = [n for n, _ in inspect.getmembers(torchvision.models, inspect.isfunction)] <NEW_LINE> <DEDENT> @property <NEW_LINE> def model_names(self): <NEW_LINE> <INDENT> return self._valid_models <NEW_LINE> <DEDENT> def load(self, model_name): <NEW_LINE> <INDENT> model_func = getattr(torchvision.models, model_name, None) <NEW_LINE> assert callable( model_func) == True, "The function torchvision.models.{} must be a callable. The valid list of callables are {}".format( model_name, self._valid_models) <NEW_LINE> model = model_func(pretrained=True) <NEW_LINE> return model <NEW_LINE> <DEDENT> def save(self, model, model_dir): <NEW_LINE> <INDENT> model_name_path = os.path.join(model_dir, "model.pt") <NEW_LINE> torch.save(model, model_name_path) <NEW_LINE> return model_name_path <NEW_LINE> <DEDENT> def __call__(self, model_name, model_dir): <NEW_LINE> <INDENT> return self.save(self.load(model_name), model_dir) | Loads a pretrained model | 6259908492d797404e3898f1 |
class InstallCommand(InstallCommandBase): <NEW_LINE> <INDENT> def finalize_options(self): <NEW_LINE> <INDENT> ret = InstallCommandBase.finalize_options(self) <NEW_LINE> self.install_headers = os.path.join(self.install_purelib, 'tensorflow_core', 'include') <NEW_LINE> self.install_lib = self.install_platlib <NEW_LINE> return ret | Override the dir where the headers go. | 625990847cff6e4e811b756b |
class GoodsSPU(BaseModel): <NEW_LINE> <INDENT> name = models.CharField(max_length=20, verbose_name='商品SPU名称') <NEW_LINE> detail = HTMLField(verbose_name='商品详情', blank=True, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'goods_spu' <NEW_LINE> verbose_name = '商品SPU' <NEW_LINE> verbose_name_plural = verbose_name | Goods SPU model class | 6259908499fddb7c1ca63b6f |
class Iris: <NEW_LINE> <INDENT> flowers = [] <NEW_LINE> labels = [] <NEW_LINE> threshold = 0 <NEW_LINE> def __init__(self, values, label): <NEW_LINE> <INDENT> self.values = values <NEW_LINE> self.label = label | Class to store a kind of Iris and its values | 62599084e1aae11d1e7cf5a8 |
class TestExample(TestCase): <NEW_LINE> <INDENT> def test_add_positive(self): <NEW_LINE> <INDENT> self.assertEqual(5, add(2, 3)) | Tests of example module | 625990844a966d76dd5f0a10 |
class Subscription: <NEW_LINE> <INDENT> __slots__ = ('id', 'topic', 'active', 'session', 'handler') <NEW_LINE> def __init__(self, subscription_id, topic, session, handler): <NEW_LINE> <INDENT> self.id = subscription_id <NEW_LINE> self.topic = topic <NEW_LINE> self.active = True <NEW_LINE> self.session = session <NEW_LINE> self.handler = handler <NEW_LINE> <DEDENT> def unsubscribe(self): <NEW_LINE> <INDENT> if self.active: <NEW_LINE> <INDENT> return self.session._unsubscribe(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("subscription no longer active") <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Subscription(id={0}, is_active={1})".format(self.id, self.active) | Object representing a handler subscription. | 625990845fc7496912d49000 |
class SetWebProtectSwitchRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(SetWebProtectSwitchRequest, self).__init__( '/domain/{domain}/wafWebProtectSwitch', 'POST', header, version) <NEW_LINE> self.parameters = parameters | Set the web protection switch | 625990845fdd1c0f98e5faaa |
class AlphaBetaPlayer(IsolationPlayer): <NEW_LINE> <INDENT> def get_move(self, game, time_left): <NEW_LINE> <INDENT> self.time_left = time_left <NEW_LINE> best_move = (-1, -1) <NEW_LINE> try: <NEW_LINE> <INDENT> depth = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> depth += 1 <NEW_LINE> best_move = self.alphabeta(game, depth) <NEW_LINE> <DEDENT> <DEDENT> except SearchTimeout: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return best_move <NEW_LINE> <DEDENT> def alphabeta(self, game, depth, alpha=float("-inf"), beta=float("inf")): <NEW_LINE> <INDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise SearchTimeout() <NEW_LINE> <DEDENT> _, move = self.alpha_beta_max_val(game, depth, alpha, beta) <NEW_LINE> return move <NEW_LINE> <DEDENT> def alpha_beta_max_val(self, game, depth, alpha, beta): <NEW_LINE> <INDENT> stop_search, legal_moves, score = self.check_move(game, depth, max_level = True) <NEW_LINE> if stop_search: <NEW_LINE> <INDENT> return score, legal_moves <NEW_LINE> <DEDENT> best_move = (-1, -1) <NEW_LINE> best_value = self.MIN_VALUE <NEW_LINE> for move in legal_moves: <NEW_LINE> <INDENT> value, _ = self.alpha_beta_min_val(game.forecast_move(move), depth - 1, alpha, beta) <NEW_LINE> if value > best_value: <NEW_LINE> <INDENT> best_value, best_move = value, move <NEW_LINE> <DEDENT> if best_value >= beta: <NEW_LINE> <INDENT> return best_value, best_move <NEW_LINE> <DEDENT> alpha = max(alpha, best_value) <NEW_LINE> <DEDENT> return best_value, best_move <NEW_LINE> <DEDENT> def alpha_beta_min_val(self, game, depth, alpha, beta): <NEW_LINE> <INDENT> stop_search, legal_moves, score = self.check_move(game, depth, max_level = False) <NEW_LINE> if stop_search: <NEW_LINE> <INDENT> return score, legal_moves <NEW_LINE> <DEDENT> best_move = (-1, -1) <NEW_LINE> best_value = self.MAX_VALUE <NEW_LINE> for move in legal_moves: <NEW_LINE> <INDENT> value, val_move = self.alpha_beta_max_val(game.forecast_move(move), depth - 1, alpha, beta) <NEW_LINE> if value < best_value: <NEW_LINE> <INDENT> best_value, best_move = value, move <NEW_LINE> <DEDENT> if best_value <= alpha: <NEW_LINE> <INDENT> return best_value, best_move <NEW_LINE> <DEDENT> beta = min(beta, best_value) <NEW_LINE> <DEDENT> return best_value, best_move | Game-playing agent that chooses a move using iterative deepening minimax
search with alpha-beta pruning. You must finish and test this player to
make sure it returns a good move before the search time limit expires. | 625990844527f215b58eb735 |
class S3Storage(BaseFileStorage): <NEW_LINE> <INDENT> def upload(self, filename, content, app): <NEW_LINE> <INDENT> new_file = CloudFile(name=filename, app=app) <NEW_LINE> conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY) <NEW_LINE> key = new_file.get_upload_loc(filename) <NEW_LINE> if default_storage.exists(key): <NEW_LINE> <INDENT> raise FileExists() <NEW_LINE> <DEDENT> new_file.size = content.size <NEW_LINE> new_file.content.save(filename, content) <NEW_LINE> url = conn.generate_url( settings.AWS_URL_EXPIRY, "GET", bucket=settings.AWS_STORAGE_BUCKET_NAME, key=key) <NEW_LINE> new_file.url = url <NEW_LINE> new_file.save() <NEW_LINE> return new_file <NEW_LINE> <DEDENT> def delete(self, filename, app): <NEW_LINE> <INDENT> conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY) <NEW_LINE> try: <NEW_LINE> <INDENT> fileobj = CloudFile.objects.get(name=filename, app=app) <NEW_LINE> <DEDENT> except CloudFile.DoesNotExist: <NEW_LINE> <INDENT> raise FileNotFound() <NEW_LINE> <DEDENT> fileobj.content.delete() <NEW_LINE> fileobj.delete() | Amazon S3 storage for app file uploads | 62599084bf627c535bcb2ffe |
class TestUserInfo(dict, Enum): <NEW_LINE> <INDENT> user1 = { 'username': 'foo', 'firstname': 'bar', 'lastname': 'User', 'keycloak_guid': uuid.uuid4() } | Test scenarios of user. | 625990845fdd1c0f98e5faab |
class Scene: <NEW_LINE> <INDENT> def __init__(self, store, painter, selection_store): <NEW_LINE> <INDENT> self.store = store <NEW_LINE> self.painter = painter <NEW_LINE> self.selection_store = selection_store <NEW_LINE> <DEDENT> def repaint(self): <NEW_LINE> <INDENT> self.painter.clear() <NEW_LINE> for object in self.store.list: <NEW_LINE> <INDENT> object.draw(self.painter) <NEW_LINE> <DEDENT> for select in self.selection_store.list: <NEW_LINE> <INDENT> select.draw(self.painter) | Scene class | 6259908497e22403b383ca29 |
class FactSheetHasPredecessor: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'ID': 'str', 'factSheetID': 'str', 'factSheetRefID': 'str', 'description': 'str', 'dependencyTypeID': 'str' } <NEW_LINE> self.ID = None <NEW_LINE> self.factSheetID = None <NEW_LINE> self.factSheetRefID = None <NEW_LINE> self.description = None <NEW_LINE> self.dependencyTypeID = None | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599084656771135c48adc6 |
class MessageFailedException(TransportException): <NEW_LINE> <INDENT> pass | The transport has failed to deliver the message due to a problem with
the message itself, and no attempt should be made to retry delivery of
this message. The transport may still be re-used, however.
The reason for the failure should be the first argument. | 6259908444b2445a339b76f2 |
class CyberSourceTransaction(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User, on_delete=models.CASCADE) <NEW_LINE> course = models.ForeignKey(Course, on_delete=models.CASCADE) <NEW_LINE> uuid = models.CharField(max_length=32) <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> return_from_cybersource = models.DateTimeField(null=True, blank=True) | Stores credit card transaction receipts made with CyberSource. | 62599084adb09d7d5dc0c086 |
class Meta: <NEW_LINE> <INDENT> app_label = "game" <NEW_LINE> verbose_name = "User Profile" | Metadata class for Player | 625990847c178a314d78e980 |
@inside_glslc_testsuite('Include') <NEW_LINE> class TestWrongPoundVersionInIncludingFile(expect.ValidObjectFileWithWarning): <NEW_LINE> <INDENT> environment = Directory('.', [ File('a.vert', '#version 100000000\n#include "b.glsl"\n'), File('b.glsl', 'void main() {}\n')]) <NEW_LINE> glslc_args = ['-c', 'a.vert'] <NEW_LINE> expected_warning = [ 'a.vert: warning: version 100000000 is unknown.\n', '1 warning generated.\n' ] | Tests that warning message for #version directive in the including file
has the correct filename. | 62599084283ffb24f3cf53cd |
class CambiarDireccionForm(forms.Form): <NEW_LINE> <INDENT> direccion = forms.ModelChoiceField(Direccion.objects.all()) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> usuario = kwargs.pop('usuario') <NEW_LINE> direccion_previa = kwargs.pop('direccion_previa') <NEW_LINE> super(CambiarDireccionForm, self).__init__(*args, **kwargs) <NEW_LINE> if direccion_previa is not None: <NEW_LINE> <INDENT> self.fields['direccion']._set_queryset(usuario.direccion.all().exclude(pk=direccion_previa.pk)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fields['direccion']._set_queryset(usuario.direccion.all()) | CambiarDireccion form class. | 62599084e1aae11d1e7cf5a9 |
class TestLti1p3LaunchGateEndpoint(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.location = 'block-v1:course+test+2020+type@problem+block@test' <NEW_LINE> self.url = '/lti_consumer/v1/launch/' <NEW_LINE> self.request = {'login_hint': self.location} <NEW_LINE> xblock_handler_patcher = patch( 'lti_consumer.plugin.views.compat.run_xblock_handler', return_value=HttpResponse() ) <NEW_LINE> self.addCleanup(xblock_handler_patcher.stop) <NEW_LINE> self._mock_xblock_handler = xblock_handler_patcher.start() <NEW_LINE> <DEDENT> def test_launch_gate(self): <NEW_LINE> <INDENT> response = self.client.get(self.url, self.request) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self._mock_xblock_handler.assert_called_once() <NEW_LINE> kwargs = self._mock_xblock_handler.call_args.kwargs <NEW_LINE> self.assertEqual(kwargs['usage_id'], self.location) <NEW_LINE> self.assertEqual(kwargs['handler'], 'lti_1p3_launch_callback') <NEW_LINE> <DEDENT> def test_invalid_usage_key(self): <NEW_LINE> <INDENT> self._mock_xblock_handler.side_effect = Exception() <NEW_LINE> response = self.client.get(self.url, self.request) <NEW_LINE> self.assertEqual(response.status_code, 404) | Test `launch_gate_endpoint` method. | 625990843617ad0b5ee07c7d |
class DescribeBanRegionsResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RegionSet = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("RegionSet") is not None: <NEW_LINE> <INDENT> self.RegionSet = [] <NEW_LINE> for item in params.get("RegionSet"): <NEW_LINE> <INDENT> obj = RegionSet() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.RegionSet.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId") | DescribeBanRegions response structure
| 62599084ad47b63b2c5a937f |
class CrcPURCHASE(db.Model): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __tablename__ = 'crcPURCHASE' <NEW_LINE> id = db.Column(db.Integer(15, unsigned=True), nullable=False, primary_key=True, autoincrement=True) <NEW_LINE> id_bibrec = db.Column(db.MediumInteger(8, unsigned=True), db.ForeignKey(Bibrec.id), nullable=False, server_default='0') <NEW_LINE> id_crcVENDOR = db.Column(db.Integer(15, unsigned=True), db.ForeignKey(CrcVENDOR.id), nullable=False, server_default='0') <NEW_LINE> ordered_date = db.Column(db.DateTime, nullable=False, server_default='1900-01-01 00:00:00') <NEW_LINE> expected_date = db.Column(db.DateTime, nullable=False, server_default='1900-01-01 00:00:00') <NEW_LINE> price = db.Column(db.String(20), nullable=False, server_default='0') <NEW_LINE> status = db.Column(db.String(20), nullable=False, server_default='') <NEW_LINE> notes = db.Column(db.Text, nullable=True) <NEW_LINE> bibrec = db.relationship(Bibrec, backref='purchases') <NEW_LINE> vendor = db.relationship(CrcVENDOR, backref='purchases') | Represents a CrcPURCHASE record. | 6259908455399d3f05628042 |
class NoNameFoundError(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message | No name was found for this entry | 6259908423849d37ff852be7 |
class EqualizedLinear(EqualizedLayer): <NEW_LINE> <INDENT> def __init__(self, dim_in, dim_out, equalized = True): <NEW_LINE> <INDENT> super(EqualizedLinear, self).__init__( nn.Linear(dim_in, dim_out), equalized ) | Linear layer with He's initialization | 625990842c8b7c6e89bd5313 |
class Kkma(): <NEW_LINE> <INDENT> def nouns(self, phrase): <NEW_LINE> <INDENT> nouns = self.jki.extractNoun(phrase) <NEW_LINE> if not nouns: return [] <NEW_LINE> return [nouns.get(i).getString() for i in range(nouns.size())] <NEW_LINE> <DEDENT> def pos(self, phrase, flatten=True): <NEW_LINE> <INDENT> sentences = self.jki.morphAnalyzer(phrase) <NEW_LINE> morphemes = [] <NEW_LINE> if not sentences: return morphemes <NEW_LINE> for i in range(sentences.size()): <NEW_LINE> <INDENT> sentence = sentences.get(i) <NEW_LINE> for j in range(sentence.size()): <NEW_LINE> <INDENT> eojeol = sentence.get(j) <NEW_LINE> if flatten: <NEW_LINE> <INDENT> for k in range(eojeol.size()): <NEW_LINE> <INDENT> morpheme = eojeol.get(k) <NEW_LINE> morphemes.append((morpheme.getString(), morpheme.getTag())) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> morphemes.append([(eojeol.get(k).getString(), eojeol.get(k).getTag()) for k in range(eojeol.size())]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return morphemes <NEW_LINE> <DEDENT> def morphs(self, phrase): <NEW_LINE> <INDENT> return [s for s, t in self.pos(phrase)] <NEW_LINE> <DEDENT> def sentences(self, phrase): <NEW_LINE> <INDENT> sentences = self.jki.morphAnalyzer(phrase) <NEW_LINE> if not sentences: return [] <NEW_LINE> return [sentences.get(i).getSentence() for i in range(sentences.size())] <NEW_LINE> <DEDENT> def __init__(self, jvmpath=None): <NEW_LINE> <INDENT> if not jpype.isJVMStarted(): <NEW_LINE> <INDENT> jvm.init_jvm(jvmpath) <NEW_LINE> <DEDENT> kkmaJavaPackage = jpype.JPackage('kr.lucypark.kkma') <NEW_LINE> KkmaInterfaceJavaClass = kkmaJavaPackage.KkmaInterface <NEW_LINE> self.jki = KkmaInterfaceJavaClass() | Wrapper for `Kkma <http://kkma.snu.ac.kr>`_.
Kkma is a morphological analyzer and natural language processing system written in Java, developed by the Intelligent Data Systems (IDS) Laboratory at `SNU <http://snu.ac.kr>`_.
.. code-block:: python
>>> from konlpy.tag import Kkma
>>> kkma = Kkma()
>>> print(kkma.morphs(u'공부를 하면할수록 모르는게 많다는 것을 알게 됩니다.'))
['공부', '를', '하', '면', '하', 'ㄹ수록', '모르', '는', '것', '이', '많', '다는', '것', '을', '알', '게', '되', 'ㅂ니다', '.']
>>> print(kkma.nouns(u'대학에서 DB, 통계학, 이산수학 등을 배웠지만...'))
['대학', '통계학', '이산', '이산수학', '수학', '등']
>>> print(kkma.pos(u'다 까먹어버렸네요?ㅋㅋ'))
[('다', 'MAG'), ('까먹', 'VV'), ('어', 'ECD'), ('버리', 'VXV'), ('었', 'EPT'), ('네요', 'EFN'), ('?', 'SF'), ('ㅋㅋ', 'EMO')]
>>> print(kkma.sentences(u'그래도 계속 공부합니다. 재밌으니까!'))
['그래도 계속 공부합니다.', '재밌으니까!']
:param jvmpath: The path of the JVM passed to :py:func:`.init_jvm`. | 62599084796e427e538502a9 |
class ExpressionEvaluationOptions(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'scope': {'key': 'scope', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, scope: Optional[Union[str, "ExpressionEvaluationOptionsScopeType"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ExpressionEvaluationOptions, self).__init__(**kwargs) <NEW_LINE> self.scope = scope | Specifies whether template expressions are evaluated within the scope of the parent template or nested template.
:ivar scope: The scope to be used for evaluation of parameters, variables and functions in a
nested template. Possible values include: "NotSpecified", "Outer", "Inner".
:vartype scope: str or
~azure.mgmt.resource.resources.v2021_04_01.models.ExpressionEvaluationOptionsScopeType | 625990848a349b6b43687d8c |
class ImageSpec(BaseImageSpec): <NEW_LINE> <INDENT> processors = [] <NEW_LINE> format = None <NEW_LINE> options = None <NEW_LINE> autoconvert = True <NEW_LINE> def __init__(self, source): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> super(ImageSpec, self).__init__() <NEW_LINE> <DEDENT> @property <NEW_LINE> def cachefile_name(self): <NEW_LINE> <INDENT> if not self.source: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> fn = get_by_qname(settings.IMAGEKIT_SPEC_CACHEFILE_NAMER, 'namer') <NEW_LINE> return fn(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def source(self): <NEW_LINE> <INDENT> src = getattr(self, '_source', None) <NEW_LINE> if not src: <NEW_LINE> <INDENT> field_data = getattr(self, '_field_data', None) <NEW_LINE> if field_data: <NEW_LINE> <INDENT> src = self._source = getattr(field_data['instance'], field_data['attname']) <NEW_LINE> del self._field_data <NEW_LINE> <DEDENT> <DEDENT> return src <NEW_LINE> <DEDENT> @source.setter <NEW_LINE> def source(self, value): <NEW_LINE> <INDENT> self._source = value <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> state = copy(self.__dict__) <NEW_LINE> if isinstance(self.source, ImageFieldFile): <NEW_LINE> <INDENT> field = getattr(self.source, 'field') <NEW_LINE> state['_field_data'] = { 'instance': getattr(self.source, 'instance', None), 'attname': getattr(field, 'name', None), } <NEW_LINE> state.pop('_source', None) <NEW_LINE> <DEDENT> return state <NEW_LINE> <DEDENT> def get_hash(self): <NEW_LINE> <INDENT> return hashers.pickle([ self.source.name, self.processors, self.format, self.options, self.autoconvert, ]) <NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> if not self.source: <NEW_LINE> <INDENT> raise MissingSource("The spec '%s' has no source file associated" " with it." % self) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> img = open_image(self.source) <NEW_LINE> <DEDENT> except (ValueError, AttributeError): <NEW_LINE> <INDENT> if not hasattr(self.source, "storage"): <NEW_LINE> <INDENT> self.source.storage = get_singleton(settings.IMAGEKIT_DEFAULT_FILE_STORAGE, 'file storage backend') <NEW_LINE> <DEDENT> self.source.open() <NEW_LINE> img = open_image(self.source) <NEW_LINE> <DEDENT> return process_image(img, processors=self.processors, format=self.format, autoconvert=self.autoconvert, options=self.options) | An object that defines how to generate a new image from a source file using
PIL-based processors. (See :mod:`imagekit.processors`) | 625990847b180e01f3e49dfc |
class Sky2Pix_TAN(Sky2PixProjection, Zenithal): <NEW_LINE> <INDENT> @property <NEW_LINE> def inverse(self): <NEW_LINE> <INDENT> return Pix2Sky_TAN() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def evaluate(cls, phi, theta): <NEW_LINE> <INDENT> phi = np.deg2rad(phi) <NEW_LINE> theta = np.deg2rad(theta) <NEW_LINE> r_theta = cls._compute_r_theta(theta) <NEW_LINE> x = np.rad2deg(r_theta * np.sin(phi)) <NEW_LINE> y = -np.rad2deg(r_theta * np.cos(phi)) <NEW_LINE> return x, y <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _compute_r_theta(theta): <NEW_LINE> <INDENT> return 1 / np.tan(theta) | TAN : Gnomonic Projection - sky to pixel. | 6259908423849d37ff852be9 |
class SpellRecords(_UsesEffectsMixin): <NEW_LINE> <INDENT> _extra_attrs = (u'flags.noAutoCalc', u'flags.startSpell', u'flags.immuneToSilence', u'flags.ignoreLOS', u'flags.scriptEffectAlwaysApplies', u'flags.disallowAbsorbReflect', u'flags.touchExplodesWOTarget') <NEW_LINE> _csv_attrs = (u'eid', u'cost', u'level', u'spellType', u'flags') <NEW_LINE> _csv_header = (_(u'Type'), _(u'Mod Name'), _(u'ObjectIndex'), _(u'Editor Id'), _(u'Cost'), _(u'Level Type'), _(u'Spell Type'), _(u'Spell Flags')) <NEW_LINE> _row_fmt_str = u'"SPEL","%s","0x%06X",%s\n' <NEW_LINE> _parser_sigs = [b'SPEL'] <NEW_LINE> _attr_dex = None <NEW_LINE> def __init__(self, aliases_=None, detailed=False, called_from_patcher=False): <NEW_LINE> <INDENT> atts = (bush.game.spell_stats_attrs if called_from_patcher else self._csv_attrs) <NEW_LINE> if detailed: <NEW_LINE> <INDENT> atts += (*self.__class__._extra_attrs, u'effects') <NEW_LINE> self._csv_header += ( *(attr_csv_struct[x][1] for x in self.__class__._extra_attrs), *_UsesEffectsMixin.effect_headers * 2, _(u'Additional Effects (Same format)')) <NEW_LINE> self._attr_dex = dict( zip(self.__class__._extra_attrs, range(8, 15))) <NEW_LINE> <DEDENT> super(SpellRecords, self).__init__(aliases_, atts, called_from_patcher) <NEW_LINE> <DEDENT> def _parse_line(self, fields): <NEW_LINE> <INDENT> if fields[0].lower() != u'spel': return <NEW_LINE> mid = self._coerce_fid(fields[1], fields[2]) <NEW_LINE> if int_or_none(fields[4]) is None: <NEW_LINE> <INDENT> attr_dex = {u'eid': 3, u'cost': 5, u'level': 6, u'spellType': 7} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attr_dex = {u'eid': 3, u'cost': 4, u'level': 5, u'spellType': 6, u'flags': 7} <NEW_LINE> <DEDENT> self.fid_stats[mid] = super(_UsesEffectsMixin, self)._update_from_csv( b'SPEL', fields, index_dict=attr_dex) <NEW_LINE> if self._attr_dex: <NEW_LINE> <INDENT> attr_val = super(_UsesEffectsMixin, self)._update_from_csv(b'SPEL', fields) <NEW_LINE> attr_val[u'effects'] = self.readEffects(fields[15:]) <NEW_LINE> self.fid_stats[mid].update(attr_val) | Statistics for spells, with functions for importing/exporting from/to
mod/text file. | 6259908499fddb7c1ca63b72 |
@doc_subst(_doc_snippets) <NEW_LINE> class LineOptions(HasTraits): <NEW_LINE> <INDENT> stroke_color = geotraitlets.ColorAlpha( allow_none=False, default_value=DEFAULT_STROKE_COLOR ).tag(sync=True) <NEW_LINE> stroke_weight = Float( min=0.0, allow_none=False, default_value=2.0 ).tag(sync=True) <NEW_LINE> stroke_opacity = geotraitlets.StrokeOpacity().tag(sync=True) <NEW_LINE> def to_line(self, start, end): <NEW_LINE> <INDENT> new_line = Line( start=start, end=end, stroke_color=self.stroke_color, stroke_weight=self.stroke_weight, stroke_opacity=self.stroke_opacity ) <NEW_LINE> return new_line | Style options for a line
Pass an instance of this class to :func:`gmaps.drawing_layer` to
control the style of new user-drawn lines on the map.
:Examples:
>>> fig = gmaps.figure()
>>> drawing = gmaps.drawing_layer(
marker_options=gmaps.MarkerOptions(hover_text='some text'),
line_options=gmaps.LineOptions(stroke_color='red')
)
>>> fig.add_layer(drawing)
>>> fig # display the figure
{stroke_options_params} | 62599084099cdd3c63676192 |
class SupportEngineer(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'email_address': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'email_address': {'key': 'emailAddress', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SupportEngineer, self).__init__(**kwargs) <NEW_LINE> self.email_address = None | Support engineer information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar email_address: Email address of the Azure Support engineer assigned to the support
ticket.
:vartype email_address: str | 62599084aad79263cf4302eb |
class PageListShop(PageList): <NEW_LINE> <INDENT> def __init__(self, win, api, set_statusbar, page): <NEW_LINE> <INDENT> page_name = page[0] <NEW_LINE> col_category = page[1] <NEW_LINE> col_category_json = page[2] <NEW_LINE> view_cols = [ ["Date", 9, 'date', 'd'], ["Item", 25, 'item', 'i'], [col_category, 20, 'category', col_category_json], ["Cost", 10, 'cost', 'c'], ["Shop", 20, 'shop', 's'] ] <NEW_LINE> self.cols = { 'view': view_cols, 'edit': view_cols } <NEW_LINE> self.col_category_json = col_category_json <NEW_LINE> super().__init__(win, api, set_statusbar, page_name) <NEW_LINE> <DEDENT> def calculate_data(self): <NEW_LINE> <INDENT> return [{ 'id': item['I'], 'date': item['d'], 'item': item['i'], 'category': item[self.col_category_json], 'cost': item['c'], 'shop': item['s'] } for item in self.data['data']] | used for things like food, socials etc. | 62599084dc8b845886d550eb |
class Location: <NEW_LINE> <INDENT> def __init__(self, data: dict) -> None: <NEW_LINE> <INDENT> self.code = data["LocationCode"] <NEW_LINE> self.point = geopy.Point(data["Latitude"], data["Longitude"]) <NEW_LINE> self.facility_owned = bool(data["FacilityOwnedByCarvana"]) <NEW_LINE> self.trips = [] <NEW_LINE> self.visited = False <NEW_LINE> <DEDENT> def __hash__(self) -> Hashable: <NEW_LINE> <INDENT> return hash(self.code) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return ( f"Location(Code:{self.code},Coordinate:{self.point}," f"FacilityOwned:{self.facility_owned})" ) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return ( f"Location: {self.code}\n" f"\tCoordinate: {self.point}\n" f"\tFacilityOwned: {self.facility_owned}" ) <NEW_LINE> <DEDENT> def __eq__(self, other: object) -> bool: <NEW_LINE> <INDENT> if isinstance(other, str): <NEW_LINE> <INDENT> return self.code is other <NEW_LINE> <DEDENT> elif not isinstance(other, Location): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return self.code is other.code <NEW_LINE> <DEDENT> def add_trip(self, trip: Trip) -> Trips: <NEW_LINE> <INDENT> self.trips.append(trip) <NEW_LINE> return self.trips | The Node/Vertex of the graph.
Contains an adjacency list stored in the `trips` attribute used to build the
adjacency graph. Also has a `visited` attribute used in the search to avoid
cycles. | 62599084283ffb24f3cf53d2 |
class Rectangle: <NEW_LINE> <INDENT> number_of_instances = 0 <NEW_LINE> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> Rectangle.number_of_instances += 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("width must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("width must be >= 0") <NEW_LINE> <DEDENT> self.__width = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("height must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("height must be >= 0") <NEW_LINE> <DEDENT> self.__height = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.height * self.width <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.width == 0 or self.height == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return (2 * self.height) + (2 * self.width) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.width == 0 or self.height == 0: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> Rect = "" <NEW_LINE> for i in range(self.height): <NEW_LINE> <INDENT> Rect += "#" * self.width <NEW_LINE> Rect += "\n" <NEW_LINE> <DEDENT> return Rect[:-1] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Rectangle({}, {})".format(self.width, self.height) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print("Bye rectangle...") <NEW_LINE> Rectangle.number_of_instances -= 1 | Show the attribute of rectangle | 625990847047854f46340ee7 |
class Status(NamedTuple): <NEW_LINE> <INDENT> instance_name: str <NEW_LINE> driver_name: str <NEW_LINE> provisioner_name: str <NEW_LINE> scenario_name: str <NEW_LINE> created: bool <NEW_LINE> converged: bool | Scenario status information. | 62599084ec188e330fdfa3de |
class _SlotPriorityQueues(object): <NEW_LINE> <INDENT> def __init__(self, pqfactory, slot_startprios=None): <NEW_LINE> <INDENT> self.pqfactory = pqfactory <NEW_LINE> self.pqueues = {} <NEW_LINE> for slot, startprios in (slot_startprios or {}).items(): <NEW_LINE> <INDENT> self.pqueues[slot] = self.pqfactory(startprios) <NEW_LINE> <DEDENT> <DEDENT> def pop_slot(self, slot): <NEW_LINE> <INDENT> queue = self.pqueues[slot] <NEW_LINE> request = queue.pop() <NEW_LINE> if len(queue) == 0: <NEW_LINE> <INDENT> del self.pqueues[slot] <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> def push_slot(self, slot, obj, priority): <NEW_LINE> <INDENT> if slot not in self.pqueues: <NEW_LINE> <INDENT> self.pqueues[slot] = self.pqfactory() <NEW_LINE> <DEDENT> queue = self.pqueues[slot] <NEW_LINE> queue.push(obj, priority) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> active = {slot: queue.close() for slot, queue in self.pqueues.items()} <NEW_LINE> self.pqueues.clear() <NEW_LINE> return active <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return sum(len(x) for x in self.pqueues.values()) if self.pqueues else 0 | Container for multiple priority queues. | 62599084283ffb24f3cf53d3 |
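A minimal sketch of driving the slot container above with a hypothetical heapq-backed factory; the factory only has to satisfy the interface the class implies (accept optional start priorities and expose push, pop, close, and __len__). The real downstream queue classes are not shown in this row.

```python
import heapq

class TinyPriorityQueue:
    """Hypothetical pqfactory product: lowest priority value pops first."""
    def __init__(self, startprios=None):
        self._heap = []
        self._count = 0                      # tie-breaker keeps equal priorities insertion-ordered
        for prio in (startprios or []):
            self.push(None, prio)

    def push(self, obj, priority):
        heapq.heappush(self._heap, (priority, self._count, obj))
        self._count += 1

    def pop(self):
        return heapq.heappop(self._heap)[2]

    def close(self):
        return [prio for prio, _, _ in self._heap]

    def __len__(self):
        return len(self._heap)

slots = _SlotPriorityQueues(TinyPriorityQueue)
slots.push_slot("example.com", "request-A", priority=1)
slots.push_slot("example.com", "request-B", priority=0)
print(slots.pop_slot("example.com"))   # request-B (lower value pops first)
print(len(slots))                      # 1
```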
class SimpleHistoryOneToOneField(OneToOneField): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> sh_to_field = kwargs.pop("sh_to_field",None) <NEW_LINE> if sh_to_field is not None: <NEW_LINE> <INDENT> self.sh_to_field = sh_to_field <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("'sh_to_field' is a required kwarg.") <NEW_LINE> <DEDENT> super(OneToOneField,self).__init__(*args, **kwargs) | Allows a OneToOneField to work when
'to' is a string
Kwarg 'sh_to_field' should be a field instance
with the same arguments as the 'to' field. | 625990845fdd1c0f98e5fab3 |
class Almost: <NEW_LINE> <INDENT> def __init__(self, value, offset=0.00001): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> self.offset = offset <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return abs(other - self.value) < self.offset | Compares a float value with a certain jitter.
| 6259908497e22403b383ca2f |
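A brief usage sketch for the Almost helper above (assuming it is in scope); it is handy in tests where exact float equality is too strict.

```python
print(Almost(1.0) == 1.000001)             # True: within the default 1e-5 window
print(Almost(1.0) == 1.1)                  # False: difference exceeds the offset
print(Almost(100.0, offset=0.5) == 100.3)  # True: wider, explicit tolerance

# Typical test usage: compare a computed value against an expected constant.
assert Almost(0.1 + 0.2) == 0.3
```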
class TestCLIInitDb(BaseTest): <NEW_LINE> <INDENT> def test_init_db(self): <NEW_LINE> <INDENT> runner = CliRunner() <NEW_LINE> result = runner.invoke( init_db_cli, catch_exceptions=False, ) <NEW_LINE> self.assertEqual(result.exit_code, 0) | Test case for init_db_cli cli method. | 6259908423849d37ff852bed |
class CategorymanagerAssetsAnnotationTagsListRequest(_messages.Message): <NEW_LINE> <INDENT> name = _messages.StringField(1, required=True) <NEW_LINE> pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32) <NEW_LINE> pageToken = _messages.StringField(3) <NEW_LINE> subAssetName = _messages.StringField(4) | A CategorymanagerAssetsAnnotationTagsListRequest object.
Fields:
name: [Required] Resource name of the asset, must be RFC3986 escaped.
pageSize: The maximum number of items to return.
pageToken: The next_page_token value returned from a previous List
request, if any.
subAssetName: A finer grained sub asset of the asset to list annotation
tags. For Bigquery, provide the name of the column you list annotation
tags. If this field is empty, list annotation tags that are associated
with the entire asset. | 62599084adb09d7d5dc0c08e |
class SatelliteSchema(Schema): <NEW_LINE> <INDENT> id = fields.Int(dump_only=True) <NEW_LINE> timestamp = fields.DateTime(dump_only=True) <NEW_LINE> sets = fields.Nested(SetSchema, many=True) | Satellite Schema | 6259908463b5f9789fe86c9d |
class Menu: <NEW_LINE> <INDENT> def __init__(self, name, items, start_time, end_time): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.items = items <NEW_LINE> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{name} menu available from {start} to {end}".format(name=self.name, start=self.start_time, end=self.end_time) <NEW_LINE> <DEDENT> def calculate_bill(self, purchased_items): <NEW_LINE> <INDENT> bill = 0 <NEW_LINE> for purchased_item in purchased_items: <NEW_LINE> <INDENT> price = self.items[purchased_item] <NEW_LINE> bill += price <NEW_LINE> <DEDENT> return bill | Class that represents a restaurant menu. | 62599084796e427e538502af
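A short usage sketch for the Menu class above (assuming it is in scope); the item names, prices, and opening hours are illustrative only.

```python
brunch_items = {"pancakes": 7.50, "coffee": 3.00, "omelette": 9.00}
brunch = Menu("Brunch", brunch_items, start_time=11, end_time=16)

print(brunch)                                          # Brunch menu available from 11 to 16
print(brunch.calculate_bill(["pancakes", "coffee"]))   # 10.5
```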
class CustomToken(Base): <NEW_LINE> <INDENT> _CREDENTIAL_TYPE = 'authorized_user' <NEW_LINE> def __init__(self, token, project_id): <NEW_LINE> <INDENT> super(CustomToken, self).__init__() <NEW_LINE> self._project_id = project_id <NEW_LINE> self._g_credential = credentials.Credentials(token=token, scopes=_scopes) <NEW_LINE> <DEDENT> @property <NEW_LINE> def client_id(self): <NEW_LINE> <INDENT> return self._g_credential.client_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def client_secret(self): <NEW_LINE> <INDENT> return self._g_credential.client_secret <NEW_LINE> <DEDENT> @property <NEW_LINE> def refresh_token(self): <NEW_LINE> <INDENT> return self._g_credential.refresh_token <NEW_LINE> <DEDENT> @property <NEW_LINE> def project_id(self): <NEW_LINE> <INDENT> return self._project_id <NEW_LINE> <DEDENT> def get_credential(self): <NEW_LINE> <INDENT> return self._g_credential | A credential initialized from an existing refresh token. | 625990847047854f46340ee9 |
class SnmpManagerPost(object): <NEW_LINE> <INDENT> swagger_types = { 'host': 'str', 'notification': 'str', 'v2c': 'SnmpV2c', 'v3': 'SnmpV3Post', 'version': 'str' } <NEW_LINE> attribute_map = { 'host': 'host', 'notification': 'notification', 'v2c': 'v2c', 'v3': 'v3', 'version': 'version' } <NEW_LINE> required_args = { } <NEW_LINE> def __init__( self, host=None, notification=None, v2c=None, v3=None, version=None, ): <NEW_LINE> <INDENT> if host is not None: <NEW_LINE> <INDENT> self.host = host <NEW_LINE> <DEDENT> if notification is not None: <NEW_LINE> <INDENT> self.notification = notification <NEW_LINE> <DEDENT> if v2c is not None: <NEW_LINE> <INDENT> self.v2c = v2c <NEW_LINE> <DEDENT> if v3 is not None: <NEW_LINE> <INDENT> self.v3 = v3 <NEW_LINE> <DEDENT> if version is not None: <NEW_LINE> <INDENT> self.version = version <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> if key not in self.attribute_map: <NEW_LINE> <INDENT> raise KeyError("Invalid key `{}` for `SnmpManagerPost`".format(key)) <NEW_LINE> <DEDENT> self.__dict__[key] = value <NEW_LINE> <DEDENT> def __getattribute__(self, item): <NEW_LINE> <INDENT> value = object.__getattribute__(self, item) <NEW_LINE> if isinstance(value, Property): <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> if hasattr(self, attr): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if issubclass(SnmpManagerPost, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, SnmpManagerPost): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition. | 62599084ec188e330fdfa3e0 |
class TopRecipientOrganizationsResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | Retrieve the value for the "Response" output from this choreography execution. ((json) The response from Influence Explorer.) | 6259908426068e7796d4e476 |
class ScalarLocalBasis(ScalarNode): <NEW_LINE> <INDENT> def __new__(cls, u=None, v=None, tag=None): <NEW_LINE> <INDENT> tag = tag or random_string( 6 ) <NEW_LINE> obj = Basic.__new__(cls, tag) <NEW_LINE> obj._test = v <NEW_LINE> obj._trial = u <NEW_LINE> return obj <NEW_LINE> <DEDENT> @property <NEW_LINE> def tag(self): <NEW_LINE> <INDENT> return self._args[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def trial(self): <NEW_LINE> <INDENT> return self._trial <NEW_LINE> <DEDENT> @property <NEW_LINE> def test(self): <NEW_LINE> <INDENT> return self._test | This is used to describe scalar dof over an element | 6259908450812a4eaa62195f |
class HasFile(): <NEW_LINE> <INDENT> def __init__(self, filefield): <NEW_LINE> <INDENT> self.filefield = filefield <NEW_LINE> <DEDENT> def __call__(self, form, field): <NEW_LINE> <INDENT> filename = form._fields.get(self.filefield).data.filename.strip() <NEW_LINE> if len(filename) == 0: <NEW_LINE> <INDENT> raise ValidationError('No file provided') | Validator to check if the form has a file in a given field | 6259908423849d37ff852bef |
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = util.Counter() <NEW_LINE> for p in self.legalPositions: self.beliefs[p] = 1.0 <NEW_LINE> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observe(self, observation, gameState): <NEW_LINE> <INDENT> noisyDistance = observation <NEW_LINE> emissionModel = busters.getObservationDistribution(noisyDistance) <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> if noisyDistance is None: <NEW_LINE> <INDENT> allPossible[self.getJailPosition()] = 1.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for p in self.legalPositions: <NEW_LINE> <INDENT> Truedist = util.manhattanDistance(p, pacmanPosition) <NEW_LINE> if emissionModel[Truedist] > 0: <NEW_LINE> <INDENT> allPossible[p] = self.beliefs[p] * emissionModel[Truedist] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> "*** END YOUR CODE HERE ***" <NEW_LINE> allPossible.normalize() <NEW_LINE> self.beliefs = allPossible <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> for oldPos in self.legalPositions: <NEW_LINE> <INDENT> newPosDist = self.getPositionDistribution(self.setGhostPosition(gameState, oldPos)) <NEW_LINE> for newPos, prob in newPosDist.items(): <NEW_LINE> <INDENT> allPossible[newPos] += prob * self.beliefs[oldPos] <NEW_LINE> <DEDENT> <DEDENT> allPossible.normalize() <NEW_LINE> self.beliefs = allPossible <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs | The exact dynamic inference module should use forward-algorithm
updates to compute the exact belief function at each time step. | 62599084a05bb46b3848bec2 |
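The observe step above is the standard forward-algorithm update: scale each prior belief by the emission probability of the noisy reading at that position, then renormalize. The standalone sketch below shows the same arithmetic with plain dicts and made-up emission values rather than the busters model.

```python
def observe_update(beliefs, emission_prob):
    """posterior(p) is proportional to prior(p) * P(noisy reading | true distance at p)."""
    posterior = {p: beliefs[p] * emission_prob(p) for p in beliefs}
    total = sum(posterior.values())
    return {p: v / total for p, v in posterior.items()}

prior = {(1, 1): 0.25, (1, 2): 0.25, (2, 1): 0.25, (2, 2): 0.25}
# Hypothetical emission model: positions consistent with the reading are more likely.
emission = {(1, 1): 0.1, (1, 2): 0.4, (2, 1): 0.4, (2, 2): 0.1}.get
print(observe_update(prior, emission))   # beliefs renormalize to ~0.1, 0.4, 0.4, 0.1
```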
class VisualTreeChangeType(Enum,IComparable,IFormattable,IConvertible): <NEW_LINE> <INDENT> def __eq__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __format__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ge__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __gt__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __le__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __lt__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ne__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __reduce_ex__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Add=None <NEW_LINE> Remove=None <NEW_LINE> value__=None | enum VisualTreeChangeType,values: Add (0),Remove (1) | 6259908471ff763f4b5e92e2 |
class PythonRunner(): <NEW_LINE> <INDENT> def __init__(self, module_string, args, func_name='execute'): <NEW_LINE> <INDENT> assert isinstance(args, dict), ('Args must be a dict, ' '%s (%s) found instead' % (args, type(args))) <NEW_LINE> module, module_name = locate_module(module_string) <NEW_LINE> timestamp = datetime.datetime.now().strftime("%Y-%m-%d--%H_%M_%S") <NEW_LINE> filename = '%s-log-%s.txt' % (module_name, timestamp) <NEW_LINE> tempdir = os.path.join(args['workspace_dir'], 'tmp') <NEW_LINE> for path in [args['workspace_dir'], tempdir]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(tempdir) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> log_file_uri = os.path.join(args['workspace_dir'], filename) <NEW_LINE> self.executor = Executor(module, args, func_name, log_file_uri, tempdir=tempdir) <NEW_LINE> self._checker = RepeatingTimer(0.1, self._check_executor) <NEW_LINE> self.args = args <NEW_LINE> self.started = Communicator() <NEW_LINE> self.finished = Communicator() <NEW_LINE> self.failed = None <NEW_LINE> self.traceback = None <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.failed = None <NEW_LINE> self.traceback = None <NEW_LINE> self.executor.start() <NEW_LINE> LOGGER.debug('Started executor thread') <NEW_LINE> self._checker.start() <NEW_LINE> LOGGER.debug('Started checker thread') <NEW_LINE> self.started.emit(thread_name=self.executor.name, thread_args=self.args) <NEW_LINE> <DEDENT> def is_finished(self): <NEW_LINE> <INDENT> if self.executor is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return not self.executor.is_alive() <NEW_LINE> <DEDENT> <DEDENT> def _check_executor(self): <NEW_LINE> <INDENT> if not self.executor.is_alive(): <NEW_LINE> <INDENT> self._checker.cancel() <NEW_LINE> self.failed = self.executor.failed <NEW_LINE> self.traceback = self.executor.traceback <NEW_LINE> self.finished.emit(thread_name=self.executor.name, thread_failed=self.executor.failed, thread_traceback=self.executor.traceback) <NEW_LINE> del self.executor <NEW_LINE> self.executor = None | Wrapper object for the executor class
* Loads the target module
* Creates the output workspace
* Runs the executor
* contains communicator event objects that other functions can register
with. | 62599084adb09d7d5dc0c090 |
class _Connection_Pool( object ): <NEW_LINE> <INDENT> _defpoolsize = 2 <NEW_LINE> _logmode = MythLog.SOCKET <NEW_LINE> @classmethod <NEW_LINE> def setDefaultSize(cls, size): <NEW_LINE> <INDENT> cls._defpoolsize = size <NEW_LINE> <DEDENT> def resizePool(self, size): <NEW_LINE> <INDENT> if size < 1: <NEW_LINE> <INDENT> size = 1 <NEW_LINE> <DEDENT> diff = size - self._poolsize <NEW_LINE> self._poolsize = size <NEW_LINE> while diff: <NEW_LINE> <INDENT> if diff > 0: <NEW_LINE> <INDENT> self._pool.append(self._connect()) <NEW_LINE> diff -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> conn = self._pool.pop() <NEW_LINE> conn.close() <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> key,conn = self._inuse.popitem() <NEW_LINE> conn.close() <NEW_LINE> <DEDENT> diff += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self._pool = [] <NEW_LINE> self._inuse = {} <NEW_LINE> self._refs = {} <NEW_LINE> self._stack = {} <NEW_LINE> self._poolsize = self._defpoolsize <NEW_LINE> for i in range(self._poolsize): <NEW_LINE> <INDENT> self._pool.append(self._connect()) <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> for conn in self._pool: <NEW_LINE> <INDENT> conn.close() <NEW_LINE> <DEDENT> for id,conn in self._inuse.items(): <NEW_LINE> <INDENT> conn.close() <NEW_LINE> <DEDENT> <DEDENT> def acquire(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> conn = self._pool.pop(0) <NEW_LINE> self._inuse[id(conn)] = conn <NEW_LINE> self.log(self._logmode, MythLog.DEBUG, 'Acquiring connection from pool') <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> conn = self._connect() <NEW_LINE> <DEDENT> return conn <NEW_LINE> <DEDENT> def release(self, id): <NEW_LINE> <INDENT> conn = self._inuse.pop(id) <NEW_LINE> self._pool.append(conn) <NEW_LINE> self.log(self._logmode, MythLog.DEBUG, 'Releasing connection to pool') | Provides a scaling connection pool to access a shared resource. | 62599084aad79263cf4302f1 |
class MainPage(Handler): <NEW_LINE> <INDENT> def render_posts(self, title="", blogtext="", error=""): <NEW_LINE> <INDENT> blogs = db.GqlQuery("SELECT * FROM BlogDB ORDER BY created DESC LIMIT 5") <NEW_LINE> self.render("posts.html", title=title, blogtext=blogtext, error=error, blogs=blogs) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> self.render_posts() | handles the first page | 625990845fdd1c0f98e5fab6 |
class UnorderedList(Base): <NEW_LINE> <INDENT> def __init__(self, raw, parsed_blocks_list, style_cls): <NEW_LINE> <INDENT> super(UnorderedList, self).__init__(raw, style_cls) <NEW_LINE> self.parsed_blocks_list = parsed_blocks_list <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, raw, style_cls): <NEW_LINE> <INDENT> lines = raw.strip().split('\n') <NEW_LINE> items = [] <NEW_LINE> for l in lines: <NEW_LINE> <INDENT> if l.startswith('* '): <NEW_LINE> <INDENT> items.append(l[2:]) <NEW_LINE> <DEDENT> elif l.startswith(' '): <NEW_LINE> <INDENT> if not items: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> items[-1] += '\n' + l[2:] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> from n0ted0wn.Block.Parser import Parser <NEW_LINE> return cls(raw, [Parser(style_cls, 0).parse(item) for item in items], style_cls) | Implements parsing for the following block format.
* Item 1
* Item 2
* Item 3 | 625990844c3428357761bdf1 |
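For reference, the raw block format the parse classmethod above accepts looks like the string below: top-level items start with "* " and two-space-indented lines continue the previous item. This snippet mirrors only the line-splitting rule in standalone form; the real method additionally hands each item to Parser(style_cls, 0).parse.

```python
raw = (
    "* Item 1\n"
    "* Item 2\n"
    "  continues item 2 on a second line\n"
    "* Item 3\n"
)

# Standalone mirror of the splitting rule in UnorderedList.parse.
items = []
for line in raw.strip().split("\n"):
    if line.startswith("* "):
        items.append(line[2:])
    elif line.startswith("  "):
        items[-1] += "\n" + line[2:]

print(items)  # ['Item 1', 'Item 2\ncontinues item 2 on a second line', 'Item 3']
```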
class Operation(Loggable): <NEW_LINE> <INDENT> name = dict( type = "text" ) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.name = "Transaction" | The operation entity class, representing a logical
operation and attributes. | 6259908492d797404e3898f8 |
class Asyncer: <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> <DEDENT> async def _a_func(self, *args): <NEW_LINE> <INDENT> return self.func(*args) <NEW_LINE> <DEDENT> async def _async_tasker(self, args): <NEW_LINE> <INDENT> tasks = [asyncio.ensure_future(self._a_func(*arg)) for arg in args] <NEW_LINE> return await asyncio.gather(*tasks) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> async def _fetch(cls, url, mode='text', timeout=0): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> timeout = aiohttp.ClientTimeout(timeout) <NEW_LINE> async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False), timeout=timeout) as session: <NEW_LINE> <INDENT> async with session.get(url) as response: <NEW_LINE> <INDENT> print('{} - {}'.format(url, 'wait... ')) <NEW_LINE> if mode == 'json': <NEW_LINE> <INDENT> resp = await response.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resp = await response.text() <NEW_LINE> <DEDENT> print('{} - {}'.format(url, 'done')) <NEW_LINE> return {'url': url, 'response': resp} <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print('{} - {}'.format(url, 'FAIL')) <NEW_LINE> return {'url': url, 'response': None} <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> async def _fetcher(cls, urls, mode='text', timeout=0): <NEW_LINE> <INDENT> tasks = [] <NEW_LINE> for url in urls: <NEW_LINE> <INDENT> tasks.append(asyncio.ensure_future(cls._fetch(url, mode, timeout))) <NEW_LINE> <DEDENT> return await asyncio.gather(*tasks) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def async_fetch(cls, urls, mode='text', timeout=0): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ioloop = asyncio.get_event_loop() <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> ioloop = asyncio.new_event_loop() <NEW_LINE> <DEDENT> return ioloop.run_until_complete(cls._fetcher(urls, mode)) <NEW_LINE> <DEDENT> def async_run(self, args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ioloop = asyncio.get_event_loop() <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> ioloop = asyncio.new_event_loop() <NEW_LINE> <DEDENT> return ioloop.run_until_complete(self._async_tasker(args)) | The main and only class in the module. | 6259908471ff763f4b5e92e4
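A brief usage sketch for the class above, assuming asyncio and aiohttp are installed and the class is in scope: async_run fans a plain function out over a list of argument tuples, while async_fetch gathers several URLs concurrently (the URLs are illustrative and require network access).

```python
# CPU-side fan-out: each tuple becomes one call to the wrapped function.
adder = Asyncer(lambda a, b: a + b)
print(adder.async_run([(1, 2), (3, 4), (5, 6)]))   # [3, 7, 11]

# Network fan-out: results come back as {'url': ..., 'response': ...} dicts.
results = Asyncer.async_fetch(["https://example.com", "https://example.org"])
for item in results:
    print(item["url"], "ok" if item["response"] else "failed")
```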
class Nice(Strategy): <NEW_LINE> <INDENT> def decide(self, _): <NEW_LINE> <INDENT> return coop() if rnd(3) else defeat() | Cooperates (almost) always. | 6259908444b2445a339b76f8 |
class TestParameterLambdaRuntime(BaseRuleTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestParameterLambdaRuntime, self).setUp() <NEW_LINE> self.collection.register(LambdaRuntime()) <NEW_LINE> <DEDENT> def atest_file_positive(self): <NEW_LINE> <INDENT> self.helper_file_positive() <NEW_LINE> <DEDENT> def test_file_negative(self): <NEW_LINE> <INDENT> self.helper_file_negative('fixtures/templates/bad/resources_lambda.yaml', 1) | Test template parameter configurations | 62599084283ffb24f3cf53d8 |
class ConvLayer(object): <NEW_LINE> <INDENT> def __init__(self, rng, input, filter_shape, image_shape, activation=None, W_values=None, b_values=None, use_adam=False): <NEW_LINE> <INDENT> assert image_shape[1] == filter_shape[1] <NEW_LINE> self.input = input <NEW_LINE> "fan_in - #of input to convolution layer = #of input feature maps * filter height * filter width" <NEW_LINE> fan_in = np.prod(filter_shape[1:]) <NEW_LINE> fan_out = (filter_shape[0]) * np.prod(filter_shape[2:]) <NEW_LINE> if W_values is None: <NEW_LINE> <INDENT> W_bound = np.sqrt(6. / (fan_in + fan_out)) <NEW_LINE> W_values = np.asarray( rng.uniform(low=-1.0*W_bound, high=1.2*W_bound, size=filter_shape), dtype=theano.config.floatX ) <NEW_LINE> <DEDENT> self.W = theano.shared(W_values, borrow=True) <NEW_LINE> if b_values is None: <NEW_LINE> <INDENT> b_values = np.zeros((filter_shape[0],), dtype=theano.config.floatX) <NEW_LINE> <DEDENT> self.b = theano.shared(value=b_values, borrow=True) <NEW_LINE> if use_adam: <NEW_LINE> <INDENT> zero_W_values = np.zeros(filter_shape, dtype=theano.config.floatX) <NEW_LINE> self.W_m = theano.shared(value=zero_W_values, borrow=False) <NEW_LINE> self.W_v = theano.shared(value=zero_W_values, borrow=False) <NEW_LINE> self.b_m = theano.shared(value=b_values, borrow=False) <NEW_LINE> self.b_v = theano.shared(value=b_values, borrow=False) <NEW_LINE> <DEDENT> "conv_out: 4d tensor (batch_size, #of output feature map, output height, output width)" <NEW_LINE> conv_out = conv2d( input=input, filters=self.W, filter_shape=filter_shape, border_mode='valid', input_shape=image_shape ) <NEW_LINE> conv_out_plus_b = conv_out + self.b.dimshuffle('x', 0, 'x', 'x') <NEW_LINE> self.output = T.switch(T.lt(conv_out_plus_b, - 1179 / 256.), 0, T.switch(T.lt(conv_out_plus_b, 1 / 256.), conv_out_plus_b / 1280. + 1279 / 327680., T.switch(T.lt(conv_out_plus_b, 255 / 256.), conv_out_plus_b, T.switch(T.lt(conv_out_plus_b, 1535 / 256.), conv_out_plus_b / 1280. + 326145. / 327680., 1)))) <NEW_LINE> self.params = [self.W, self.b] <NEW_LINE> if use_adam: <NEW_LINE> <INDENT> self.params_m = [self.W_m, self.b_m] <NEW_LINE> self.params_v = [self.W_v, self.b_v] <NEW_LINE> <DEDENT> <DEDENT> def cost(self, y): <NEW_LINE> <INDENT> return T.mean(T.sqr(y - self.output)) | Convolution Layer
| 62599084adb09d7d5dc0c092 |
class Mode_Countdown(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, tb, hosts, set_current_mode, choreography): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.tb = tb <NEW_LINE> self.hosts = hosts <NEW_LINE> self.mode_names = settings.Game_Modes <NEW_LINE> self.set_current_mode = set_current_mode <NEW_LINE> self.choreography = choreography <NEW_LINE> self.queue = queue.Queue() <NEW_LINE> self.game_mode_names = settings.Game_Modes <NEW_LINE> self.animation = Animation(hosts,set_current_mode,choreography) <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def begin(self): <NEW_LINE> <INDENT> print("mode_countdown Mode_Countdown.begin 1") <NEW_LINE> self.animation.add_to_queue("set_comienza_buttons",self.hosts.get_games_with_players()) <NEW_LINE> print("mode_countdown Mode_Countdown.begin 2") <NEW_LINE> self.animation.add_to_queue("begin",None) <NEW_LINE> print("mode_countdown Mode_Countdown.begin 3") <NEW_LINE> self.animation.animation_frame_counter = 0 <NEW_LINE> print("mode_countdown Mode_Countdown.begin 4") <NEW_LINE> <DEDENT> def end(self): <NEW_LINE> <INDENT> print("mode_countdown.end") <NEW_LINE> self.animation.add_to_queue("end",None) <NEW_LINE> <DEDENT> def respond_host_connected(self, message, origin, destination): <NEW_LINE> <INDENT> if self.hosts.get_all_host_connected() == True: <NEW_LINE> <INDENT> self.set_current_mode(self.game_mode_names.SYSTEM_TESTS) <NEW_LINE> <DEDENT> <DEDENT> def event_button_comienza(self, message, origin, destination): <NEW_LINE> <INDENT> self.hosts.set_games_with_players(origin) <NEW_LINE> self.animation.add_to_queue("set_comienza_buttons",self.hosts.get_games_with_players()) <NEW_LINE> <DEDENT> def add_to_queue(self, topic, message, origin, destination): <NEW_LINE> <INDENT> self.queue.put((topic, message, origin, destination)) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> topic, message, origin, destination = self.queue.get(True) <NEW_LINE> if isinstance(topic, bytes): <NEW_LINE> <INDENT> topic = codecs.decode(topic, 'UTF-8') <NEW_LINE> <DEDENT> if isinstance(message, bytes): <NEW_LINE> <INDENT> message = codecs.decode(message, 'UTF-8') <NEW_LINE> <DEDENT> if isinstance(origin, bytes): <NEW_LINE> <INDENT> origin = codecs.decode(origin, 'UTF-8') <NEW_LINE> <DEDENT> if isinstance(destination, bytes): <NEW_LINE> <INDENT> destination = codecs.decode(destination, 'UTF-8') <NEW_LINE> <DEDENT> getattr(self,topic)( message, origin, destination, ) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass | This class watches for incoming messages
Its only action will be to change the current mode | 62599084d486a94d0ba2daee |
class Ordering: <NEW_LINE> <INDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ordering): <NEW_LINE> <INDENT> other = self.literal(value=other) <NEW_LINE> <DEDENT> return self.operator(evaluator=operator.eq, operands=(self, other)) <NEW_LINE> <DEDENT> __hash__ = object.__hash__ <NEW_LINE> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ordering): <NEW_LINE> <INDENT> other = self.literal(value=other) <NEW_LINE> <DEDENT> return self.operator(evaluator=operator.ne, operands=(self, other)) <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ordering): <NEW_LINE> <INDENT> other = self.literal(value=other) <NEW_LINE> <DEDENT> return self.operator(evaluator=operator.le, operands=(self, other)) <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ordering): <NEW_LINE> <INDENT> other = self.literal(value=other) <NEW_LINE> <DEDENT> return self.operator(evaluator=operator.ge, operands=(self, other)) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ordering): <NEW_LINE> <INDENT> other = self.literal(value=other) <NEW_LINE> <DEDENT> return self.operator(evaluator=operator.lt, operands=(self, other)) <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ordering): <NEW_LINE> <INDENT> other = self.literal(value=other) <NEW_LINE> <DEDENT> return self.operator(evaluator=operator.gt, operands=(self, other)) | This is a mix-in class that traps comparisons
The point is to redirect comparisons among instances of subclasses of {Ordering} to methods
defined in these subclasses. These methods then build and return representations of the
corresponding operators and their operands.
{Ordering} expects its subclasses to define two methods: {literal} and {operator}. The
former is used to encapsulate operands that are not {Ordering} instances. The latter is
used to construct the operator representations. | 62599084167d2b6e312b8332 |
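A minimal sketch of the subclass contract the docstring describes, using hypothetical node classes rather than the library's own: literal wraps non-Ordering operands and operator records the comparison instead of evaluating it.

```python
class Node(Ordering):
    """Hypothetical expression node honoring the Ordering contract."""
    def literal(self, value):
        return Literal(value)

    def operator(self, evaluator, operands):
        return Operator(evaluator, operands)

class Literal(Node):
    def __init__(self, value):
        self.value = value

class Operator(Node):
    def __init__(self, evaluator, operands):
        self.evaluator = evaluator
        self.operands = operands

expr = Literal(3) < Literal(5)   # builds a representation; nothing is compared yet
print(type(expr).__name__)       # Operator
print(expr.evaluator(3, 5))      # True, if/when the expression is finally evaluated
```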
class ZMQClientBaseTest(BaseClientTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = ZMQClientBase(ENDPOINT_CLIENT_HANDLER, ENDPOINT_PUBLISHER) <NEW_LINE> self.server = Server(ENDPOINT_APPLICATION_HANDLER, ENDPOINT_CLIENT_HANDLER, ENDPOINT_PUBLISHER) <NEW_LINE> <DEDENT> def test___init__(self): <NEW_LINE> <INDENT> self.assertIsInstance(self.client, ZMQClientBase, "Instance is not of type ZMQClientBase.") <NEW_LINE> self.assertIsInstance(self.client.speaker, Speaker, "Instance is not of type Speaker.") <NEW_LINE> self.assertIsInstance(self.client.updates, Subscriber, "Instance is not of type Subscriber.") <NEW_LINE> <DEDENT> def test__prepare_reactor(self): <NEW_LINE> <INDENT> client = self.client <NEW_LINE> client._prepare_reactor() <NEW_LINE> self.assertIsInstance(client._loop, IOLoop) <NEW_LINE> self.assertIsInstance(client.speaker._socket, ZMQStream) <NEW_LINE> self.assertIsInstance(client.updates._socket, ZMQStream) <NEW_LINE> <DEDENT> def test_client_speaker_send(self): <NEW_LINE> <INDENT> self.server.frontend.run() <NEW_LINE> speaker = self.client.speaker <NEW_LINE> speaker._socket.connect(speaker._endpoint) <NEW_LINE> speaker._socket.setsockopt(zmq.SNDTIMEO, 1000) <NEW_LINE> ret = speaker._socket.send("Hello World") <NEW_LINE> self.assertIsNone(ret, msg="zmq.Socket.send failed on client.speaker") <NEW_LINE> <DEDENT> def test_client_speaker_recv(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_client_updates_recv(self): <NEW_LINE> <INDENT> pass | Test suite for zmq_transport.client.zmq_client.ZMQClientBase | 625990845fcc89381b266ef9 |
class AddPetForm(FlaskForm): <NEW_LINE> <INDENT> name = StringField("Pet Name", validators = [InputRequired()]) <NEW_LINE> photo_url = StringField("Photo URL", validators = [Optional(), URL()]) <NEW_LINE> age = IntegerField("Age", validators = [Optional()]) <NEW_LINE> notes = TextAreaField("Notes", validators = [Optional()]) <NEW_LINE> species = SelectField("Species", validators = [InputRequired()]) | Form for adding pets | 625990847047854f46340eed |
class SmartServerRepositoryGetPhysicalLockStatus(SmartServerRepositoryRequest): <NEW_LINE> <INDENT> def do_repository_request(self, repository): <NEW_LINE> <INDENT> if repository.get_physical_lock_status(): <NEW_LINE> <INDENT> return SuccessfulSmartServerResponse((b'yes', )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return SuccessfulSmartServerResponse((b'no', )) | Get the physical lock status for a repository.
New in 2.5. | 62599084aad79263cf4302f3 |
class SingleOccurrenceForm(DorsaleBaseModelForm): <NEW_LINE> <INDENT> start_time = forms.DateTimeField(widget=SplitDateTimeWidget, localize=True, label=_('start time')) <NEW_LINE> start_time.help_text = _('-') <NEW_LINE> end_time = forms.DateTimeField(widget=SplitDateTimeWidget, localize=True, label=_('end time')) <NEW_LINE> end_time.help_text = _('-') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Occurrence <NEW_LINE> fields = ['start_time', 'end_time', ] | A simple form for adding and updating single Occurrence attributes
Required keyword parameter: `user` | 625990844527f215b58eb73c |
class PdfGeneratorUncorrelated(PdfGeneratorBase): <NEW_LINE> <INDENT> def process(self, source_map): <NEW_LINE> <INDENT> source_map_local = source_map.map[:, 0, :] <NEW_LINE> source_map_local[:] = 0 <NEW_LINE> ls = source_map.map.local_shape[0] <NEW_LINE> gs = source_map.map.global_shape[0] <NEW_LINE> z_weights = mpiarray.MPIArray.wrap(1 / gs * np.ones(ls), axis=0) <NEW_LINE> pdf_map = self.make_pdf_map(source_map, z_weights) <NEW_LINE> return pdf_map | Generate uniform PDF for making uncorrelated mocks. | 625990845fdd1c0f98e5fab9 |
class BusQueryIntent(Intent): <NEW_LINE> <INDENT> def __init__(self, dictionary = None): <NEW_LINE> <INDENT> if dictionary is not None: <NEW_LINE> <INDENT> self.origin = None <NEW_LINE> self.destination = None <NEW_LINE> self.route = None <NEW_LINE> entities = dictionary.get("entities", {}) <NEW_LINE> stops = entities.get("stop", []) <NEW_LINE> routes = entities.get("bus_route", []) <NEW_LINE> origins = entities.get("origin", []) <NEW_LINE> destinations = entities.get("destination", []) <NEW_LINE> if len(stops) > 1: <NEW_LINE> <INDENT> self.origin = stops[0].get("value") <NEW_LINE> self.destination = stops[1].get("value") <NEW_LINE> <DEDENT> elif len(stops) > 0: <NEW_LINE> <INDENT> self.origin = stops[0].get("value") <NEW_LINE> <DEDENT> if len(origins) > 0: <NEW_LINE> <INDENT> self.origin = origins[0].get("value") <NEW_LINE> <DEDENT> if len(destinations) > 0: <NEW_LINE> <INDENT> self.destination = destinations[0].get("value") <NEW_LINE> <DEDENT> if len(routes) > 0: <NEW_LINE> <INDENT> self.route = routes[0].get("value") | Represents a Bus query intent | 6259908455399d3f0562804e |
class passivecoldhead(): <NEW_LINE> <INDENT> def gettemps(self): <NEW_LINE> <INDENT> a = self.serial.readline() <NEW_LINE> self.temperatures = [float(a[i:i+5].decode('utf-8')) for i in [0,6,12,18]] <NEW_LINE> self._t = time.time() <NEW_LINE> <DEDENT> def mean_temp(self): <NEW_LINE> <INDENT> if (time.time()-self._t)>1: <NEW_LINE> <INDENT> self.gettemps() <NEW_LINE> <DEDENT> else: pass <NEW_LINE> return np.mean(self.temperatures) <NEW_LINE> <DEDENT> def std_temp(self): <NEW_LINE> <INDENT> if (time.time()-self._t)>1: <NEW_LINE> <INDENT> self.gettemps() <NEW_LINE> <DEDENT> else: pass <NEW_LINE> return np.std(self.temperatures) <NEW_LINE> <DEDENT> def gettemp(self, i): <NEW_LINE> <INDENT> if (time.time()-self._t)>1: <NEW_LINE> <INDENT> self.gettemps() <NEW_LINE> <DEDENT> else: pass <NEW_LINE> if np.abs(i)<self.nofsensors: <NEW_LINE> <INDENT> return self.temperatures[i] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -99.0 <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, comport): <NEW_LINE> <INDENT> self.comport = comport <NEW_LINE> self.nofsensors = 4 <NEW_LINE> self.serial = Serial(self.comport) <NEW_LINE> self.serial.flushInput() <NEW_LINE> self.gettemps() | Class to get the mean temperature from a Serial connection to Arduino Nano. | 6259908471ff763f4b5e92e6 |
class GitPersonInfo(Info): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = None <NEW_LINE> self.email = None <NEW_LINE> self.date = None <NEW_LINE> self.timezone = None <NEW_LINE> <DEDENT> def tokenize(self): <NEW_LINE> <INDENT> yield Token.Text, self.name <NEW_LINE> yield Token.Whitespace, ' ' <NEW_LINE> yield Token.Punctuation, '<' <NEW_LINE> yield Token.Text, self.email <NEW_LINE> yield Token.Punctuation, '>' <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parse(data): <NEW_LINE> <INDENT> person = GitPersonInfo() <NEW_LINE> person.name = data['name'] <NEW_LINE> person.email = data['email'] <NEW_LINE> person.date = data['date'] <NEW_LINE> person.timezone = data['tz'] <NEW_LINE> return person | A git person object. | 625990842c8b7c6e89bd531f |
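A short sketch of feeding the parse helper above a dict in the shape it expects (sample values are illustrative; tokenize is omitted because it yields lexer Token objects defined elsewhere in the module).

```python
person = GitPersonInfo.parse({
    "name": "Jane Doe",
    "email": "jane@example.com",
    "date": 1500000000,
    "tz": "+0000",
})
print(person.name, person.email)   # Jane Doe jane@example.com
print(person.timezone)             # +0000
```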
class AbstractProjectFavorite(AbstractTimeStamped): <NEW_LINE> <INDENT> user = models.ForeignKey(User) <NEW_LINE> project = models.ForeignKey('Project') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> unique_together = ('user', 'project') | Project favourited by a User | 6259908476e4537e8c3f10b9
class TroveSpecError(ParseError): <NEW_LINE> <INDENT> def __init__(self, spec, error): <NEW_LINE> <INDENT> self.spec = spec <NEW_LINE> ParseError.__init__(self, 'Error with spec "%s": %s' % (spec, error)) | Error parsing a trove spec (parseTroveSpec or TroveSpec) | 625990843346ee7daa3383ff |
class replaceeProp(SchemaProperty): <NEW_LINE> <INDENT> _prop_schema = 'replacee' <NEW_LINE> _expected_schema = 'Thing' <NEW_LINE> _enum = False <NEW_LINE> _format_as = "TextField" | SchemaField for replacee
Usage: Include in SchemaObject SchemaFields as your_django_field = replaceeProp()
schema.org description:A sub property of object. The object that is being replaced.
prop_schema returns just the property without url#
format_as is used by app templatetags based upon schema.org datatype
used to reference Thing | 6259908450812a4eaa621962 |
class McDatabaseHandlerException(Exception): <NEW_LINE> <INDENT> pass | Database handler exception. | 6259908444b2445a339b76fa |