code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class ActionStoreUserDetails(Action): <NEW_LINE> <INDENT> def name(self) -> Text: <NEW_LINE> <INDENT> return "action_store_user_details" <NEW_LINE> <DEDENT> def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: <NEW_LINE> <INDENT> print("Storing User details") <NEW_LINE> providerFirstName = tracker.get_slot('provider_first_name') <NEW_LINE> providerLastName = tracker.get_slot('provider_last_name') <NEW_LINE> providerNpi = tracker.get_slot('provider_npi') <NEW_LINE> claimId = tracker.get_slot('claim_id') <NEW_LINE> print("updating to local DB") <NEW_LINE> store_user_details(providerFirstName, providerLastName, providerNpi, claimId, 1) <NEW_LINE> return [] | Stores user details in database | 62599063e76e3b2f99fda108 |
class RpmScanner(Actor): <NEW_LINE> <INDENT> name = 'rpm_scanner' <NEW_LINE> consumes = () <NEW_LINE> produces = (InstalledRPM,) <NEW_LINE> tags = (IPUWorkflowTag, FactsPhaseTag) <NEW_LINE> def process(self): <NEW_LINE> <INDENT> output = check_output([ '/bin/rpm', '-qa', '--queryformat', r'%{NAME}|%{VERSION}|%{RELEASE}|%|EPOCH?{%{EPOCH}}:{(none)}||%|PACKAGER?{%{PACKAGER}}:{(none)}||%|' r'ARCH?{%{ARCH}}:{}||%|DSAHEADER?{%{DSAHEADER:pgpsig}}:{%|RSAHEADER?{%{RSAHEADER:pgpsig}}:{(none)}|}|\n' ]) <NEW_LINE> result = InstalledRPM() <NEW_LINE> for entry in output.split('\n'): <NEW_LINE> <INDENT> entry = entry.strip() <NEW_LINE> if not entry: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> name, version, release, epoch, packager, arch, pgpsig = entry.split('|') <NEW_LINE> result.items.append(RPM( name=name, version=version, epoch=epoch, packager=packager, arch=arch, release=release, pgpsig=pgpsig)) <NEW_LINE> <DEDENT> self.produce(result) | Provides data about installed RPM Packages.
After collecting data from RPM query, a message with relevant data will be produced. | 6259906399cbb53fe68325eb |
class UnitSystem(object): <NEW_LINE> <INDENT> def __init__(self: object, name: str, temperature: str, length: str, volume: str, mass: str) -> None: <NEW_LINE> <INDENT> errors = ', '.join(UNIT_NOT_RECOGNIZED_TEMPLATE.format(unit, unit_type) for unit, unit_type in [ (temperature, TEMPERATURE), (length, LENGTH), (volume, VOLUME), (mass, MASS), ] if not is_valid_unit(unit, unit_type)) <NEW_LINE> if errors: <NEW_LINE> <INDENT> raise ValueError(errors) <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.temperature_unit = temperature <NEW_LINE> self.length_unit = length <NEW_LINE> self.mass_unit = mass <NEW_LINE> self.volume_unit = volume <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_metric(self) -> bool: <NEW_LINE> <INDENT> return self.name == CONF_UNIT_SYSTEM_METRIC <NEW_LINE> <DEDENT> def temperature(self, temperature: float, from_unit: str) -> float: <NEW_LINE> <INDENT> if not isinstance(temperature, Number): <NEW_LINE> <INDENT> raise TypeError( '{} is not a numeric value.'.format(str(temperature))) <NEW_LINE> <DEDENT> return temperature_util.convert(temperature, from_unit, self.temperature_unit) <NEW_LINE> <DEDENT> def length(self, length: float, from_unit: str) -> float: <NEW_LINE> <INDENT> if not isinstance(length, Number): <NEW_LINE> <INDENT> raise TypeError('{} is not a numeric value.'.format(str(length))) <NEW_LINE> <DEDENT> return distance_util.convert(length, from_unit, self.length_unit) <NEW_LINE> <DEDENT> def as_dict(self) -> dict: <NEW_LINE> <INDENT> return { LENGTH: self.length_unit, MASS: self.mass_unit, TEMPERATURE: self.temperature_unit, VOLUME: self.volume_unit } | A container for units of measure. | 625990633eb6a72ae038bd68 |
class PublicationTracker(models.Model): <NEW_LINE> <INDENT> created_by = CurrentUserField(editable=False, related_name="%(app_label)s_%(class)s") <NEW_LINE> publication_datetime = models.DateTimeField(verbose_name="Publication datetime", auto_now_add=True, editable=False) <NEW_LINE> update_datetime = models.DateTimeField(verbose_name="Update datetime", auto_now=True, editable=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> def get_current_user(self): <NEW_LINE> <INDENT> _user = self.created_by <NEW_LINE> pre_save.send(sender=self.__class__, instance=self, raw=True, using="default") <NEW_LINE> _current = self.created_by <NEW_LINE> self.created_by = _user <NEW_LINE> return _current | Stores the author, publication and update dates of entry. | 62599063be8e80087fbc0790 |
class InstitutionUser(CherryPyAPI): <NEW_LINE> <INDENT> uuid = UUIDField(primary_key=True, default=uuid.uuid4, index=True) <NEW_LINE> user = ForeignKeyField(Users, backref='institutions') <NEW_LINE> institution = ForeignKeyField(Institutions, backref='users') <NEW_LINE> relationship = ForeignKeyField(Relationships, backref='institution_user') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> database = DB <NEW_LINE> indexes = ( (('user', 'institution', 'relationship'), True), ) <NEW_LINE> <DEDENT> def to_hash(self, **flags): <NEW_LINE> <INDENT> obj = super(InstitutionUser, self).to_hash(**flags) <NEW_LINE> obj['uuid'] = str(self.__data__['uuid']) <NEW_LINE> obj['user'] = int(self.__data__['user']) <NEW_LINE> obj['institution'] = int(self.__data__['institution']) <NEW_LINE> obj['relationship'] = str(self.__data__['relationship']) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def from_hash(self, obj): <NEW_LINE> <INDENT> super(InstitutionUser, self).from_hash(obj) <NEW_LINE> self._set_only_if('uuid', obj, 'uuid', lambda: uuid.UUID(obj['uuid'])) <NEW_LINE> self._set_only_if_by_name('relationship', obj, Relationships) <NEW_LINE> self._set_only_if( 'user', obj, 'user', lambda: Users.get(Users.id == obj['user']) ) <NEW_LINE> self._set_only_if( 'institution', obj, 'institution', lambda: Institutions.get(Institutions.id == obj['institution']) ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def where_clause(cls, kwargs): <NEW_LINE> <INDENT> where_clause = super(InstitutionUser, cls).where_clause(kwargs) <NEW_LINE> attrs = ['uuid', 'user', 'institution', 'relationship'] <NEW_LINE> return cls._where_attr_clause(where_clause, kwargs, attrs) | Relates persons and institution objects.
Attributes:
+-------------------+-------------------------------------+
| Name | Description |
+===================+=====================================+
| user | Link to the Users model |
+-------------------+-------------------------------------+
| relationship | Link to the Relationships model |
+-------------------+-------------------------------------+
| institution | Link to the Institutions model |
+-------------------+-------------------------------------+ | 6259906345492302aabfdbe4 |
class FileTarget(Target): <NEW_LINE> <INDENT> def __init__( self, path: Path, recipe: Recipe, *prereqs: FileTargetLike, ) -> None: <NEW_LINE> <INDENT> self.path = path <NEW_LINE> self.prereqs = [file_target(p) for p in prereqs] <NEW_LINE> self._recipe = recipe <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self) -> str: <NEW_LINE> <INDENT> return str(self.path) <NEW_LINE> <DEDENT> async def recipe(self, context: Context) -> Path: <NEW_LINE> <INDENT> from picard.api import sync <NEW_LINE> prereqs = await sync(self.prereqs) <NEW_LINE> if not await self._is_up_to_date(context, prereqs): <NEW_LINE> <INDENT> context.log.info(f'start: {self.name}') <NEW_LINE> value = await self._recipe(self, context, *prereqs) <NEW_LINE> if value is not None and value != self.path: <NEW_LINE> <INDENT> context.log.warning( f'discarding value returned by {self._recipe}: {value}') <NEW_LINE> <DEDENT> if not await self._is_up_to_date(context, prereqs): <NEW_LINE> <INDENT> raise FileRecipePostConditionError(self.name) <NEW_LINE> <DEDENT> context.log.info(f'finish: {self.name}') <NEW_LINE> <DEDENT> return self.path <NEW_LINE> <DEDENT> async def _is_up_to_date( self, context: Context, prereqs: t.Iterable[t.Any] ) -> bool: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> mtime = os.stat(self.name).st_mtime <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for prereq in prereqs: <NEW_LINE> <INDENT> if not is_file_like(prereq): <NEW_LINE> <INDENT> context.log.warn( f'skipping non-filename dependency: {prereq}') <NEW_LINE> continue <NEW_LINE> <DEDENT> if os.stat(prereq).st_mtime > mtime: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | A file that must be newer than its prerequisite files. | 625990638e7ae83300eea796 |
class TestisCleanlib(test.MATTest): <NEW_LINE> <INDENT> def test_dirty(self): <NEW_LINE> <INDENT> for _, dirty in self.file_list: <NEW_LINE> <INDENT> current_file = MAT.mat.create_class_file(dirty, False, add2archive=True, low_pdf_quality=True) <NEW_LINE> self.assertFalse(current_file.is_clean()) <NEW_LINE> <DEDENT> <DEDENT> def test_clean(self): <NEW_LINE> <INDENT> for clean, _ in self.file_list: <NEW_LINE> <INDENT> current_file = MAT.mat.create_class_file(clean, False, add2archive=True, low_pdf_quality=True) <NEW_LINE> self.assertTrue(current_file.is_clean()) | test the is_clean() method | 625990638e7ae83300eea797 |
class Discoverable(MDNSDiscoverable): <NEW_LINE> <INDENT> def __init__(self, nd): <NEW_LINE> <INDENT> super(Discoverable, self).__init__(nd, '_miio._udp.local.') <NEW_LINE> <DEDENT> def info_from_entry(self, entry): <NEW_LINE> <INDENT> info = super().info_from_entry(entry) <NEW_LINE> if "poch" in info[ATTR_PROPERTIES]: <NEW_LINE> <INDENT> misparsed = info[ATTR_PROPERTIES]["poch"] <NEW_LINE> misparsed = misparsed.rstrip("\0") <NEW_LINE> for val in misparsed.split(":"): <NEW_LINE> <INDENT> if val.startswith("mac="): <NEW_LINE> <INDENT> info[ATTR_MAC_ADDRESS] = val[len("mac="):] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return info <NEW_LINE> <DEDENT> def get_entries(self): <NEW_LINE> <INDENT> return self.find_by_device_name('lumi-gateway-') | Add support for discovering Xiaomi Gateway | 62599063cb5e8a47e493cd09 |
class House: <NEW_LINE> <INDENT> def __init__(self, x=0, y=0, ): <NEW_LINE> <INDENT> self.__x = x <NEW_LINE> <DEDENT> def __checkValue(x): <NEW_LINE> <INDENT> if isinstance(x, int) or isinstance(x, float): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def corX(self): <NEW_LINE> <INDENT> return self.__x <NEW_LINE> <DEDENT> @corX.setter <NEW_LINE> def corX(self, x): <NEW_LINE> <INDENT> if House.__checkValue(x): <NEW_LINE> <INDENT> self.__x = x <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Введите число") <NEW_LINE> <DEDENT> <DEDENT> @corX.getter <NEW_LINE> def corX(self): <NEW_LINE> <INDENT> return self.__x <NEW_LINE> <DEDENT> @corX.deleter <NEW_LINE> def corX(self): <NEW_LINE> <INDENT> print('Удаление атрибута свойств') <NEW_LINE> del self.__x | Класс существительное с большой буквы
x = 1 y = 1 атрибуты == данные
def fun() - методы == функции | 625990637cff6e4e811b714f |
class Error(Exception): <NEW_LINE> <INDENT> def __init__(self, err): <NEW_LINE> <INDENT> self.error = err <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.error | description:自定义异常类
param {str} err - [异常信息]
return {type}
author: Senkita | 62599063009cb60464d02c41 |
class Core(Interface): <NEW_LINE> <INDENT> name = "wl_core" <NEW_LINE> version = 1 <NEW_LINE> the_enum = enum.Enum("the_enum", { "zero": 0, "one": 1, "hex_two": 0x2, }) | Interface object
The interface object with the most basic content. | 62599063e5267d203ee6cf43 |
class _CreateDropBase(DDLElement): <NEW_LINE> <INDENT> def __init__(self, element, bind=None): <NEW_LINE> <INDENT> self.element = element <NEW_LINE> self.bind = bind <NEW_LINE> <DEDENT> def _create_rule_disable(self, compiler): <NEW_LINE> <INDENT> return False | Base class for DDL constructs that represent CREATE and DROP or
equivalents.
The common theme of _CreateDropBase is a single
``element`` attribute which refers to the element
to be created or dropped. | 6259906307f4c71912bb0b41 |
class IndyRequestedCredsRequestedPredSchema(Schema): <NEW_LINE> <INDENT> cred_id = fields.Str( example="3fa85f64-5717-4562-b3fc-2c963f66afa6", description=( "Wallet credential identifier (typically but not necessarily a UUID)" ), ) | Schema for requested predicates within indy requested credentials structure. | 625990634a966d76dd5f05ff |
class TXPower(IntEnum): <NEW_LINE> <INDENT> MIN = -127 <NEW_LINE> ULTRA_LOW = -21 <NEW_LINE> LOW = -15 <NEW_LINE> MEDIUM = -7 <NEW_LINE> MAX = 1 | Advertising transmission (TX) power level constants.
https://developer.android.com/reference/android/bluetooth/le/AdvertisingSetParameters#TX_POWER_HIGH | 625990637d847024c075dae0 |
class TestBasicScalingInstancePoolChooseInstances(googletest.TestCase): <NEW_LINE> <INDENT> class Instance(object): <NEW_LINE> <INDENT> def __init__(self, can_accept_requests): <NEW_LINE> <INDENT> self.can_accept_requests = can_accept_requests <NEW_LINE> <DEDENT> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> stub_util.setup_test_stubs() <NEW_LINE> self.mox = mox.Mox() <NEW_LINE> self.servr = BasicScalingModuleFacade( instance_factory=instance.InstanceFactory(object(), 10)) <NEW_LINE> self.mox.stubs.Set(time, 'time', lambda: self.time) <NEW_LINE> self.mox.StubOutWithMock(self.servr._condition, 'wait') <NEW_LINE> self.mox.StubOutWithMock(self.servr, '_start_any_instance') <NEW_LINE> self.time = 0 <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.mox.UnsetStubs() <NEW_LINE> <DEDENT> def advance_time(self, *unused_args): <NEW_LINE> <INDENT> self.time += 10 <NEW_LINE> <DEDENT> def test_choose_instance_first_can_accept(self): <NEW_LINE> <INDENT> instance1 = self.Instance(True) <NEW_LINE> instance2 = self.Instance(True) <NEW_LINE> self.servr._instances = [instance1, instance2] <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(instance1, self.servr._choose_instance(1)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_first_cannot_accept(self): <NEW_LINE> <INDENT> instance1 = self.Instance(False) <NEW_LINE> instance2 = self.Instance(True) <NEW_LINE> self.servr._instances = [instance1, instance2] <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(instance2, self.servr._choose_instance(1)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_none_can_accept(self): <NEW_LINE> <INDENT> instance1 = self.Instance(False) <NEW_LINE> instance2 = self.Instance(False) <NEW_LINE> self.servr._instance_running = [True, True] <NEW_LINE> self.servr._instances = [instance1, instance2] <NEW_LINE> self.servr._start_any_instance().AndReturn(None) <NEW_LINE> 
self.servr._condition.wait(1).WithSideEffects(self.advance_time) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(None, self.servr._choose_instance(1)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_start_an_instance(self): <NEW_LINE> <INDENT> instance1 = self.Instance(False) <NEW_LINE> instance2 = self.Instance(False) <NEW_LINE> mock_instance = self.mox.CreateMock(instance.Instance) <NEW_LINE> self.servr._instances = [instance1, instance2] <NEW_LINE> self.servr._instance_running = [True, False] <NEW_LINE> self.servr._start_any_instance().AndReturn(mock_instance) <NEW_LINE> mock_instance.wait(1) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(mock_instance, self.servr._choose_instance(1)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_no_instances(self): <NEW_LINE> <INDENT> self.servr._start_any_instance().AndReturn(None) <NEW_LINE> self.servr._condition.wait(1).WithSideEffects(self.advance_time) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(None, self.servr._choose_instance(1)) <NEW_LINE> self.mox.VerifyAll() | Tests for module.BasicScalingModule._choose_instance. | 625990633539df3088ecd9a8 |
class UnsuccessfulGetRequest(Exception): <NEW_LINE> <INDENT> def __init__(self, msg=None): <NEW_LINE> <INDENT> super().__init__(msg) | Exception to raise if a GET request to the API fails | 625990634f88993c371f10a4 |
class GdbProcess: <NEW_LINE> <INDENT> def __init__(self, env, cwd, testnum, path_to_exec, args_to_exec='', gdb_options='', timeout=15): <NEW_LINE> <INDENT> self._process = None <NEW_LINE> self._env = env <NEW_LINE> self.cwd = cwd <NEW_LINE> self.path_to_exec = path_to_exec <NEW_LINE> self.args_to_exec = args_to_exec <NEW_LINE> self.gdb_options = gdb_options <NEW_LINE> self.timeout = timeout <NEW_LINE> self.commands = [] <NEW_LINE> for command in DEFAULT_START_COMMANDS: <NEW_LINE> <INDENT> self.add_command(command) <NEW_LINE> <DEDENT> log_name = 'python_gdb{}.log'.format(testnum) <NEW_LINE> self.log_file = path.join(cwd, log_name) <NEW_LINE> logging.basicConfig(filename=self.log_file, level=logging.INFO, format='%(levelname)s:%(message)s') <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> tmp = self._env.copy() <NEW_LINE> futils.add_env_common(tmp, os.environ.copy()) <NEW_LINE> self.file_gdb = tempfile.NamedTemporaryFile(mode='r+') <NEW_LINE> self.file_gdb.writelines('%s' % command for command in self.commands) <NEW_LINE> self.file_gdb.seek(0) <NEW_LINE> run_list = self._prepare_args() <NEW_LINE> try: <NEW_LINE> <INDENT> self._process = sp.run(args=run_list, env=tmp, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE, universal_newlines=True, timeout=self.timeout) <NEW_LINE> <DEDENT> except sp.TimeoutExpired: <NEW_LINE> <INDENT> self._close() <NEW_LINE> raise <NEW_LINE> <DEDENT> logging.info(self._process.stdout) <NEW_LINE> logging.error(self._process.stderr) <NEW_LINE> self.validate_gdb() <NEW_LINE> self._close() <NEW_LINE> <DEDENT> def add_command(self, command): <NEW_LINE> <INDENT> if not command.endswith('\n'): <NEW_LINE> <INDENT> command = ''.join([command, '\n']) <NEW_LINE> <DEDENT> self.commands.append(command) <NEW_LINE> <DEDENT> def validate_gdb(self): <NEW_LINE> <INDENT> if path.isfile(self.log_file + '.match'): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self._process.stderr: <NEW_LINE> <INDENT> raise futils.Fail('Gdb validation failed') 
<NEW_LINE> <DEDENT> <DEDENT> def _prepare_args(self): <NEW_LINE> <INDENT> gdb_command = ''.join(['--command=', self.file_gdb.name]) <NEW_LINE> args_str = ' '.join(['gdb', self.gdb_options, gdb_command, self.file_gdb.name, '--args', self.path_to_exec, self.args_to_exec]) <NEW_LINE> args = shlex.split(args_str) <NEW_LINE> return args <NEW_LINE> <DEDENT> def _close(self): <NEW_LINE> <INDENT> for handler in logging.root.handlers[:]: <NEW_LINE> <INDENT> handler.close <NEW_LINE> logging.root.removeHandler(handler) <NEW_LINE> <DEDENT> self.file_gdb.close() | Class for invoking gdb from python program
It writes and executes commands from a temporary gdb command file.
Attributes:
_process (CompletedProcess): finished process returned from run().
_env (dict): environment variables | 625990633cc13d1c6d466e4d |
class MaskedActionsMLP(DistributionalQModel, TFModelV2): <NEW_LINE> <INDENT> def __init__(self, obs_space, action_space, num_outputs, model_config, name, **kwargs): <NEW_LINE> <INDENT> super().__init__(obs_space, action_space, num_outputs, model_config, name, **kwargs) <NEW_LINE> orig_space = obs_space.original_space['board'] <NEW_LINE> flat_obs_space = spaces.Box(low=np.min(orig_space.low), high=np.max(orig_space.high), shape=(np.prod(orig_space.shape),)) <NEW_LINE> self.mlp = FullyConnectedNetwork(flat_obs_space, action_space, num_outputs, model_config, name) <NEW_LINE> self.register_variables(self.mlp.variables()) <NEW_LINE> <DEDENT> def forward(self, input_dict, state, seq_lens): <NEW_LINE> <INDENT> action_mask = tf.maximum(tf.log(input_dict['obs']['action_mask']), tf.float32.min) <NEW_LINE> model_out, _ = self.mlp({'obs': flatten(input_dict['obs']['board'])}) <NEW_LINE> return action_mask + model_out, state <NEW_LINE> <DEDENT> def value_function(self): <NEW_LINE> <INDENT> return self.mlp.value_function() | Tensorflow model that supports policy gradient and DQN policies. | 6259906316aa5153ce401be7 |
class UnexpectedBasisException(Exception): <NEW_LINE> <INDENT> pass | A SkipList Exception to be used when basis can't be computed. | 625990634f6381625f19a029 |
class ServiceWebhooks(BaseModel): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'service_webhooks' <NEW_LINE> <DEDENT> service_id = models.CharField(max_length=32, help_text=u"组件id") <NEW_LINE> state = models.BooleanField(default=False, help_text=u"状态(开启,关闭)") <NEW_LINE> webhooks_type = models.CharField( max_length=128, help_text=u"webhooks类型(image_webhooks, code_webhooks, api_webhooks)") <NEW_LINE> deploy_keyword = models.CharField(max_length=128, default='deploy', help_text=u"触发自动部署关键字") <NEW_LINE> trigger = models.CharField(max_length=256, default='', help_text=u"触发正则表达式") | 组件的自动部署属性 | 62599063e64d504609df9f53 |
class Children(NodeSet): <NEW_LINE> <INDENT> def __init__(self, ns): <NEW_LINE> <INDENT> self.__dict__.update(ns.__dict__) <NEW_LINE> self.label = 'CHILDREN-OF-%s' % ns.label <NEW_LINE> self.xpath = ns.xpath + '[1]/*' | Gets children of the node set | 6259906321bff66bcd724370 |
class ReciprocalHyperbolicFunction(HyperbolicFunction): <NEW_LINE> <INDENT> _reciprocal_of = None <NEW_LINE> _is_even = None <NEW_LINE> _is_odd = None <NEW_LINE> @classmethod <NEW_LINE> def eval(cls, arg): <NEW_LINE> <INDENT> if arg.could_extract_minus_sign(): <NEW_LINE> <INDENT> if cls._is_even: <NEW_LINE> <INDENT> return cls(-arg) <NEW_LINE> <DEDENT> if cls._is_odd: <NEW_LINE> <INDENT> return -cls(-arg) <NEW_LINE> <DEDENT> <DEDENT> t = cls._reciprocal_of.eval(arg) <NEW_LINE> if hasattr(arg, 'inverse') and arg.inverse() == cls: <NEW_LINE> <INDENT> return arg.args[0] <NEW_LINE> <DEDENT> return 1/t if t is not None else t <NEW_LINE> <DEDENT> def _call_reciprocal(self, method_name, *args, **kwargs): <NEW_LINE> <INDENT> o = self._reciprocal_of(self.args[0]) <NEW_LINE> return getattr(o, method_name)(*args, **kwargs) <NEW_LINE> <DEDENT> def _calculate_reciprocal(self, method_name, *args, **kwargs): <NEW_LINE> <INDENT> t = self._call_reciprocal(method_name, *args, **kwargs) <NEW_LINE> return 1/t if t is not None else t <NEW_LINE> <DEDENT> def _rewrite_reciprocal(self, method_name, arg): <NEW_LINE> <INDENT> t = self._call_reciprocal(method_name, arg) <NEW_LINE> if t is not None and t != self._reciprocal_of(arg): <NEW_LINE> <INDENT> return 1/t <NEW_LINE> <DEDENT> <DEDENT> def _eval_rewrite_as_exp(self, arg, **kwargs): <NEW_LINE> <INDENT> return self._rewrite_reciprocal("_eval_rewrite_as_exp", arg) <NEW_LINE> <DEDENT> def _eval_rewrite_as_tractable(self, arg, **kwargs): <NEW_LINE> <INDENT> return self._rewrite_reciprocal("_eval_rewrite_as_tractable", arg) <NEW_LINE> <DEDENT> def _eval_rewrite_as_tanh(self, arg, **kwargs): <NEW_LINE> <INDENT> return self._rewrite_reciprocal("_eval_rewrite_as_tanh", arg) <NEW_LINE> <DEDENT> def _eval_rewrite_as_coth(self, arg, **kwargs): <NEW_LINE> <INDENT> return self._rewrite_reciprocal("_eval_rewrite_as_coth", arg) <NEW_LINE> <DEDENT> def as_real_imag(self, deep = True, **hints): <NEW_LINE> <INDENT> return (1 / 
self._reciprocal_of(self.args[0])).as_real_imag(deep, **hints) <NEW_LINE> <DEDENT> def _eval_conjugate(self): <NEW_LINE> <INDENT> return self.func(self.args[0].conjugate()) <NEW_LINE> <DEDENT> def _eval_expand_complex(self, deep=True, **hints): <NEW_LINE> <INDENT> re_part, im_part = self.as_real_imag(deep=True, **hints) <NEW_LINE> return re_part + S.ImaginaryUnit*im_part <NEW_LINE> <DEDENT> def _eval_as_leading_term(self, x): <NEW_LINE> <INDENT> return (1/self._reciprocal_of(self.args[0]))._eval_as_leading_term(x) <NEW_LINE> <DEDENT> def _eval_is_extended_real(self): <NEW_LINE> <INDENT> return self._reciprocal_of(self.args[0]).is_extended_real <NEW_LINE> <DEDENT> def _eval_is_finite(self): <NEW_LINE> <INDENT> return (1/self._reciprocal_of(self.args[0])).is_finite | Base class for reciprocal functions of hyperbolic functions. | 62599063627d3e7fe0e08596 |
class SVRExperimentConfiguration(ec.ExperimentConfiguration): <NEW_LINE> <INDENT> def __init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix): <NEW_LINE> <INDENT> super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix) <NEW_LINE> self.technique = ec.Technique.SVR <NEW_LINE> self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) <NEW_LINE> <DEDENT> def _compute_signature(self, prefix): <NEW_LINE> <INDENT> signature = prefix.copy() <NEW_LINE> signature.append("C_" + str(self._hyperparameters['C'])) <NEW_LINE> signature.append("epsilon_" + str(self._hyperparameters['epsilon'])) <NEW_LINE> signature.append("gamma_" + str(self._hyperparameters['gamma'])) <NEW_LINE> signature.append("kernel_" + str(self._hyperparameters['kernel'])) <NEW_LINE> signature.append("degree_" + str(self._hyperparameters['degree'])) <NEW_LINE> return signature <NEW_LINE> <DEDENT> def _train(self): <NEW_LINE> <INDENT> self._logger.debug("Building model for %s", self._signature) <NEW_LINE> assert self._regression_inputs <NEW_LINE> xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split["training"]) <NEW_LINE> self._regressor.fit(xdata, ydata) <NEW_LINE> self._logger.debug("Model built") <NEW_LINE> <DEDENT> def compute_estimations(self, rows): <NEW_LINE> <INDENT> xdata, _ = self._regression_inputs.get_xy_data(rows) <NEW_LINE> return self._regressor.predict(xdata) | Class representing a single experiment configuration for linear regression
Attributes
----------
_linear_regression : LinearRegression
The actual scikt object which performs the linear regression
Methods
-------
_train()
Performs the actual building of the linear model
compute_estimations()
Compute the estimated values for a give set of data | 62599063e76e3b2f99fda10b |
class File(object): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'File({abs}, {root})'.format(abs=repr(self.absolute), root=repr(self.root)) <NEW_LINE> <DEDENT> def __init__(self, abs_path, root): <NEW_LINE> <INDENT> if not abs_path.startswith(root): <NEW_LINE> <INDENT> raise ValueError('file abspath outside of root') <NEW_LINE> <DEDENT> self.root = root <NEW_LINE> self.absolute = abs_path <NEW_LINE> self.meta = FileMeta.for_path(abs_path, root) <NEW_LINE> <DEDENT> @property <NEW_LINE> def basename(self): <NEW_LINE> <INDENT> return os.path.basename(self.absolute) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_directory(self): <NEW_LINE> <INDENT> return os.path.isdir(self.absolute) <NEW_LINE> <DEDENT> @property <NEW_LINE> @ensure_stat <NEW_LINE> def mtime(self): <NEW_LINE> <INDENT> return self.stat.st_mtime <NEW_LINE> <DEDENT> @property <NEW_LINE> @ensure_stat <NEW_LINE> def last_modified(self): <NEW_LINE> <INDENT> return time.ctime(self.mtime) <NEW_LINE> <DEDENT> @property <NEW_LINE> @ensure_stat <NEW_LINE> def bytes(self): <NEW_LINE> <INDENT> return float(self.stat.st_size) <NEW_LINE> <DEDENT> @property <NEW_LINE> @ensure_stat <NEW_LINE> def size(self): <NEW_LINE> <INDENT> num = float(self.stat.st_size) <NEW_LINE> for x in ['bytes','KB','MB','GB']: <NEW_LINE> <INDENT> if -1024.0 < num < 1024.0: <NEW_LINE> <INDENT> return "%3.1f %s" % (num, x) <NEW_LINE> <DEDENT> num /= 1024.0 <NEW_LINE> <DEDENT> return "%3.1f%s" % (num, 'TB') <NEW_LINE> <DEDENT> @property <NEW_LINE> def relative(self): <NEW_LINE> <INDENT> return self.absolute[len(self.root):] <NEW_LINE> <DEDENT> @property <NEW_LINE> def file_url(self): <NEW_LINE> <INDENT> return '/files/' + self.relative <NEW_LINE> <DEDENT> @property <NEW_LINE> def thumb_url(self): <NEW_LINE> <INDENT> _, ext = os.path.splitext(self.absolute) <NEW_LINE> if ext in TEXT_EXT: <NEW_LINE> <INDENT> return '/static/icons/text.png' <NEW_LINE> <DEDENT> if ext in VID_EXT: <NEW_LINE> <INDENT> return 
'/static/icons/vid.png' <NEW_LINE> <DEDENT> if ext in IMAGE_EXT: <NEW_LINE> <INDENT> return '/static/icons/vid.png' <NEW_LINE> <DEDENT> return '/static/icons/generic.png' | Easily get properties for files
there was probably a library class like this already; things got out of
hand. | 6259906367a9b606de547628 |
class InvalidKeySignature(Exception): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Exception.__init__(self) | Exception to raise when we encounter an invalid key signature. | 625990639c8ee82313040d0e |
class Formula(object): <NEW_LINE> <INDENT> u <NEW_LINE> col_ind = '[A-Z]+' <NEW_LINE> row_ind = '[1-9][0-9]*' <NEW_LINE> cell_coord = '[$]?'.join(['', col_ind, row_ind]) <NEW_LINE> col_re = re.compile(col_ind) <NEW_LINE> row_re = re.compile(row_ind) <NEW_LINE> cell_coordinates_re = re.compile(cell_coord) <NEW_LINE> _cache = {} <NEW_LINE> @classmethod <NEW_LINE> def get_instance(cls, formula): <NEW_LINE> <INDENT> return Formula._cache.get(formula) or Formula(formula) <NEW_LINE> <DEDENT> def __init__(self, formula): <NEW_LINE> <INDENT> self.formula = formula <NEW_LINE> self.first = True <NEW_LINE> self._last_row = 0 <NEW_LINE> self._last_column = 'A' <NEW_LINE> if formula not in Formula._cache: <NEW_LINE> <INDENT> Formula._cache[formula] = self <NEW_LINE> <DEDENT> <DEDENT> def _get_cell_coord(self): <NEW_LINE> <INDENT> iterator = self.cell_coordinates_re.finditer(self.formula) <NEW_LINE> for i in iterator: <NEW_LINE> <INDENT> yield i.start(), i.end() <NEW_LINE> <DEDENT> <DEDENT> def _change_analyze(self, cell_coord): <NEW_LINE> <INDENT> column = re.compile('[$]?[A-Z]+') <NEW_LINE> row = re.compile('[$]?[1-9][0-9]*') <NEW_LINE> change_column = change_row = True <NEW_LINE> if column.search(cell_coord).group()[0] == '$': <NEW_LINE> <INDENT> change_column = False <NEW_LINE> <DEDENT> if row.search(cell_coord).group()[0] == '$': <NEW_LINE> <INDENT> change_row = False <NEW_LINE> <DEDENT> return change_column, change_row <NEW_LINE> <DEDENT> def _get_next_index(self, cell_coord, may_change, diff, is_column): <NEW_LINE> <INDENT> reg_exp = self.col_re if is_column else self.row_re <NEW_LINE> index = reg_exp.search(cell_coord).group() <NEW_LINE> if may_change: <NEW_LINE> <INDENT> if is_column: <NEW_LINE> <INDENT> index = ColumnHelper.add(index, diff) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> index = str(int(index) + diff) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> index = ''.join(['$', index]) <NEW_LINE> <DEDENT> return index <NEW_LINE> <DEDENT> def 
get_next_formula(self, row, column): <NEW_LINE> <INDENT> diff_row = row - self._last_row <NEW_LINE> self._last_row = row <NEW_LINE> diff_column = ColumnHelper.difference(column, self._last_column) <NEW_LINE> self._last_column = column <NEW_LINE> if not self.first: <NEW_LINE> <INDENT> formula = self.formula <NEW_LINE> for start, end in self._get_cell_coord(): <NEW_LINE> <INDENT> cell_coord = self.formula[start:end] <NEW_LINE> change_column, change_row = self._change_analyze(cell_coord) <NEW_LINE> column_index = self._get_next_index(cell_coord, change_column, diff_column, is_column=True) <NEW_LINE> row_index = self._get_next_index(cell_coord, change_row, diff_row, is_column=False) <NEW_LINE> new_cell_coord = ''.join([column_index, row_index]) <NEW_LINE> formula = formula.replace(cell_coord, new_cell_coord) <NEW_LINE> <DEDENT> self.formula = formula <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.first = False <NEW_LINE> <DEDENT> return self.formula | Изменение формулы для последующего вывода | 62599063b7558d5895464ab4 |
class Transport(): <NEW_LINE> <INDENT> def __init__(self, que): <NEW_LINE> <INDENT> self.log = logging.getLogger("ChessLinkPyBlue") <NEW_LINE> self.que = que <NEW_LINE> self.init = True <NEW_LINE> self.is_open = False <NEW_LINE> self.log.debug("init ok") <NEW_LINE> <DEDENT> def search_board(self): <NEW_LINE> <INDENT> self.log.debug("searching for boards") <NEW_LINE> return None <NEW_LINE> <DEDENT> def test_board(self, address): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def open_mt(self, address): <NEW_LINE> <INDENT> self.log.debug(f"open_mt {address}") <NEW_LINE> return False <NEW_LINE> <DEDENT> def write_mt(self, msg): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return "chess_link_pyblue" <NEW_LINE> <DEDENT> def is_init(self): <NEW_LINE> <INDENT> return self.init | non-functional frame | 625990635166f23b2e244adf |
class CarTypeSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = CarType <NEW_LINE> fields = '__all__' | 车型序列化器 | 625990634f88993c371f10a5 |
class BetGrouping: <NEW_LINE> <INDENT> GroupBets = 'true' <NEW_LINE> IndividualBets = 'false' <NEW_LINE> Default = IndividualBets | Group betting report to return average odds and total stake on runners or all bets individually.
:var true: group bets by runner and average odds, sum stakes
:var false: return all bets individually | 625990631f5feb6acb1642f7 |
class AuthMeta(models.Model): <NEW_LINE> <INDENT> def __unicode__(self): <NEW_LINE> <INDENT> return '%s - %s' % (self.user, self.provider) <NEW_LINE> <DEDENT> user = models.ForeignKey(User) <NEW_LINE> provider = models.CharField(max_length = 200) <NEW_LINE> provider_model = models.CharField(max_length=40) <NEW_LINE> provider_id = models.IntegerField() <NEW_LINE> is_email_filled = models.BooleanField(default = False) <NEW_LINE> is_profile_modified = models.BooleanField(default = False) | Metadata for Authentication | 6259906399cbb53fe68325ef |
class Person: <NEW_LINE> <INDENT> population = 0 <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> Person.population += 1 <NEW_LINE> print("Person Instantiated.{}".format(self.name)) <NEW_LINE> <DEDENT> def say_hi(self, myname=None): <NEW_LINE> <INDENT> if not myname: <NEW_LINE> <INDENT> print("Hello {}!".format(self.name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Hello {}!".format(myname)) <NEW_LINE> <DEDENT> <DEDENT> def die(self): <NEW_LINE> <INDENT> Person.population -= 1 <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def how_many(cls): <NEW_LINE> <INDENT> print("This class has now {} instances.".format(cls.population)) <NEW_LINE> return cls.population | Represents the person class | 6259906356ac1b37e630386d |
class PageDetailView(DetailView): <NEW_LINE> <INDENT> model = Page <NEW_LINE> context_object_name = 'page' <NEW_LINE> template_name = 'pages/page_detail.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(PageDetailView, self).get_context_data(**kwargs) <NEW_LINE> if self.object.text_format == 'markdown': <NEW_LINE> <INDENT> md = markdown.Markdown(extensions=['markdown.extensions.toc']) <NEW_LINE> html = md.convert(context['page'].text) <NEW_LINE> toc = md.toc <NEW_LINE> context['page'].text = html <NEW_LINE> context['page'].toc = toc <NEW_LINE> <DEDENT> return context | simple static view.
can render markdown or reStructured text | 625990637d43ff2487427f96 |
class MockODBCCursor(mock.Mock): <NEW_LINE> <INDENT> def __init__(self, existing_update_ids): <NEW_LINE> <INDENT> super(MockODBCCursor, self).__init__() <NEW_LINE> self.existing = existing_update_ids <NEW_LINE> <DEDENT> def execute(self, query, params): <NEW_LINE> <INDENT> if query.startswith('SELECT 1 FROM table_updates'): <NEW_LINE> <INDENT> self.fetchone_result = (1, ) if params[0] in self.existing else None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fetchone_result = None <NEW_LINE> <DEDENT> <DEDENT> def fetchone(self): <NEW_LINE> <INDENT> return self.fetchone_result | Keeps state to simulate executing SELECT queries and fetching results. | 6259906366673b3332c31b09 |
class Testcase_210_50_port_administratively_down(base_tests.SimpleDataPlane): <NEW_LINE> <INDENT> def tearDown(self): <NEW_LINE> <INDENT> in_port, out_port = openflow_ports(2) <NEW_LINE> request = ofp.message.port_desc_stats_request() <NEW_LINE> port_stats = get_stats(self, req = request) <NEW_LINE> port_config_set(self.controller, port_no=out_port, config=0, mask = 0) <NEW_LINE> logging.info("Set up port %d ", out_port) <NEW_LINE> reply, _ = self.controller.poll(exp_msg=ofp.OFPT_ERROR, timeout=3) <NEW_LINE> self.assertIsNone(reply, "Switch generated an error when setting up the port") <NEW_LINE> sleep(2) <NEW_LINE> self.controller.clear_queue() <NEW_LINE> base_tests.SimpleDataPlane.tearDown(self) <NEW_LINE> <DEDENT> @wireshark_capture <NEW_LINE> def runTest(self): <NEW_LINE> <INDENT> logging.info("Running testcase 210.50 port administrtively down") <NEW_LINE> in_port, out_port = openflow_ports(2) <NEW_LINE> actions = [ofp.action.output(ofp.OFPP_CONTROLLER)] <NEW_LINE> actions_out = [ofp.action.output(out_port)] <NEW_LINE> pkt = simple_tcp_packet() <NEW_LINE> delete_all_flows(self.controller) <NEW_LINE> logging.info("Inserting flow") <NEW_LINE> request = ofp.message.flow_add( table_id=test_param_get("table", 0), instructions=[ ofp.instruction.apply_actions(actions)], buffer_id=ofp.OFP_NO_BUFFER, priority=0) <NEW_LINE> self.controller.message_send(request) <NEW_LINE> logging.info("Inserting a table miss flow to forward packet to controller") <NEW_LINE> reply, _ = self.controller.poll(exp_msg=ofp.OFPT_ERROR, timeout=3) <NEW_LINE> self.assertIsNone(reply, "Switch generated an error when inserting flow") <NEW_LINE> do_barrier(self.controller) <NEW_LINE> request = ofp.message.port_desc_stats_request() <NEW_LINE> port_stats = get_stats(self, req = request) <NEW_LINE> port_config_set(self.controller, port_no=out_port, config=ofp.OFPPC_PORT_DOWN, mask = ofp.OFPPC_PORT_DOWN) <NEW_LINE> logging.info("Set down port %d ", out_port) <NEW_LINE> reply, _ = 
self.controller.poll(exp_msg=ofp.OFPT_ERROR, timeout=3) <NEW_LINE> self.assertIsNone(reply, "Switch generated an error when setting down the port") <NEW_LINE> self.dataplane.send(out_port, str(pkt)) <NEW_LINE> verify_no_packet_in(self, str(pkt), out_port) <NEW_LINE> request = ofp.message.packet_out(in_port = ofp.OFPP_CONTROLLER, data = str(pkt), buffer_id = ofp.OFP_NO_BUFFER, actions = actions_out) <NEW_LINE> self.controller.message_send(request) <NEW_LINE> verify_no_packet(self, str(pkt), out_port) | Purpose
Verify a port status change message is received, and the bitmap reflects the change in the port config.
Methodology
Configure and connect DUT to controller. After control channel establishment, install a table_miss flow entry to generate ofp_packet_in messages. Send an ofp_port_mod message that sets the all configuration bits to zero except OFPPC_PORT_DOWN, for a data plane port X. Verify that the port config bits are correctly set. Send traffic on data plane port X. Verify no ofp_packet_in message is received. Send an ofp_packet_out message with an output action to port X. Verify no traffic is forwarded. | 62599063be8e80087fbc0794 |
class VertAnimSprite(AnimSprite): <NEW_LINE> <INDENT> def __init__(self, image, init_pos, speed): <NEW_LINE> <INDENT> AnimSprite.__init__(self, image, init_pos, speed) <NEW_LINE> self.limit = self.image.get_width() <NEW_LINE> <DEDENT> def update(self, time_pass_sec): <NEW_LINE> <INDENT> AnimSprite.update(self, time_pass_sec) <NEW_LINE> self.rect.move_ip(0, self.distance_moved) <NEW_LINE> if self.rect.top >= self.area.bottom: <NEW_LINE> <INDENT> self.rect.left = randint(self.area.left + self.limit, self.area.right - self.limit) <NEW_LINE> self.rect.top = self.area.top - self.area.bottom | A vertical animated sprite. | 6259906345492302aabfdbe8 |
class merge(Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Module.__init__(self, "merge", MERGE) <NEW_LINE> self.conf.addArgument({"input": Argument.Required|Argument.List|typeId.Node, "name": "files", "description": "these files will be concatenated in the order they are provided", "parameters": {"type": Parameter.Editable, "minimum": 2} }) <NEW_LINE> self.conf.addArgument({"input": Argument.Optional|Argument.Single|typeId.String, "name": "output", "description": "the name of file corresponding to the concatenation" }) <NEW_LINE> self.conf.addArgument({"input": Argument.Optional|Argument.Single|typeId.Node, "name": "parent", "description": "parent of the resulting output file (default will be basefile)" }) <NEW_LINE> self.tags = "Node" | This module concatenates two or more files. | 625990634f6381625f19a02a |
class MagnetostaticFields(FieldDiagnostic): <NEW_LINE> <INDENT> def gatherfields(self): <NEW_LINE> <INDENT> if self.lparallel == 1: <NEW_LINE> <INDENT> self.bfield = self.solver.getb(bcast=0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bfield = [] <NEW_LINE> for dim in ['x', 'y', 'z']: <NEW_LINE> <INDENT> self.bfield.append(getb(comp=dim, bcast=0)) <NEW_LINE> <DEDENT> self.bfield = np.array(self.bfield) <NEW_LINE> <DEDENT> <DEDENT> def gathervectorpotential(self): <NEW_LINE> <INDENT> if self.lparallel == 1: <NEW_LINE> <INDENT> self.a = self.solver.geta() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.a = [] <NEW_LINE> for dim in ['x', 'y', 'z']: <NEW_LINE> <INDENT> self.a.append(geta(comp=dim)) <NEW_LINE> <DEDENT> self.a = np.array(self.a) <NEW_LINE> <DEDENT> <DEDENT> def write(self): <NEW_LINE> <INDENT> if not self.write_dir: <NEW_LINE> <INDENT> write_dir = 'diags/fields/magnetic' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> write_dir = self.write_dir <NEW_LINE> <DEDENT> if not super(MagnetostaticFields, self).write(write_dir): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.gatherfields() <NEW_LINE> self.gathervectorpotential() <NEW_LINE> if self.lparallel == 0 or self.comm_world.rank == 0: <NEW_LINE> <INDENT> self.writeDataset(self.bfield, prefix='%s%sB' % (self.basePath, self.meshPath)) <NEW_LINE> self.writeDataset(self.a, prefix='%s%svector_potential' % (self.basePath, self.meshPath)) <NEW_LINE> <DEDENT> self.file.close() | Produce an HDF5 file with magnetic fields and vector potential.
File tree:
/data/meshes/
/mesh
/x
/y
/z
Note that the coordinates will be replaced as appropriate for different
solver geometries (e.g. xyz -> rtz for RZgeom).
/vector_potential
/x
/y
/z
/B
/x
/y
/z | 625990638e7ae83300eea79b |
class ApproximateQAgent(PacmanQAgent): <NEW_LINE> <INDENT> def __init__(self, extractor='IdentityExtractor', **args): <NEW_LINE> <INDENT> self.featExtractor = util.lookup(extractor, globals())() <NEW_LINE> PacmanQAgent.__init__(self, **args) <NEW_LINE> self.weights = util.Counter() <NEW_LINE> <DEDENT> def getWeights(self): <NEW_LINE> <INDENT> return self.weights <NEW_LINE> <DEDENT> def getQValue(self, state, action): <NEW_LINE> <INDENT> total = 0.0 <NEW_LINE> features = self.featExtractor.getFeatures(state,action) <NEW_LINE> for feature in features: <NEW_LINE> <INDENT> total += self.weights[feature]*features[feature] <NEW_LINE> <DEDENT> return total <NEW_LINE> <DEDENT> def update(self, state, action, nextState, reward): <NEW_LINE> <INDENT> features = self.featExtractor.getFeatures(state, action) <NEW_LINE> curvalue = self.getQValue(state, action) <NEW_LINE> maxq = -9999.9 <NEW_LINE> maxaction = None <NEW_LINE> actions = self.getLegalActions(nextState) <NEW_LINE> for act in actions: <NEW_LINE> <INDENT> val = self.getQValue(nextState,act) <NEW_LINE> if val > maxq: <NEW_LINE> <INDENT> maxq = val <NEW_LINE> maxaction = act <NEW_LINE> <DEDENT> <DEDENT> if maxaction == None: <NEW_LINE> <INDENT> diff = reward - curvalue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> diff = (reward + self.discount*maxq) - curvalue <NEW_LINE> <DEDENT> for feature in features: <NEW_LINE> <INDENT> self.weights[feature] = self.weights[feature] + self.alpha*diff*features[feature] <NEW_LINE> <DEDENT> <DEDENT> def final(self, state): <NEW_LINE> <INDENT> PacmanQAgent.final(self, state) <NEW_LINE> if self.episodesSoFar == self.numTraining: <NEW_LINE> <INDENT> pass | ApproximateQLearningAgent
You should only have to overwrite getQValue
and update. All other QLearningAgent functions
should work as is. | 62599063009cb60464d02c44 |
class TmpFileCleanup(object): <NEW_LINE> <INDENT> def __enter__(self): <NEW_LINE> <INDENT> self.tmp_files = [] <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> for file_path in self.tmp_files: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(file_path) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass | Used to clean up tmp files.
| 6259906391af0d3eaad3b536 |
class PasswordResetTokenGenerator(object): <NEW_LINE> <INDENT> def make_token(self, user): <NEW_LINE> <INDENT> return self._make_token_with_timestamp(user, self._num_days(self._today())) <NEW_LINE> <DEDENT> def check_token(self, user, token): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ts_b36, hash = token.split("-") <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ts = base36_to_int(ts_b36) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not constant_time_compare(self._make_token_with_timestamp(user, ts), token): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def _make_token_with_timestamp(self, user, timestamp): <NEW_LINE> <INDENT> ts_b36 = int_to_base36(timestamp) <NEW_LINE> key_salt = "django.contrib.auth.tokens.PasswordResetTokenGenerator" <NEW_LINE> login_timestamp = user.last_login.replace(microsecond=0, tzinfo=None) <NEW_LINE> value = (six.text_type(user.pk) + user.password + six.text_type(login_timestamp) + six.text_type(timestamp)) <NEW_LINE> hash = salted_hmac(key_salt, value).hexdigest()[::2] <NEW_LINE> return "%s-%s" % (ts_b36, hash) <NEW_LINE> <DEDENT> def _num_days(self, dt): <NEW_LINE> <INDENT> return (dt - date(2001, 1, 1)).days <NEW_LINE> <DEDENT> def _today(self): <NEW_LINE> <INDENT> return date.today() | Strategy object used to generate and check tokens for the password
reset mechanism. | 625990637d847024c075dae4 |
class RaspVariantsParser(object): <NEW_LINE> <INDENT> def __init__(self, current_time, redis_client=None, url=None): <NEW_LINE> <INDENT> self.tmp_rasp_variants = util.tree() <NEW_LINE> self.current_time = current_time <NEW_LINE> self.redis_client = redis_client <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> def set(self, rv_enable_r, rv_enddateexists_r, rv_enddate_r, mr_id_r, srv_id_r, rv_id_r, rv_dow_r, rv_startdate_r, rv_checksum_r): <NEW_LINE> <INDENT> rv_enable = int(rv_enable_r) <NEW_LINE> if not rv_enable: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> rv_enddateexists = int(rv_enddateexists_r) <NEW_LINE> if rv_enddateexists: <NEW_LINE> <INDENT> rv_enddate = time.mktime(time.strptime(rv_enddate_r, "%Y-%m-%d %H:%M:%S")) <NEW_LINE> if rv_enddate < self.current_time: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> mr_id = int(mr_id_r) <NEW_LINE> srv_id = int(srv_id_r) <NEW_LINE> rv_id = int(rv_id_r) <NEW_LINE> try: <NEW_LINE> <INDENT> rv_dow = int(rv_dow_r) & 127 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> rv_dow = 127 <NEW_LINE> <DEDENT> rv_startdate = time.mktime(time.strptime(rv_startdate_r, "%Y-%m-%d %H:%M:%S")) <NEW_LINE> if rv_startdate > self.current_time: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> rv_checksum = int(rv_checksum_r) <NEW_LINE> change = True <NEW_LINE> if self.redis_client and self.url: <NEW_LINE> <INDENT> redis_key = 'checksum:tn:%s:raspvariants:%d:%d:%d' % (self.url, mr_id, srv_id, rv_id) <NEW_LINE> redis_checksum = self.redis_client.get(redis_key) <NEW_LINE> if redis_checksum: <NEW_LINE> <INDENT> if rv_checksum == int(redis_checksum): <NEW_LINE> <INDENT> change = False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> mask = 1 <NEW_LINE> for day_week in xrange(7): <NEW_LINE> <INDENT> if mask & rv_dow: <NEW_LINE> <INDENT> self.tmp_rasp_variants[mr_id][day_week][rv_startdate][(srv_id, rv_id)] = { 'mask': rv_dow, 'checksum': rv_checksum, 'change': change } <NEW_LINE> <DEDENT> mask = mask << 1 <NEW_LINE> <DEDENT> <DEDENT> def end(self): 
<NEW_LINE> <INDENT> self.rasp_variants = util.tree() <NEW_LINE> for mr_id in self.tmp_rasp_variants: <NEW_LINE> <INDENT> for day_week in self.tmp_rasp_variants[mr_id]: <NEW_LINE> <INDENT> rv_startdate = sorted(self.tmp_rasp_variants[mr_id][day_week].keys())[-1] <NEW_LINE> (srv_id, rv_id) = sorted(self.tmp_rasp_variants[mr_id][day_week][rv_startdate].keys())[-1] <NEW_LINE> self.rasp_variants[mr_id][(srv_id, rv_id)] = self.tmp_rasp_variants[mr_id][day_week][rv_startdate][(srv_id, rv_id)] | Парсер запроса getRaspVariants | 6259906332920d7e50bc7754 |
class AbstractSelection(object): <NEW_LINE> <INDENT> def __init__(self, mutator, crossover, repairer = None): <NEW_LINE> <INDENT> self._mutator = mutator <NEW_LINE> self._crossover = crossover <NEW_LINE> self._repairer = repairer <NEW_LINE> <DEDENT> def mutate_and_crossover(self, org_1, org_2): <NEW_LINE> <INDENT> cross_org_1, cross_org_2 = self._crossover.do_crossover(org_1, org_2) <NEW_LINE> final_org_1 = self._mutator.mutate(cross_org_1) <NEW_LINE> final_org_2 = self._mutator.mutate(cross_org_2) <NEW_LINE> if self._repairer is not None: <NEW_LINE> <INDENT> final_org_1 = self._repairer.repair(final_org_1) <NEW_LINE> final_org_2 = self._repairer.repair(final_org_2) <NEW_LINE> <DEDENT> return final_org_1, final_org_2 <NEW_LINE> <DEDENT> def select(self, population): <NEW_LINE> <INDENT> raise NotImplementedError("Derived classes must implement.") | Base class for Selector classes.
This classes provides useful functions for different selector classes
and also defines the functions that all selector classes must
implement.
This class should not be used directly, but rather should be subclassed. | 625990633539df3088ecd9ab |
class ProductLabelPrintWizard(models.TransientModel): <NEW_LINE> <INDENT> _name = 'product.label.print.wizard' <NEW_LINE> _description = 'Wisaya Cetak Label Produk' <NEW_LINE> number_of_copy = fields.Integer(string="Jumlah Rangkap", default=1) <NEW_LINE> product_label_wizard = fields.One2many('product.label.wizard', 'print_wizard', string='Products') <NEW_LINE> @api.multi <NEW_LINE> def button_export_pdf(self): <NEW_LINE> <INDENT> self.ensure_one() <NEW_LINE> return self._export() <NEW_LINE> <DEDENT> def _prepare_data(self, product): <NEW_LINE> <INDENT> self.ensure_one() <NEW_LINE> return { 'qr_code': product.qr_code, 'default_code': product.default_code, 'name': product.name, 'print_wizard': self.id, } <NEW_LINE> <DEDENT> def _export(self): <NEW_LINE> <INDENT> products = self.env["product.template"].browse(self._context['active_ids']) <NEW_LINE> for product in products: <NEW_LINE> <INDENT> data = self._prepare_data(product) <NEW_LINE> self.env['product.label.wizard'].create(data) <NEW_LINE> <DEDENT> return self.env.ref( 'toserba23.report_product_label').report_action( self) | Wisaya Cetak Label Produk. | 62599063097d151d1a2c2779 |
class _Transaction(Transaction): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def addTransaction(cls): <NEW_LINE> <INDENT> cls._threadTransactionList().append(cls()) <NEW_LINE> return cls.transaction() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def removeTransaction(cls): <NEW_LINE> <INDENT> cls._threadTransactionList().pop() <NEW_LINE> <DEDENT> def undo(self, exception): <NEW_LINE> <INDENT> if self.__handleMessage is not None: <NEW_LINE> <INDENT> if self.__message is not None: <NEW_LINE> <INDENT> self.__handleMessage(_("{0}; {1}").format(self.__message, str(exception))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__handleMessage(str(exception)) <NEW_LINE> <DEDENT> <DEDENT> undoStages = self._undoStages()[:] <NEW_LINE> undoStages.reverse() <NEW_LINE> for undoStage in undoStages: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> undoStage() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> super(_Transaction, self).__init__() <NEW_LINE> self.__handleMessage = None <NEW_LINE> self.__message = None <NEW_LINE> <DEDENT> def setMessage(self, handleMessage, message = None): <NEW_LINE> <INDENT> self.__handleMessage = handleMessage <NEW_LINE> if self.__handleMessage is None: <NEW_LINE> <INDENT> self.__message = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__message = message | Decorator-local transaction object providing actual transaction
capabilities. | 6259906392d797404e3896e4 |
class Node: <NEW_LINE> <INDENT> def __init__(self, data, next_node=None): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.next_node = next_node <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self.__data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, value): <NEW_LINE> <INDENT> if not isinstance(value, int): <NEW_LINE> <INDENT> raise TypeError("data must be an integer") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__data = value <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def next_node(self): <NEW_LINE> <INDENT> return self.__next_node <NEW_LINE> <DEDENT> @next_node.setter <NEW_LINE> def next_node(self, value): <NEW_LINE> <INDENT> if value is not None or not isinstance(value, Node): <NEW_LINE> <INDENT> raise TypeError("next_node must be a Node object") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__next_node = value | Class Node singly linked | 6259906399cbb53fe68325f1 |
class Curation(models.Model): <NEW_LINE> <INDENT> curation_id = models.AutoField(primary_key=True) <NEW_LINE> TF_species = models.CharField(max_length=500) <NEW_LINE> site_species = models.CharField(max_length=500) <NEW_LINE> experimental_process = models.TextField(null=True, blank=True) <NEW_LINE> forms_complex = models.BooleanField() <NEW_LINE> complex_notes = models.TextField(null=True, blank=True) <NEW_LINE> notes = models.TextField(blank=True) <NEW_LINE> confidence = models.BooleanField() <NEW_LINE> NCBI_submission_ready = models.BooleanField() <NEW_LINE> REVISION_REASONS = ( ('genome_not_available', "No comparable genome in NCBI"), ('in_progress', "Matching genome still in progress"), ('TF_not_available', "No comparable TF protein sequence in NCBI"), ('other', "Other reason (specify in notes)"),) <NEW_LINE> requires_revision = models.CharField( max_length=20, choices=REVISION_REASONS, null=True, blank=True) <NEW_LINE> validated_by = models.ForeignKey('Curator', null=True, blank=True, related_name='validated_by') <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> last_modified = models.DateTimeField(auto_now=True) <NEW_LINE> curator = models.ForeignKey('Curator') <NEW_LINE> publication = models.ForeignKey('Publication') <NEW_LINE> TF_instances = models.ManyToManyField('TFInstance') <NEW_LINE> site_instances = models.ManyToManyField('SiteInstance', through='Curation_SiteInstance') <NEW_LINE> chip_info = models.ForeignKey('ChipInfo', null=True, blank=True) <NEW_LINE> quantitative_data_format = models.TextField(null=True, blank=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s - %s - %s, %s, %s' % (self.curation_id, self.TF.name, self.publication.title, self.publication.authors, self.publication.publication_date) <NEW_LINE> <DEDENT> @property <NEW_LINE> def TF(self): <NEW_LINE> <INDENT> return self.TF_instances.all()[0].TF <NEW_LINE> <DEDENT> @property <NEW_LINE> def TF_instance_accessions(self): <NEW_LINE> <INDENT> 
return [TF_instance.uniprot_accession for TF_instance in self.TF_instances.all()] <NEW_LINE> <DEDENT> def TF_function_verbose(self): <NEW_LINE> <INDENT> return dict(self.TF_FUNCTION)[self.TF_function] <NEW_LINE> <DEDENT> def TF_type_verbose(self): <NEW_LINE> <INDENT> return dict(self.TF_TYPE)[self.TF_type] <NEW_LINE> <DEDENT> def PMID(self): <NEW_LINE> <INDENT> return self.publication.pmid <NEW_LINE> <DEDENT> def external_databases(self): <NEW_LINE> <INDENT> return Curation_ExternalDatabase.objects.filter(curation=self) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Curation" | Curation model.
Contains all the details about the curation, such as reported TF and
species, followed experimental process, link to curator, publication,
etc. Also keeps some meta-information about the curation, such as whether
it requires revision, ready for NCBI submission, validation status, etc. | 625990632ae34c7f260ac7f6 |
class Context(_messages.Message): <NEW_LINE> <INDENT> rules = _messages.MessageField('ContextRule', 1, repeated=True) | `Context` defines which contexts an API requests. Example:
context: rules: - selector: "*" requested: -
google.rpc.context.ProjectContext - google.rpc.context.OriginContext
The above specifies that all methods in the API request
`google.rpc.context.ProjectContext` and `google.rpc.context.OriginContext`.
Available context types are defined in package `google.rpc.context`. This
also provides mechanism to whitelist any protobuf message extension that can
be sent in grpc metadata using \u201cx-goog-ext-<extension_id>-bin\u201d and \u201cx-goog-
ext-<extension_id>-jspb\u201d format. For example, list any service specific
protobuf types that can appear in grpc metadata as follows in your yaml
file: Example: context: rules: - selector:
"google.example.library.v1.LibraryService.CreateBook"
allowed_request_extensions: - google.foo.v1.NewExtension
allowed_response_extensions: - google.foo.v1.NewExtension You can
also specify extension ID instead of fully qualified extension name here.
Fields:
rules: A list of RPC context rules that apply to individual API methods.
**NOTE:** All service configuration rules follow "last one wins" order. | 625990638e7ae83300eea79d |
class Bluewave(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def getDefaultManager(): <NEW_LINE> <INDENT> global defaultManager <NEW_LINE> if defaultManager == None: <NEW_LINE> <INDENT> defaultManager = BluewaveManager() <NEW_LINE> <DEDENT> return defaultManager | docstring for Bluewave | 625990638a43f66fc4bf389e |
class OpWrappedCache(Operator): <NEW_LINE> <INDENT> Input = InputSlot(level=1) <NEW_LINE> innerBlockShape = InputSlot() <NEW_LINE> outerBlockShape = InputSlot() <NEW_LINE> fixAtCurrent = InputSlot(value = False) <NEW_LINE> Output = OutputSlot(level=1) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super( OpWrappedCache, self ).__init__( *args, **kwargs ) <NEW_LINE> self._innerOperator = OperatorWrapper( OpSlicedBlockedArrayCache, parent=self ) <NEW_LINE> self._innerOperator.Input.connect( self.Input ) <NEW_LINE> self._innerOperator.fixAtCurrent.connect( self.fixAtCurrent ) <NEW_LINE> self._innerOperator.innerBlockShape.connect( self.innerBlockShape ) <NEW_LINE> self._innerOperator.outerBlockShape.connect( self.outerBlockShape ) <NEW_LINE> self.Output.connect( self._innerOperator.Output ) <NEW_LINE> <DEDENT> def execute(self, slot, subindex, roi, destination): <NEW_LINE> <INDENT> assert False, "Shouldn't get here." <NEW_LINE> <DEDENT> def propagateDirty(self, slot, subindex, roi): <NEW_LINE> <INDENT> pass | This quick hack is necessary because there's not currently a way to wrap an OperatorWrapper.
We need to double-wrap the cache, so we need this operator to provide the first level of wrapping. | 625990634e4d562566373b16 |
class NotesExtraZero(models.Model): <NEW_LINE> <INDENT> another_field = models.CharField( 'Note2', null=True, blank=True, max_length=255) <NEW_LINE> book = models.ForeignKey(SortableBook, null=True, on_delete=models.SET_NULL) <NEW_LINE> my_order = models.PositiveIntegerField(blank=False, null=True) <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> ordering = ('my_order', 'another_field') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return 'NotesExtraZero: {0}'.format(self.another_field) | various SortableInlineMixon modes (testing "extra" on admin.Meta) | 62599063627d3e7fe0e0859a |
class OAuth2(object): <NEW_LINE> <INDENT> def __init__(self, client_id, scopes): <NEW_LINE> <INDENT> self.client_id = client_id <NEW_LINE> self.scopes = scopes <NEW_LINE> <DEDENT> def _build_authorization_request_url( self, response_type, redirect_url, state=None ): <NEW_LINE> <INDENT> if response_type not in auth.VALID_RESPONSE_TYPES: <NEW_LINE> <INDENT> message = '{} is not a valid response type.' <NEW_LINE> raise UberIllegalState(message.format(response_type)) <NEW_LINE> <DEDENT> args = OrderedDict([ ('scope', ' '.join(self.scopes)), ('state', state), ('redirect_uri', redirect_url), ('response_type', response_type), ('client_id', self.client_id), ]) <NEW_LINE> return build_url(auth.AUTH_HOST, auth.AUTHORIZE_PATH, args) <NEW_LINE> <DEDENT> def _extract_query(self, redirect_url): <NEW_LINE> <INDENT> qs = urlparse(redirect_url) <NEW_LINE> qs = qs.fragment if isinstance(self, ImplicitGrant) else qs.query <NEW_LINE> query_params = parse_qs(qs) <NEW_LINE> query_params = {qp: query_params[qp][0] for qp in query_params} <NEW_LINE> return query_params | The parent class for all OAuth 2.0 grant types. | 62599063e76e3b2f99fda10f |
class GraspTorStruct(OrderedDict): <NEW_LINE> <INDENT> def __init__(self, tor_struct=None): <NEW_LINE> <INDENT> OrderedDict.__init__(self) <NEW_LINE> if tor_struct: <NEW_LINE> <INDENT> self.fill(tor_struct) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> outstring = "struct(" <NEW_LINE> for v in iter(self.keys()): <NEW_LINE> <INDENT> outstring += v + ": " + repr(self[v]) + ", " <NEW_LINE> <DEDENT> outstring = outstring[:-2] + ")" <NEW_LINE> return outstring <NEW_LINE> <DEDENT> def fill(self, tor_struct): <NEW_LINE> <INDENT> if _debug_: <NEW_LINE> <INDENT> print("GraspTorStruct.fill received: {:}".format(tor_struct)) <NEW_LINE> <DEDENT> for t in tor_struct[1:]: <NEW_LINE> <INDENT> self[t[0]] = GraspTorMember(t) | A container for a GraspTorStruct, that has a number of members. Members are
stored as an OrderedDict. | 625990634428ac0f6e659c43 |
class Menu: <NEW_LINE> <INDENT> fps = 0.1 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.current = 0 <NEW_LINE> self.screens = NAV_SCREENS <NEW_LINE> self.screens.update(EXIT_SCREEN) <NEW_LINE> self.screen_names = list(self.screens.keys()) <NEW_LINE> self.number_of_screens = len(self.screens) - 1 <NEW_LINE> self.selected = None <NEW_LINE> <DEDENT> def process_events(self, events): <NEW_LINE> <INDENT> for event in events: <NEW_LINE> <INDENT> if event == LEFT: <NEW_LINE> <INDENT> self.current -= 1 <NEW_LINE> <DEDENT> elif event == RIGHT: <NEW_LINE> <INDENT> self.current += 1 <NEW_LINE> <DEDENT> elif event == BUTTON: <NEW_LINE> <INDENT> self.selected = self.screen_names[self.current] <NEW_LINE> <DEDENT> <DEDENT> self.current = 0 if self.current > self.number_of_screens else self.current <NEW_LINE> self.current = self.number_of_screens if self.current < 0 else self.current <NEW_LINE> <DEDENT> def run_logic(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update_display(self): <NEW_LINE> <INDENT> return self.screens[self.screen_names[self.current]] | Navigation/ Menu system for choosing a game | 62599063f548e778e596cc99 |
class DeleteBlockOff(DeleteView): <NEW_LINE> <INDENT> model = Appointment <NEW_LINE> template_name = 'HealthApps/confirm.html' <NEW_LINE> success_url = "/calView" <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(DeleteView, self).get_context_data(**kwargs) <NEW_LINE> context['user_type'] = get_user_type(self.request.user) <NEW_LINE> return context <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> if request.POST.get('goback'): <NEW_LINE> <INDENT> url = '/updateBlockOff/' + str(self.get_object().id) <NEW_LINE> return HttpResponseRedirect(url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> CreateLogItem.li_block_off_cancel(self.request.user, self.object) <NEW_LINE> return super(DeleteView, self).post(request, *args, **kwargs) | Deletes the block off | 6259906345492302aabfdbeb |
class MonthlyGenerator(Generator): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_name(prefix, date): <NEW_LINE> <INDENT> return monthly_name(prefix, date) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_day_delta(date): <NEW_LINE> <INDENT> days_in_month = calendar.monthrange(date.year, date.month)[1] <NEW_LINE> return (days_in_month - date.day + 1) | Generator that generates monthly names. | 6259906329b78933be26ac4c |
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = get_user_model() <NEW_LINE> fields = ('email', 'password', 'name') <NEW_LINE> extra_kwargs = {'password': {'write_only': True, 'min_length': 5}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> return get_user_model().objects.create_user(**validated_data) | Serializer for the users objects | 62599063d486a94d0ba2d6d9 |
class Node(object):
    """Singly linked list node holding a payload and a pointer to its successor."""

    def __init__(self, data):
        """Store *data*; the node starts with no successor."""
        self.data = data
        self.next = None

    def __repr__(self):
        """Debug representation, e.g. ``Node('abc')``."""
        return f'Node({self.data!r})'
class MonitoredPath(object):
    """A simple container for all metadata related to a monitored path."""

    def __init__(self, path, event_mask, fsmonitor_ref=None):
        """Store the watched *path*, the events of interest (*event_mask*)
        and an optional backend watch handle (*fsmonitor_ref*)."""
        self.path = path
        self.event_mask = event_mask
        self.fsmonitor_ref = fsmonitor_ref
        # Whether monitoring is currently active for this path (starts False).
        self.monitoring = False
class SensitiveReScrubber(SensitiveStringScrubber):
    """Helper class to find sensitive regular expression matches."""

    def __init__(self, sensitive_res):
        """Compile every pattern in *sensitive_res* once, up front."""
        self.sensitive_res = [re.compile(r) for r in sensitive_res]

    def FindSensitiveStrings(self, text):
        """Return every match of any sensitive pattern in *text*,
        grouped by pattern in registration order."""
        return [match.group()
                for pattern in self.sensitive_res
                for match in pattern.finditer(text)]

    def FilterName(self):
        """Name used to label text scrubbed by this filter."""
        return 'SENSITIVE_RE'
class ImageRepository(AbstractModel):
    """Image repository metadata.

    (Docstring translated from Chinese: "镜像仓库" = image repository.)
    """

    def __init__(self):
        self.Reponame = None
        self.Repotype = None
        self.TagCount = None
        self.IsPublic = None
        self.IsUserFavor = None
        self.IsQcloudOfficial = None
        self.FavorCount = None
        self.PullCount = None
        self.Description = None
        self.CreationTime = None
        self.UpdateTime = None
        self.TcrRepoInfo = None
        self.TcrBindingId = None
        self.ApplicationId = None
        self.ApplicationName = None

    def _deserialize(self, params):
        """Populate attributes from a response dict, warning about unknown keys."""
        self.Reponame = params.get("Reponame")
        self.Repotype = params.get("Repotype")
        self.TagCount = params.get("TagCount")
        self.IsPublic = params.get("IsPublic")
        self.IsUserFavor = params.get("IsUserFavor")
        self.IsQcloudOfficial = params.get("IsQcloudOfficial")
        self.FavorCount = params.get("FavorCount")
        self.PullCount = params.get("PullCount")
        self.Description = params.get("Description")
        self.CreationTime = params.get("CreationTime")
        self.UpdateTime = params.get("UpdateTime")
        if params.get("TcrRepoInfo") is not None:
            self.TcrRepoInfo = TcrRepoInfo()
            self.TcrRepoInfo._deserialize(params.get("TcrRepoInfo"))
        self.TcrBindingId = params.get("TcrBindingId")
        self.ApplicationId = params.get("ApplicationId")
        if params.get("ApplicationName") is not None:
            self.ApplicationName = ScalableRule()
            self.ApplicationName._deserialize(params.get("ApplicationName"))
        # Warn about payload keys that have no matching attribute.
        # (Typo fixes: "memeber_set" -> "member_set", "fileds" -> "fields".)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class TestRiskDeliveryAddress(unittest.TestCase):
    """RiskDeliveryAddress unit test stubs (generated placeholders)."""

    def setUp(self):
        """No fixtures needed yet."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testRiskDeliveryAddress(self):
        """Placeholder for the RiskDeliveryAddress model test."""
        pass
class JSONFileStorage(FileStorage):
    """Utility object to save and load serialized JSON objects stored in a file."""

    def load(self):
        """Read the backing file and return the parsed JSON object."""
        raw = self._load_raw_content()
        return self._deserialize(raw)

    def save(self, content):
        """Serialize *content* to JSON and write it to the backing file."""
        self._save_raw_content(self._serialize(content))

    def _serialize(self, data):
        """Pretty-print *data* as a JSON string (4-space indent)."""
        return json.dumps(data, sort_keys=False, indent=4, separators=(',', ': '))

    def _deserialize(self, string):
        """Parse a JSON string into Python objects."""
        return json.loads(string)
@dataclass
class CyclicLRParams(SchedulerParams):
    """Config for CyclicLR.

    NOTE: the ``scale_fn`` argument is not supported (a callable cannot be
    represented in a config object).

    Field names mirror ``torch.optim.lr_scheduler.CyclicLR``'s constructor
    arguments. It is not derived from Config as it is not a NeMo object
    (and in particular it doesn't need a name).
    """

    base_lr: float = 0.001
    max_lr: float = 0.1
    step_size_up: int = 2000
    # When None, CyclicLR falls back to step_size_up for the downward half.
    step_size_down: Optional[int] = None
    mode: str = 'triangular'
    gamma: float = 1.0
    scale_mode: str = 'cycle'
    cycle_momentum: bool = True
    base_momentum: float = 0.8
    max_momentum: float = 0.9
class PatchManager:
    """Tracks patches added to the model and grows them until completion.

    Attributes:
        ungrown_patches: all patches that are not fully grown yet.
    """

    def __init__(self):
        self.ungrown_patches = []

    def add_patch(self, patch):
        """Register *patch* as not yet grown."""
        self.ungrown_patches.append(patch)

    def remove_patch(self, patch):
        """Forget *patch* (removes the first occurrence)."""
        self.ungrown_patches.remove(patch)

    def grow_patches(self):
        """Grow every pending patch, in random order, until all are grown."""
        # random.sample over the full list is a shuffled copy.
        self.ungrown_patches = random.sample(self.ungrown_patches,
                                             len(self.ungrown_patches))
        while self.ungrown_patches:
            still_growing = []
            for patch in self.ungrown_patches:
                patch.become_patch()
                if not patch.is_grown():
                    still_growing.append(patch)
            self.ungrown_patches = still_growing
class ImageDataset(Dataset):
    """Word-image dataset: grayscale images paired with letter-count labels.

    Each line of *file_name* is ``<image_path> <word>``. The image is loaded
    in grayscale and downsampled once; the label vector stores, at index
    ``c + 1``, how often letter ``c`` (0 = 'a') occurs in the word, and the
    word length at index 0.

    (Docstring corrected: the original said "Face Landmarks dataset",
    which does not match this code.)
    """

    def __init__(self, file_name, length, class_num, transform=None):
        # Index file: one "<path> <word>" entry per line.
        with open(file_name) as fh:
            self.img_and_label = fh.readlines()
        self.length = length
        self.transform = transform
        self.class_num = class_num

    def __len__(self):
        return self.length

    def __getitem__(self, idx):
        img_and_label = self.img_and_label[idx].strip()
        pth, word = img_and_label.split(' ')
        # Flag 0 -> load as grayscale; pyrDown halves the resolution once.
        image = cv2.imread(pth,0)
        image = cv2.pyrDown(image).astype('float32')
        # Map letters to 0..25 ('a' -> 0); assumes ASCII lowercase words — TODO confirm.
        word = [ord(var)-97 for var in word]
        label = np.zeros((self.class_num+1)).astype('float32')
        # Slots 1..class_num hold per-letter counts; slot 0 holds the word length.
        for ln in word:
            label[int(ln+1)] += 1
        label[0] = len(word)
        # unsqueeze adds the channel dimension -> (1, H, W).
        sample = {'image': torch.from_numpy(image).unsqueeze(0), 'label': torch.from_numpy(label)}
        if self.transform:
            sample = self.transform(sample)
        return sample
class UserBooksSessionsListSerializer(serializers.ModelSerializer):
    """Serializer for listing user book sessions, with hyperlinks to each
    session's detail view.

    (Docstring translated from Russian.)
    """

    url = serializers.HyperlinkedIdentityField(view_name='user-session-detail', read_only=True)
    # Flatten related objects to readable strings.
    user = serializers.ReadOnlyField(source='user.username')
    library = serializers.ReadOnlyField(source='library.title')

    class Meta:
        model = UserBookSession
        fields = ('user', 'library', 'is_accepted', 'is_closed', 'created_at', 'url')
class Timeout(socket.timeout):
    """Wrapper for network timeouts.

    This wraps both "socket.timeout" and "asyncio.TimeoutError".
    """

    def __init__(self, message: str) -> None:
        # Keep the human-readable description on the instance.
        self.message = message
        super().__init__()
class Hunk(object):
    """Parsed hunk data container (a hunk starts with ``@@ -R +R @@``)."""

    def __init__(self):
        self.startsrc = None   # first source line of the hunk
        self.linessrc = None   # number of source lines
        self.starttgt = None   # first target line of the hunk
        self.linestgt = None   # number of target lines
        self.invalid = False
        self.hasplus = False
        self.hasminus = False
        self.text = []         # raw hunk body lines

    def originalText(self):
        """Reconstruct the hunk including its ``@@`` header line."""
        header = f"@@ -{self.startsrc},{self.linessrc} +{self.starttgt},{self.linestgt}\n"
        return header + self.printableText()

    def printableText(self):
        """Return the hunk body as a single string."""
        return "".join(self.text)
class TestConverter(TestCase):
    """Tests functionality of i18n/converter.py."""

    def test_converter(self):
        """UpcaseConverter must upper-case plain text while leaving HTML tags,
        printf/format placeholders and HTML entities untouched."""
        c = UpcaseConverter()
        test_cases = [
            # (source, expected)
            ('big bad wolf', 'BIG BAD WOLF'),
            ('big <strong>bad</strong> wolf', 'BIG <strong>BAD</strong> WOLF'),
            ('big <b>bad</b> <i>wolf</i>', 'BIG <b>BAD</b> <i>WOLF</i>'),
            ('big %(adjective)s wolf', 'BIG %(adjective)s WOLF'),
            ('big %(adjective)s %(noun)s', 'BIG %(adjective)s %(noun)s'),
            ('<strong>big</strong> %(adjective)s %(noun)s', '<strong>BIG</strong> %(adjective)s %(noun)s'),
            ('The {0} barn is {1!r}.', 'THE {0} BARN IS {1!r}.'),
            ('<b>&copy; 2013 edX, &#xa0;</b>', '<b>&copy; 2013 EDX, &#xa0;</b>'),
        ]
        for source, expected in test_cases:
            result = c.convert(source)
            self.assertEquals(result, expected)
class UTC(datetime.tzinfo):
    """UTC timezone with an optional fixed offset (in hours)."""

    def __init__(self, offset=0):
        """Create a fixed-offset timezone; *offset* must lie strictly
        between -24 and 24 hours."""
        if offset <= -24 or offset >= 24:
            raise ValueError("offset must be greater than -24 "
                             "and less than 24")
        self.offset = datetime.timedelta(hours=offset)

    def utcoffset(self, dt):
        """Return the fixed offset from UTC."""
        return self.offset

    def dst(self, dt):
        """No daylight saving time is ever applied."""
        return datetime.timedelta(0)

    def tzname(self, dt=None):
        """Return a name such as "UTC", "UTC+05:30" or "UTC-01:02:03"."""
        seconds = SECONDS_PER_DAY * self.offset.days + self.offset.seconds
        if seconds == 0:
            return "UTC"
        sign = "-" if seconds < 0 else "+"
        seconds = abs(seconds)
        hours = int(seconds / SECONDS_PER_HOUR)
        minutes = int((seconds % SECONDS_PER_HOUR) / SECONDS_PER_MINUTE)
        secs = seconds % SECONDS_PER_MINUTE
        if secs:
            return "UTC{}{:02d}:{:02d}:{:02d}".format(sign, hours, minutes, secs)
        return "UTC{}{:02d}:{:02d}".format(sign, hours, minutes)

    def localize(self, dt):
        """Attach this timezone to a naive datetime (no time shift)."""
        return dt.replace(tzinfo=self)

    def __str__(self):
        return self.tzname()

    def __repr__(self):
        total_hours = (HOURS_PER_DAY * self.offset.days
                       + float(self.offset.seconds) / SECONDS_PER_HOUR)
        return f"UTC({total_hours:+.8g})" if total_hours else "UTC()"
class GroupViewSet(mixins.RetrieveModelMixin,
                   mixins.ListModelMixin,
                   mixins.CreateModelMixin,
                   viewsets.GenericViewSet):
    """Admin-only endpoint that lists, retrieves and creates user Groups."""

    queryset = Group.objects.order_by('name')
    serializer_class = GroupSerializer
    permission_classes = (IsAdminUser,)

    def create(self, request, *args, **kwargs):
        """Create a Group, then best-effort attach any permission ids
        supplied in ``request.data['permissions']``."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.instance = serializer.save()
        # Snapshot taken before permissions are attached — the response body
        # will not reflect the added permissions.
        data = serializer.data
        try:
            permissions = request.data['permissions']
            for perm in permissions:
                serializer.instance.permissions.add(perm)
            serializer.save()
        except Exception:
            # NOTE(review): broad except silently ignores both a missing
            # 'permissions' key and invalid permission ids — confirm this
            # best-effort behaviour is intended.
            pass
        headers = self.get_success_headers(serializer.data)
        return Response(data, status=status.HTTP_201_CREATED, headers=headers)
class ExistingSourceError(PyfaError):
    """Raised on an attempt to add a source with an alias which already exists."""
    pass
class ProdConfig(Config):
    """Production configuration.

    Inherits the general settings from ``Config`` and points SQLAlchemy at
    the database given by the ``DATABASE_URL`` environment variable
    (None if the variable is unset).
    """

    SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
class FirstLevelSpells:
    """First-level spell class.

    Intended to define first-level spells as attributes whose values are
    their incantations; no spells have been added yet.
    """
    pass
class User:
    """Simple wrapper for user database listings.

    A row is ordered: username text, name text, avatar text, email text,
    summary text, id int, extra — where ``extra`` packs per-user options
    (tags, invite/email visibility, reputation).
    """

    # Column order of a user row as stored in the database.
    headers = ['username', 'name', 'avatar', 'email', 'summary', 'id', 'extra']

    def __init__(self, data):
        """Populate attributes from a raw row *data*, ordered per ``headers``."""
        palette = iter(['green', 'red', 'blue', '#EFE630', '#E10D87', '#11E3D3', '#E49434'])
        options = collections.namedtuple(
            'options', ['tags', 'receive_invites', 'display_email', 'reputation'])
        for name, value in zip(self.headers, data):
            if name != 'extra':
                setattr(self, name, value)
            elif value is not None:
                # Pair each tag with the next palette colour ('blue' once exhausted).
                tags = [[tag, next(palette, 'blue')] for tag in value['tags']]
                self.extra = options(tags,
                                     value.get('receive_invites', 0),
                                     value.get('display_email', 0),
                                     value.get('rep', 0))
            else:
                # BUG FIX: the original fallback passed only three of the four
                # namedtuple fields and raised TypeError; reputation now
                # defaults to 0.
                self.extra = options([],
                                     ["I wish to receive developer invitations"],
                                     ["Show email on profile"],
                                     0)

    @property
    def tag_length(self):
        """Number of non-empty tag entries."""
        return len([i for i in self.extra.tags if i and i[0]])

    @property
    def score(self):
        """Profile-completeness percentage (weights sum to 31).

        NOTE(review): relies on ``extra.display_email`` being a list whose
        first element is the literal opt-in string; a numeric truthy value
        would raise here — confirm upstream always stores the list form.
        """
        score_sheet = {'summary':12, 'email':5, 'is_visible':6, 'tags':8}
        return (sum(score_sheet[a] if a in ['summary', 'email'] and b else (14 if a == 'extra' and b.tags and b.display_email and b.display_email[0] == 'Show email on profile' else 6 if a == 'extra' and b.display_email and b.display_email[0] == 'Show email on profile' and not b.tags else (8 if a == 'extra' and b.tags and not b.display_email else 0)) for a, b in self.__dict__.items())/float(31))*100

    @classmethod
    def get_headers(cls):
        """Return the database column order."""
        return cls.headers
class DATA_OT_jet_low_res_list_hide(Operator):
    """Blender operator: show or hide every object in the low-res list."""

    bl_idname = "low_res_obj_list_hide.btn"
    bl_label = "Show/Hide Objects"
    bl_description = "Show/Hide Objects all object from the list"
    # Whether to hide (True) or show (False) the listed objects.
    hide = BoolProperty(default=False)

    @classmethod
    def poll(cls, context):
        """Enable the operator only when the low-res object list is non-empty."""
        prop = context.scene.Jet.list_low_res
        return len(prop.obj_list)>0

    def execute(self, context):
        """Apply the show/hide flag to all objects in the list."""
        prop = context.scene.Jet.list_low_res
        hide_objs(prop, self.hide)
        return {'FINISHED'}
class Task(models.Model):
    """A task that needs to be done."""

    date_created = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    # Set when the task is completed; empty while the task is open.
    date_done = models.DateTimeField(blank=True, null=True)
    description = models.CharField('What needs to be done?', max_length=255)
    is_checked = models.BooleanField(default=False)
    user = models.ForeignKey(User, default=None)
    objects = TaskManager()

    def __unicode__(self):
        return u'%s' % self.description
class MappingAction(APIView):
    """Generic API endpoint for the login/update/logout actions."""

    def post(self, request, format=None):
        """Record a username->fullname mapping, expire the user's active
        mappings, and create a new one (pre-expired for 'logout')."""
        action = request.data.get('action')
        username = request.data.get('username')
        # BUG FIX: dict.get() never raises KeyError, so the original
        # try/except validation could never fire; check explicitly instead.
        if action is None or username is None:
            raise APIException('An action and username are required')
        Fullname.objects.update_or_create(username=username, defaults={
            'username': username,
            'fullname': request.data.get('fullname', '')
        })
        logger.debug(request.data.get('fullname'))
        # Expire any previous active mappings for this user.
        Mapping.objects.filter(username=username).filter(expired=False).update(expired=True)
        new_mapping_serializer = MappingSerializer(data=request.data)
        if new_mapping_serializer.is_valid():
            if action == 'logout':
                # A logout mapping is recorded already expired.
                new_mapping_serializer.save(expired=True)
            else:
                new_mapping_serializer.save()
            return Response('success')
        else:
            return Response(new_mapping_serializer.errors)
class Task(EndpointsModel):
    """Model storing a single task (Google Cloud Endpoints)."""

    # Fields exposed (and their order) in the Endpoints message schema.
    _message_fields_schema = ('id', 'text', 'task_list_id', 'details', 'complete', 'assigned_to_email', 'created')

    text = ndb.StringProperty(required=True)
    task_list_id = ndb.IntegerProperty(required=True)
    details = ndb.StringProperty(required=False)
    complete = ndb.BooleanProperty(default=False)
    assigned_to_email = ndb.StringProperty(required=False)
    # NOTE(review): auto_now updates this on every save; auto_now_add may
    # have been intended for a "created" field — confirm.
    created = ndb.DateTimeProperty(auto_now=True)
    creator = ndb.UserProperty(required=False)
class InvalidQueryError(BaseMongoSqlException):
    """Invalid input provided by the user."""

    def __init__(self, err: str):
        # Prefix the underlying error with a uniform marker.
        super().__init__('Query object error: {err}'.format(err=err))
class ListAPIMixin(mixins.ListModelMixin):
    """Mixin for any endpoint which returns a list of objects from a GET request."""

    # Query params of which at most one may be supplied.
    exclusive_params = ()
    # Query params of which exactly one must be supplied (when non-empty).
    required_params = ()

    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def list(self, request, *args, **kwargs):
        self.check_query(self.request.query_params)
        if not kwargs.get('format', None):
            # No explicit format requested: return an empty list —
            # presumably to keep the format-less endpoint cheap; confirm.
            return Response([])
        else:
            return super(ListAPIMixin, self).list(request, *args, **kwargs)

    def check_query(self, params):
        """Validate mutually-exclusive and required query parameters."""
        if sum([(1 if params.get(p) else 0) for p in self.exclusive_params]) > 1:
            raise InvalidQueryError("You may only specify one of the %s parameters" % ", ".join(self.exclusive_params))
        if self.required_params:
            if sum([(1 if params.get(p) else 0) for p in self.required_params]) != 1:
                raise InvalidQueryError("You must specify one of the %s parameters" % ", ".join(self.required_params))

    def filter_before_after(self, queryset, field):
        """Filter *queryset* by optional 'before'/'after' datetime params on *field*.

        An unparseable value deliberately yields an empty queryset
        (filter on pk=-1) instead of an error.
        """
        before = self.request.query_params.get('before')
        if before:
            try:
                before = json_date_to_datetime(before)
                queryset = queryset.filter(**{field + '__lte': before})
            except Exception:
                queryset = queryset.filter(pk=-1)
        after = self.request.query_params.get('after')
        if after:
            try:
                after = json_date_to_datetime(after)
                queryset = queryset.filter(**{field + '__gte': after})
            except Exception:
                queryset = queryset.filter(pk=-1)
        return queryset

    def paginate_queryset(self, queryset):
        """Paginate, then give subclasses a chance to prepare the page."""
        page = super(ListAPIMixin, self).paginate_queryset(queryset)
        self.prepare_for_serialization(page)
        return page

    def prepare_for_serialization(self, page):
        """Hook for subclasses to bulk-load data needed by the serializer."""
        pass
class GrainsTargetingTest(ShellCase):
    """Integration tests for targeting minions with grains."""

    def test_grains_targeting_os_running(self):
        """'-G os:<os>' must match both running minions."""
        test_ret = ['sub_minion:', '    True', 'minion:', '    True']
        os_grain = ''
        # Read the 'os' grain from the main minion to build the target.
        for item in self.run_salt('minion grains.get os'):
            if item != 'minion:':
                os_grain = item.strip()
        ret = self.run_salt('-G \'os:{0}\' test.ping'.format(os_grain))
        self.assertEqual(sorted(ret), sorted(test_ret))

    def test_grains_targeting_minion_id_running(self):
        """'-G id:<id>' must match each minion individually."""
        minion = self.run_salt('-G \'id:minion\' test.ping')
        self.assertEqual(sorted(minion), sorted(['minion:', '    True']))
        sub_minion = self.run_salt('-G \'id:sub_minion\' test.ping')
        self.assertEqual(sorted(sub_minion), sorted(['sub_minion:', '    True']))

    def test_grains_targeting_disconnected(self):
        """Targeting a disconnected minion must report 'No response'."""
        test_ret = 'Minion did not return. [No response]'
        # Create an empty key file so the master believes the minion exists.
        key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'disconnected')
        with salt.utils.files.fopen(key_file, 'a'):
            pass
        try:
            ret = ''
            for item in self.run_salt('-t 1 -G \'id:disconnected\' test.ping', timeout=40):
                if item != 'disconnected:':
                    ret = item.strip()
            self.assertEqual(ret, test_ret)
        finally:
            # Always remove the fake key file.
            os.unlink(key_file)
class ObtainJSONWebToken(CustomJSONWebTokenAPIView):
    """API view that receives a POST with a user's username and password.

    Returns a JSON Web Token that can be used for authenticated requests.
    """

    serializer_class = JSONWebTokenSerializer
class EventContactUpdated(object):
    """Swagger model for a contact-updated event.

    NOTE: This class is auto generated by the swagger code generator
    program. Do not edit the class manually.
    """

    # Attribute name -> declared swagger type.
    swagger_types = {
        'id': 'str',
        'created': 'datetime',
        'type': 'str',
        'contact': 'ContactBase'
    }

    # Attribute name -> JSON key used on the wire.
    attribute_map = {
        'id': 'id',
        'created': 'created',
        'type': '$type',
        'contact': 'contact'
    }

    def __init__(self, id=None, created=None, type=None, contact=None):
        """EventContactUpdated - a model defined in Swagger; *type* is required."""
        self._id = None
        self._created = None
        self._type = None
        self._contact = None
        if id is not None:
            self.id = id
        if created is not None:
            self.created = created
        self.type = type
        if contact is not None:
            self.contact = contact

    @property
    def id(self):
        """Event id."""
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def created(self):
        """Creation timestamp."""
        return self._created

    @created.setter
    def created(self, created):
        self._created = created

    @property
    def type(self):
        """Event type discriminator (serialized as '$type'); required."""
        return self._type

    @type.setter
    def type(self, type):
        # Reject None: 'type' is the only required field of this model.
        if type is None:
            raise ValueError("Invalid value for `type`, must not be `None`")
        self._type = type

    @property
    def contact(self):
        """The updated contact payload."""
        return self._contact

    @contact.setter
    def contact(self, contact):
        self._contact = contact

    def to_dict(self):
        """Return the model's properties as a dict (recursing into sub-models)."""
        result = {}
        # NOTE(review): `iteritems` presumably comes from a module-level
        # `from six import iteritems` — confirm.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Equal iff *other* is the same model class with equal attributes."""
        if not isinstance(other, EventContactUpdated):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
class RT(Enum):
    """Enum of readtable entry types.

    Members classify readtable entries by role: whitespace, newlines,
    (isolated) constituents, macros, invalid characters, closing
    delimiters and punctuation.
    """

    WHITESPACE = 1
    NEWLINE = 2
    ISOLATED_CONSTITUENT = 3
    CONSTITUENT = 4
    MACRO = 5
    INVALID = 6
    CLOSING = 7
    PUNCTUATION = 8
class ProductComment(models.Model):
    """A comment on a product, optionally linked to a Facebook identity."""

    product = models.ForeignKey('Product')
    # Facebook user id of the commenter, when posted via Facebook.
    facebook_id = models.CharField(max_length=30, blank=True)
    name = models.CharField(max_length=400)
    # NOTE(review): verify_exists is a legacy (pre-Django 1.4) URLField option.
    thumb_url = models.URLField(verify_exists=False, blank=True)
    comment = models.TextField()
    created_ts = models.DateTimeField(auto_now_add=True)
    updated_ts = models.DateTimeField(auto_now=True)
    # Soft-delete flag (ProductCommentManager presumably filters on this — confirm).
    is_active = models.BooleanField(default=True)
    objects = ProductCommentManager()

    class Meta:
        ordering = ['created_ts']
        verbose_name = u'comment'

    def __unicode__(self):
        return '%s comment (%s)' % (self.product.name, self.created_ts)
class FronteggRESTClient(BaseFronteggClient[None]):
    """A standalone Frontegg REST client."""

    def __init__(self,
                 client_id: str,
                 api_key: str,
                 context_callback: typing.Optional[typing.Callable[[
                     RequestT], FronteggContext]] = None,
                 base_url: str = 'https://api.frontegg.com/',
                 authentication_service_url: typing.Optional[str] = None) -> None:
        """Configure the client; the auth URL defaults to the vendor token
        endpoint derived from *base_url*."""
        super().__init__(client_id, api_key, context_callback)
        self._base_url = base_url
        self._authentication_service_url = (
            authentication_service_url
            or urljoin(base_url, '/vendors/auth/token'))

    @property
    def base_url(self) -> str:
        """Base URL of the Frontegg API."""
        return self._base_url

    @property
    def authentication_service_url(self) -> str:
        """URL used to obtain vendor authentication tokens."""
        return self._authentication_service_url

    @property
    def current_request(self):
        """A standalone client has no active request context."""
        return None
class FUNCTION(object):
    """Proxy of C++ FUNCTION class (SWIG-generated binding; do not hand-edit)."""

    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    # Mapped C++ members: function address range and instruction count.
    start = _swig_property(_x64dbgapi.FUNCTION_start_get, _x64dbgapi.FUNCTION_start_set)
    end = _swig_property(_x64dbgapi.FUNCTION_end_get, _x64dbgapi.FUNCTION_end_set)
    instrcount = _swig_property(_x64dbgapi.FUNCTION_instrcount_get, _x64dbgapi.FUNCTION_instrcount_set)

    def __init__(self):
        """Construct the underlying C++ FUNCTION object."""
        this = _x64dbgapi.new_FUNCTION()
        try: self.this.append(this)
        except: self.this = this

    __swig_destroy__ = _x64dbgapi.delete_FUNCTION
    __del__ = lambda self : None;
class HERAgent(GCDQNAgent):
    """Goal-conditioned DQN agent with Hindsight Experience Replay (HER).

    Records the episode's trajectory and, when the episode ends, re-stores
    each transition several times with goals re-sampled from states reached
    later in the same trajectory. The approximated Q-function is used to
    pick the best action in a given state.
    """

    def __init__(self, state_space, action_space, goal_size=None, name="DQN + HER", gamma=0.95,
                 epsilon_min=0.01, epsilon_max=1., epsilon_decay_period=1000, epsilon_decay_delay=20,
                 buffer_size=1000000, learning_rate=0.001, update_target_freq=100, batch_size=125,
                 layer_1_size=250, layer_2_size=200, nb_gradient_steps=1):
        """Forward all hyper-parameters to GCDQNAgent and set up HER state."""
        super().__init__(state_space, action_space, goal_size=goal_size, name=name, gamma=gamma,
                         epsilon_min=epsilon_min, epsilon_max=epsilon_max,
                         epsilon_decay_period=epsilon_decay_period,
                         epsilon_decay_delay=epsilon_decay_delay, buffer_size=buffer_size,
                         learning_rate=learning_rate, update_target_freq=update_target_freq,
                         batch_size=batch_size, layer_1_size=layer_1_size,
                         layer_2_size=layer_2_size, nb_gradient_steps=nb_gradient_steps)
        # (state, action) pairs collected during the current episode.
        self.last_trajectory = []
        # How many hindsight goals are re-sampled for each stored transition.
        self.nb_resample_per_states = 5

    def action(self, state):
        """Delegate action selection to the goal-conditioned DQN."""
        return super().action(state)

    def on_episode_start(self, episode_info, episode_id):
        """Reset the recorded trajectory at the start of each episode."""
        res = super().on_episode_start(episode_info, episode_id)
        self.last_trajectory = []
        return res

    def on_action_stop(self, action, new_state, reward, done):
        """Record the executed (state, action) pair before the usual update."""
        self.last_trajectory.append((self.last_state, action))
        return super().on_action_stop(action, new_state, reward, done)

    def on_episode_stop(self):
        """HER relabelling: replay the episode with goals sampled from its own future states."""
        if len(self.last_trajectory) <= self.nb_resample_per_states:
            return
        # NOTE(review): the slice [:-4] skips the last four transitions —
        # presumably to guarantee future states exist to sample; confirm.
        for state_index, (state, action) in enumerate(self.last_trajectory[:-4]):
            new_state_index = state_index + 1
            new_state, _ = self.last_trajectory[new_state_index]
            for relabelling_id in range(self.nb_resample_per_states):
                # Pick a goal among states reached at or after new_state.
                goal_index = randrange(new_state_index, len(self.last_trajectory))
                goal, _ = self.last_trajectory[goal_index]
                # Reward grows as the transition index approaches the goal index.
                reward = new_state_index / goal_index
                # The last two state components serve as the goal
                # representation — assumes goal = 2-D position; TODO confirm.
                self.replay_buffer.append(state, action, reward, new_state,
                                          goal_index == new_state_index, goal[-2:])
class Order(ARMBaseModel): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'current_status': {'readonly': True}, 'order_history': {'readonly': True}, 'serial_number': {'readonly': True}, 'delivery_tracking_info': {'readonly': True}, 'return_tracking_info': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'contact_information': {'key': 'properties.contactInformation', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'properties.shippingAddress', 'type': 'Address'}, 'current_status': {'key': 'properties.currentStatus', 'type': 'OrderStatus'}, 'order_history': {'key': 'properties.orderHistory', 'type': '[OrderStatus]'}, 'serial_number': {'key': 'properties.serialNumber', 'type': 'str'}, 'delivery_tracking_info': {'key': 'properties.deliveryTrackingInfo', 'type': '[TrackingInfo]'}, 'return_tracking_info': {'key': 'properties.returnTrackingInfo', 'type': '[TrackingInfo]'}, 'shipment_type': {'key': 'properties.shipmentType', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(Order, self).__init__(**kwargs) <NEW_LINE> self.contact_information = kwargs.get('contact_information', None) <NEW_LINE> self.shipping_address = kwargs.get('shipping_address', None) <NEW_LINE> self.current_status = None <NEW_LINE> self.order_history = None <NEW_LINE> self.serial_number = None <NEW_LINE> self.delivery_tracking_info = None <NEW_LINE> self.return_tracking_info = None <NEW_LINE> self.shipment_type = kwargs.get('shipment_type', None) | The order details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The path ID that uniquely identifies the object.
:vartype id: str
:ivar name: The object name.
:vartype name: str
:ivar type: The hierarchical type of the object.
:vartype type: str
:param contact_information: The contact details.
:type contact_information: ~azure.mgmt.databoxedge.v2020_09_01_preview.models.ContactDetails
:param shipping_address: The shipping address.
:type shipping_address: ~azure.mgmt.databoxedge.v2020_09_01_preview.models.Address
:ivar current_status: Current status of the order.
:vartype current_status: ~azure.mgmt.databoxedge.v2020_09_01_preview.models.OrderStatus
:ivar order_history: List of status changes in the order.
:vartype order_history: list[~azure.mgmt.databoxedge.v2020_09_01_preview.models.OrderStatus]
:ivar serial_number: Serial number of the device.
:vartype serial_number: str
:ivar delivery_tracking_info: Tracking information for the package delivered to the customer
whether it has an original or a replacement device.
:vartype delivery_tracking_info:
list[~azure.mgmt.databoxedge.v2020_09_01_preview.models.TrackingInfo]
:ivar return_tracking_info: Tracking information for the package returned from the customer
whether it has an original or a replacement device.
:vartype return_tracking_info:
list[~azure.mgmt.databoxedge.v2020_09_01_preview.models.TrackingInfo]
:param shipment_type: ShipmentType of the order. Possible values include: "NotApplicable",
"ShippedToCustomer", "SelfPickup".
:type shipment_type: str or ~azure.mgmt.databoxedge.v2020_09_01_preview.models.ShipmentType | 625990637d847024c075dae9 |
class LambdaActionStep(ActionStep): <NEW_LINE> <INDENT> def __init__(self, action: Callable, compensation: Callable, **action_step_kwargs): <NEW_LINE> <INDENT> super().__init__(**action_step_kwargs) <NEW_LINE> self.__action = action <NEW_LINE> self.__compensation = compensation <NEW_LINE> <DEDENT> @property <NEW_LINE> def _action(self) -> Callable[..., Dict[any, any]]: <NEW_LINE> <INDENT> return self.__action <NEW_LINE> <DEDENT> @property <NEW_LINE> def _compensation(self) -> Callable[..., bool]: <NEW_LINE> <INDENT> return self.__compensation | The LambdaActionStep class represents an abstract action that gets an action and compensation as a callable.
The LambdaActionStep handles the logic of act and compensate with the given callables. | 625990633d592f4c4edbc5f0 |
class Splitter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Splitter, self).__init__() <NEW_LINE> <DEDENT> def __call__(self, f): <NEW_LINE> <INDENT> Parser.splitters.append(f) <NEW_LINE> return f | Decorator for a Splitter | 62599063627d3e7fe0e0859e |
class GacsIgslAnalysisJobForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = GacsIgslAnalysisJob <NEW_LINE> exclude = ['request', 'expected_runtime'] | Form used to start GACS-dev IGSL jobs | 62599063e76e3b2f99fda113 |
class CacheImpl: <NEW_LINE> <INDENT> def __init__(self, cache): <NEW_LINE> <INDENT> self.cache = cache <NEW_LINE> <DEDENT> pass_context = False <NEW_LINE> def get_or_create(self, key, creation_function, **kw): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def set(self, key, value, **kw): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get(self, key, **kw): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def invalidate(self, key, **kw): <NEW_LINE> <INDENT> raise NotImplementedError() | Provide a cache implementation for use by :class:`.Cache`. | 625990634e4d562566373b1b |
class EditPostForm(CreatePostForm): <NEW_LINE> <INDENT> pic_forms = FieldList(FormField(EditImageDataForm)) <NEW_LINE> submit = SubmitField("Update") | Post editing and updating posts. | 6259906367a9b606de54762c |
class ADUsersService(UsersService): <NEW_LINE> <INDENT> readonly_fields = ['username', 'display_name', 'password', 'email', 'phone', 'first_name', 'last_name'] <NEW_LINE> def on_fetched(self, doc): <NEW_LINE> <INDENT> super().on_fetched(doc) <NEW_LINE> for document in doc['_items']: <NEW_LINE> <INDENT> document['_readonly'] = ADUsersService.readonly_fields <NEW_LINE> <DEDENT> <DEDENT> def on_fetched_item(self, doc): <NEW_LINE> <INDENT> super().update_user_defaults(doc) <NEW_LINE> doc['_readonly'] = ADUsersService.readonly_fields | Service class for UsersResource and should be used when AD is active. | 625990636e29344779b01d63 |
class ShowMacAddressTableLearningSchema(MetaParser): <NEW_LINE> <INDENT> schema = { 'vlans': { Any(): { 'mac_learning': bool, 'vlan': Or(int, str) } } } | Schema for show mac address-table learning | 62599063460517430c432bde |
class OrderPayView(View): <NEW_LINE> <INDENT> def post(self, request): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> if not user.is_authenticated(): <NEW_LINE> <INDENT> return JsonResponse({'res': 0, 'errmsg': '用户未登录'}) <NEW_LINE> <DEDENT> order_id = request.POST.get('order_id') <NEW_LINE> if not all([order_id]): <NEW_LINE> <INDENT> return JsonResponse({'res': 1, 'errmsg': '数据不完整'}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> order = OrderInfo.objects.get(order_id=order_id, user=user, pay_method=3, order_status=1) <NEW_LINE> <DEDENT> except OrderInfo.DoesNotExist: <NEW_LINE> <INDENT> return JsonResponse({'res': 2, 'errmsg': '订单信息错误'}) <NEW_LINE> <DEDENT> alipay = AliPay( appid="2016090800464054", app_notify_url=None, app_private_key_path=os.path.join(settings.BASE_DIR, 'apps/order/app_private_key.pem'), alipay_public_key_path=os.path.join(settings.BASE_DIR, 'apps/order/alipay_public_key.pem'), sign_type="RSA2", debug=True ) <NEW_LINE> total_amount = order.total_price + order.transit_price <NEW_LINE> order_string = alipay.api_alipay_trade_page_pay( out_trade_no=order_id, total_amount=str(total_amount), subject='天天生鲜%s'%order_id, return_url=None, notify_url=None ) <NEW_LINE> pay_url = "https://openapi.alipaydev.com/gateway.do?" + order_string <NEW_LINE> return JsonResponse({'res': 3, 'pay_url': pay_url}) | 订单支付 | 6259906329b78933be26ac4e |
class TestPOSTOrderPreviewRequestTypeSubscriptions(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testPOSTOrderPreviewRequestTypeSubscriptions(self): <NEW_LINE> <INDENT> pass | POSTOrderPreviewRequestTypeSubscriptions unit test stubs | 625990633539df3088ecd9b2 |
class EventCreate(CreateView): <NEW_LINE> <INDENT> form_class = EventForm <NEW_LINE> model = Event <NEW_LINE> template_name = 'evenio/event_form.html' <NEW_LINE> context_object_name = 'event' | Creates an event
| 625990631f5feb6acb1642ff |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.