code (string, lengths 4–4.48k) | docstring (string, lengths 1–6.45k) | _id (string, length 24) |
---|---|---|
class RequiredIfActive(object): <NEW_LINE> <INDENT> missing_message = _('Required when active.') <NEW_LINE> def __init__(self, fields): <NEW_LINE> <INDENT> self.fields = fields <NEW_LINE> self.serializer_field = None <NEW_LINE> <DEDENT> def set_context(self, serializer): <NEW_LINE> <INDENT> self.instance = getattr(serializer, 'instance', None) <NEW_LINE> <DEDENT> def enforce_required_fields(self, attrs): <NEW_LINE> <INDENT> missing_items = { field_name: self.missing_message for field_name in self.fields if field_name not in attrs or attrs[field_name] == None or attrs[field_name] == "" } <NEW_LINE> if missing_items: <NEW_LINE> <INDENT> raise ValidationError(missing_items, code='required') <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, attrs): <NEW_LINE> <INDENT> if 'is_active' in attrs and attrs['is_active']: <NEW_LINE> <INDENT> self.enforce_required_fields(attrs) | This validator makes it easy to add required fields to an Integration
serializer that are only required when the integration `is_active`. | 625990643cc13d1c6d466e63 |
class PreWikiCloseParams(object): <NEW_LINE> <INDENT> def __init__(self, wikiroot: 'outwiker.core.tree.WikiDocument'): <NEW_LINE> <INDENT> self.wikiroot = wikiroot <NEW_LINE> self.abortClose = False | Parameters set for onPreWikiClose event | 62599064a8ecb03325872938 |
class PrivateEndpointConnection(ProxyResource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpointProperty'}, 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionStateProperty'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(PrivateEndpointConnection, self).__init__(**kwargs) <NEW_LINE> self.private_endpoint = kwargs.get('private_endpoint', None) <NEW_LINE> self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) <NEW_LINE> self.provisioning_state = None | A private endpoint connection.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
"Microsoft.Storage/storageAccounts".
:vartype type: str
:param private_endpoint: Private endpoint which the connection belongs to.
:type private_endpoint: ~azure.mgmt.rdbms.mariadb.models.PrivateEndpointProperty
:param private_link_service_connection_state: Connection state of the private endpoint
connection.
:type private_link_service_connection_state:
~azure.mgmt.rdbms.mariadb.models.PrivateLinkServiceConnectionStateProperty
:ivar provisioning_state: State of the private endpoint connection.
:vartype provisioning_state: str | 625990641f5feb6acb16430b |
class MessageEntity(_MessageEntityBase): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> @staticmethod <NEW_LINE> def from_result(result): <NEW_LINE> <INDENT> if result is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return MessageEntity( type=result.get('type'), offset=result.get('offset'), length=result.get('length'), url=result.get('url'), user=User.from_result(result.get('user')) ) | This object represents one special entity in a text message.
Attributes:
type (str) :Type of the entity. One of mention (@username), hashtag, bot_command, url, email, bold (bold text),
italic (italic text), code (monowidth string), pre (monowidth block), text_link (for clickable text URLs),
text_mention (for users without usernames)
offset (int) :Offset in UTF-16 code units to the start of the entity
length (int) :Length of the entity in UTF-16 code units
url (str) :*Optional.* For “text_link” only, url that will be opened after user taps on the text
user (User) :*Optional.* For “text_mention” only, the mentioned user | 625990642ae34c7f260ac808 |
class Workspace(_Workspace): <NEW_LINE> <INDENT> TASKNAME = 'refresh-pool-avoid-groups-hostnames-dispatcher' <NEW_LINE> @step <NEW_LINE> def entry(self) -> None: <NEW_LINE> <INDENT> self.handle_success('entered-task') <NEW_LINE> <DEDENT> @step <NEW_LINE> def dispatch_refresh(self) -> None: <NEW_LINE> <INDENT> for pool in self.pools: <NEW_LINE> <INDENT> if self.result: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if not isinstance(pool, BeakerDriver): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.dispatch_task( refresh_pool_avoid_groups_hostnames, pool.poolname, logger=get_pool_logger(Workspace.TASKNAME, self.logger, pool.poolname) ) <NEW_LINE> <DEDENT> <DEDENT> @step <NEW_LINE> def exit(self) -> None: <NEW_LINE> <INDENT> self.result = self.handle_success('finished-task') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create( cls, logger: gluetool.log.ContextAdapter, db: DB, session: sqlalchemy.orm.session.Session, cancel: threading.Event ) -> 'Workspace': <NEW_LINE> <INDENT> return cls(logger, session, cancel, db=db, task=Workspace.TASKNAME) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def refresh_pool_avoid_groups_hostnames_dispatcher( cls, logger: gluetool.log.ContextAdapter, db: DB, session: sqlalchemy.orm.session.Session, cancel: threading.Event ) -> DoerReturnType: <NEW_LINE> <INDENT> return cls.create(logger, db, session, cancel) .entry() .load_pools() .dispatch_refresh() .exit() .final_result | Workspace for hostname groups refresh dispatcher. | 62599064442bda511e95d8ea |
class CheckIfFileExists(Action): <NEW_LINE> <INDENT> def execute(self, context, obj): <NEW_LINE> <INDENT> connection = S3Connection() <NEW_LINE> bucket = Bucket(connection=connection, name=context['bucket']) <NEW_LINE> key = Key(bucket=bucket, name=context['name']) <NEW_LINE> if key.exists(): <NEW_LINE> <INDENT> return 'done' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'missing' | Checks if the file exists. | 625990648e71fb1e983bd1eb |
class NexellFastbootBootAction(Action): <NEW_LINE> <INDENT> def __init__(self,parameters): <NEW_LINE> <INDENT> super(NexellFastbootBootAction, self).__init__() <NEW_LINE> self.name = "nexell-boot-on-uboot" <NEW_LINE> self.summary = "attempt to boot" <NEW_LINE> self.description = "nexell boot into system" <NEW_LINE> self.command = '' <NEW_LINE> self.cmd_script = parameters['nexell_ext']['command'] <NEW_LINE> self.cmd_param = parameters['nexell_ext']['command_param'] <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> super(NexellFastbootBootAction, self).validate() <NEW_LINE> <DEDENT> def run(self, connection, args=None): <NEW_LINE> <INDENT> connection = super(NexellFastbootBootAction, self).run(connection, args) <NEW_LINE> cmd = [self.cmd_script, self.cmd_param] <NEW_LINE> command_output = self.run_command(cmd) <NEW_LINE> self.data['boot-result'] = 'success' <NEW_LINE> return connection | This action calls fastboot to boot into the system. | 625990643eb6a72ae038bd80 |
@dataclass <NEW_LINE> class TAITime: <NEW_LINE> <INDENT> seconds: int <NEW_LINE> nanos: int <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> secs = self.seconds + self.nanos / 1e9 <NEW_LINE> dt = datetime.fromtimestamp(secs, timezone.utc) <NEW_LINE> return dt.strftime('%Y-%m-%dT%H:%M:%S.%f') + "Z" <NEW_LINE> <DEDENT> def _asDict(self): <NEW_LINE> <INDENT> return asdict(self) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _fromDict(obj: dict): <NEW_LINE> <INDENT> return TAITime(**obj) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_str(timeStr: str): <NEW_LINE> <INDENT> t = parser.isoparse(timeStr).timestamp() <NEW_LINE> seconds = int(t) <NEW_LINE> nanos = int((t - seconds) * 1e9) <NEW_LINE> return TAITime(seconds, nanos) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fromSystem(): <NEW_LINE> <INDENT> t = Time.now().tai.value.timestamp() <NEW_LINE> seconds = int(t) <NEW_LINE> nanos = int((t - seconds) * 1e9) <NEW_LINE> return TAITime(seconds, nanos) | Creates a TAITime containing seconds since the epoch (1970) and the offset from seconds in nanoseconds | 62599064f7d966606f74944a |
class OffPolicyRLModel(BaseRLModel): <NEW_LINE> <INDENT> def __init__(self, policy, env, replay_buffer, verbose=0, *, requires_vec_env, policy_base, policy_kwargs=None): <NEW_LINE> <INDENT> super(OffPolicyRLModel, self).__init__(policy, env, verbose=verbose, requires_vec_env=requires_vec_env, policy_base=policy_base, policy_kwargs=policy_kwargs) <NEW_LINE> self.replay_buffer = replay_buffer <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def setup_model(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def learn(self, total_timesteps, callback=None, seed=None, log_interval=100, tb_log_name="run", reset_num_timesteps=True): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def predict(self, observation, state=None, mask=None, deterministic=False): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def action_probability(self, observation, state=None, mask=None, actions=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def save(self, save_path): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> @abstractmethod <NEW_LINE> def load(cls, load_path, env=None, **kwargs): <NEW_LINE> <INDENT> pass | The base class for off policy RL model
:param policy: (BasePolicy) Policy object
:param env: (Gym environment) The environment to learn from
(if registered in Gym, can be str. Can be None for loading trained models)
:param replay_buffer: (ReplayBuffer) the type of replay buffer
:param verbose: (int) the verbosity level: 0 none, 1 training information, 2 tensorflow debug
:param requires_vec_env: (bool) Does this model require a vectorized environment
:param policy_base: (BasePolicy) the base policy used by this method | 6259906463d6d428bbee3e19 |
class TaskPolicy: <NEW_LINE> <INDENT> sensitive_list = [] <NEW_LINE> openapi_types = { 'schedule_time': 'str', 'retry_count': 'int', 'retry_interval': 'int' } <NEW_LINE> attribute_map = { 'schedule_time': 'schedule_time', 'retry_count': 'retry_count', 'retry_interval': 'retry_interval' } <NEW_LINE> def __init__(self, schedule_time=None, retry_count=None, retry_interval=None): <NEW_LINE> <INDENT> self._schedule_time = None <NEW_LINE> self._retry_count = None <NEW_LINE> self._retry_interval = None <NEW_LINE> self.discriminator = None <NEW_LINE> if schedule_time is not None: <NEW_LINE> <INDENT> self.schedule_time = schedule_time <NEW_LINE> <DEDENT> if retry_count is not None: <NEW_LINE> <INDENT> self.retry_count = retry_count <NEW_LINE> <DEDENT> if retry_interval is not None: <NEW_LINE> <INDENT> self.retry_interval = retry_interval <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def schedule_time(self): <NEW_LINE> <INDENT> return self._schedule_time <NEW_LINE> <DEDENT> @schedule_time.setter <NEW_LINE> def schedule_time(self, schedule_time): <NEW_LINE> <INDENT> self._schedule_time = schedule_time <NEW_LINE> <DEDENT> @property <NEW_LINE> def retry_count(self): <NEW_LINE> <INDENT> return self._retry_count <NEW_LINE> <DEDENT> @retry_count.setter <NEW_LINE> def retry_count(self, retry_count): <NEW_LINE> <INDENT> self._retry_count = retry_count <NEW_LINE> <DEDENT> @property <NEW_LINE> def retry_interval(self): <NEW_LINE> <INDENT> return self._retry_interval <NEW_LINE> <DEDENT> @retry_interval.setter <NEW_LINE> def retry_interval(self, retry_interval): <NEW_LINE> <INDENT> self._retry_interval = retry_interval <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if attr in self.sensitive_list: <NEW_LINE> <INDENT> result[attr] = "****" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TaskPolicy): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition. | 62599064e64d504609df9f5e |
class OidcServiceProviderViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = ServiceProvider.objects.all() <NEW_LINE> permission_classes = [IsAuthenticated] <NEW_LINE> serializer_class = OidcServiceProviderSerializer <NEW_LINE> filter_backends = [filters.SearchFilter, DjangoFilterBackend] <NEW_LINE> search_fields = ['entity_id'] <NEW_LINE> filterset_class = ServiceProviderFilter <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return get_service_provider_queryset(user=self.request.user, service_type='oidc') <NEW_LINE> <DEDENT> def perform_destroy(self, instance): <NEW_LINE> <INDENT> instance.end_at = timezone.now() <NEW_LINE> instance.save() <NEW_LINE> logger.info("ServiceProvider {service} deleted by {user}" .format(service=instance, user=self.request.user)) | API endpoint for OIDC relying parties.
list:
Returns a list of all the existing OIDC relying parties.
retrieve:
Returns the given OIDC relying party.
create:
Creates a new OIDC relying party instance.
update:
Updates the given OIDC relying party.
partial_update:
Updates the given OIDC relying party.
destroy:
Removes the given OIDC relying party. | 6259906445492302aabfdbfd |
class GenericRemotePlugin(HookBaseClass): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "Publish Plugin that runs REMOTELY" <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return "This plugin should NOT process items locally" <NEW_LINE> <DEDENT> @property <NEW_LINE> def settings(self): <NEW_LINE> <INDENT> return { "run_on_farm": { "type": "bool", "default": "True", "description": "Indicates whether this plugin should run on farm.", } } <NEW_LINE> <DEDENT> @property <NEW_LINE> def item_filters(self): <NEW_LINE> <INDENT> return ["generic.item"] <NEW_LINE> <DEDENT> def accept(self, settings, item): <NEW_LINE> <INDENT> item.local_properties.plugin_name = "remote" <NEW_LINE> publisher = self.parent <NEW_LINE> if not publisher.engine.has_ui: <NEW_LINE> <INDENT> settings["run_on_farm"].value = True <NEW_LINE> <DEDENT> return {"accepted": True} <NEW_LINE> <DEDENT> def validate(self, settings, item): <NEW_LINE> <INDENT> if "TEST_LOCAL_PROPERTIES" in os.environ: <NEW_LINE> <INDENT> if item.local_properties.plugin_name == "remote": <NEW_LINE> <INDENT> raise Exception("local_properties was serialized properly.") <NEW_LINE> <DEDENT> <DEDENT> self.logger.debug("Executing remote plugin validate.") <NEW_LINE> return True <NEW_LINE> <DEDENT> def publish(self, settings, item): <NEW_LINE> <INDENT> publisher = self.parent <NEW_LINE> run_on_farm = settings["run_on_farm"].value <NEW_LINE> if publisher.engine.has_ui and run_on_farm: <NEW_LINE> <INDENT> self.logger.debug("Skipping remote plugin execution.") <NEW_LINE> return <NEW_LINE> <DEDENT> self.logger.debug("Executing remote plugin publish.") <NEW_LINE> <DEDENT> def finalize(self, settings, item): <NEW_LINE> <INDENT> publisher = self.parent <NEW_LINE> run_on_farm = settings["run_on_farm"].value <NEW_LINE> if publisher.engine.has_ui and run_on_farm: <NEW_LINE> <INDENT> self.logger.debug("Skipping remote plugin execution.") <NEW_LINE> return <NEW_LINE> <DEDENT> self.logger.debug("Executing remote plugin finalize.") | This should NOT process the item locally... | 625990644a966d76dd5f0617 |
class NetworkDevicesGridRemote(RemoteModel): <NEW_LINE> <INDENT> properties = ("id", "DeviceID", "DeviceIPDotted", "DeviceName", "DeviceType", ) | | ``id:`` none
| ``attribute type:`` string
| ``DeviceID:`` none
| ``attribute type:`` string
| ``DeviceIPDotted:`` none
| ``attribute type:`` string
| ``DeviceName:`` none
| ``attribute type:`` string
| ``DeviceType:`` none
| ``attribute type:`` string | 62599064e5267d203ee6cf4f |
class ChapterParser(BaseParser): <NEW_LINE> <INDENT> CATEGORY = "chapter" <NEW_LINE> @classmethod <NEW_LINE> def _accepts(cls, str_element): <NEW_LINE> <INDENT> return re.match("[0-9][.][a-z][)]", str_element, re.U) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _parse(cls, chapter): <NEW_LINE> <INDENT> res = {} <NEW_LINE> token = chapter.index(")") <NEW_LINE> res["id"] = chapter[:token] <NEW_LINE> res["desc"] = chapter[token + 2:] <NEW_LINE> return res | Parser for chapter. | 6259906429b78933be26ac55 |
class PostgresqlDb(IDB): <NEW_LINE> <INDENT> __db = {} <NEW_LINE> DEBUG = False <NEW_LINE> def __init__(self, dbname, host="", username="", passwd="", multiThreaded=True, connect=True): <NEW_LINE> <INDENT> self.dbname = dbname <NEW_LINE> self.host = host <NEW_LINE> self.username = username <NEW_LINE> self.passwd = passwd <NEW_LINE> self.db = None <NEW_LINE> self.multiThreaded = multiThreaded <NEW_LINE> if connect: <NEW_LINE> <INDENT> self.connect() <NEW_LINE> <DEDENT> <DEDENT> def connect(self): <NEW_LINE> <INDENT> if self.multiThreaded: <NEW_LINE> <INDENT> self.db = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( self.dbname, self.username, self.host, self.passwd)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dbKey = (self.dbname, self.username, self.host, self.passwd) <NEW_LINE> if dbKey not in self.__db: <NEW_LINE> <INDENT> self.__db[dbKey] = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( self.dbname, self.username, self.host, self.passwd)) <NEW_LINE> <DEDENT> self.db = self.__db[dbKey] <NEW_LINE> <DEDENT> <DEDENT> def query(self, qu): <NEW_LINE> <INDENT> if type(qu) in StringTypes: <NEW_LINE> <INDENT> qu = (qu,) <NEW_LINE> <DEDENT> if PostgresqlDb.DEBUG: <NEW_LINE> <INDENT> log.debug("Query: %s" % qu) <NEW_LINE> <DEDENT> cur = self.db.cursor(cursor_factory=psycopg2.extras.DictCursor) <NEW_LINE> [cur.execute(q) for q in qu] <NEW_LINE> return cur.fetchall() <NEW_LINE> <DEDENT> def execute(self, q): <NEW_LINE> <INDENT> cur = self.db.cursor(cursor_factory=psycopg2.extras.DictCursor) <NEW_LINE> return cur.execute(q) <NEW_LINE> <DEDENT> def getCursor(self): <NEW_LINE> <INDENT> return self.db.cursor() <NEW_LINE> <DEDENT> def commit(self): <NEW_LINE> <INDENT> self.db.commit() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.db.close() | @class PostgresqlDb
provides general database access | 62599064d7e4931a7ef3d716 |
class LinkingStage(PipelineStage): <NEW_LINE> <INDENT> def transform(self, ast, env): <NEW_LINE> <INDENT> func_env = env.translation.crnt <NEW_LINE> env.context.intrinsic_library.link(func_env.lfunc.module) <NEW_LINE> env.constants_manager.link(func_env.lfunc.module) <NEW_LINE> if func_env.link: <NEW_LINE> <INDENT> func_env.lfunc = env.llvm_context.link(func_env.lfunc) <NEW_LINE> func_env.translator.lfunc = func_env.lfunc <NEW_LINE> <DEDENT> func_env.lfunc_pointer = func_env.translator.lfunc_pointer <NEW_LINE> return ast | Link the resulting LLVM function into the global fat module. | 62599064796e427e5384fe98 |
class PTracker(object): <NEW_LINE> <INDENT> def __init__(self, user=None, password=None, token=None, ssl=True): <NEW_LINE> <INDENT> self.client = Client(ssl=ssl) <NEW_LINE> if token is None: <NEW_LINE> <INDENT> token = self._get_token_for_credentials(user, password) <NEW_LINE> <DEDENT> self.client.token = token <NEW_LINE> self._projects = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> return self.client.token <NEW_LINE> <DEDENT> @property <NEW_LINE> def projects(self): <NEW_LINE> <INDENT> if self._projects is None: <NEW_LINE> <INDENT> self._projects = ProjectManager(self.client) <NEW_LINE> <DEDENT> return self._projects <NEW_LINE> <DEDENT> def Project(self): <NEW_LINE> <INDENT> p = Project() <NEW_LINE> p.client = self.client <NEW_LINE> return p <NEW_LINE> <DEDENT> def Membership(self): <NEW_LINE> <INDENT> m = Membership() <NEW_LINE> m.person = Person() <NEW_LINE> m.client = self.client <NEW_LINE> return m <NEW_LINE> <DEDENT> def Story(self): <NEW_LINE> <INDENT> s = Story() <NEW_LINE> s.client = self.client <NEW_LINE> return s <NEW_LINE> <DEDENT> def Task(self): <NEW_LINE> <INDENT> t = Task() <NEW_LINE> t.client = self.client <NEW_LINE> return t <NEW_LINE> <DEDENT> def _get_token_for_credentials(self, user=None, password=None): <NEW_LINE> <INDENT> if user is None or password is None: <NEW_LINE> <INDENT> raise PyvotalException("Provide user AND password") <NEW_LINE> <DEDENT> tree = self.client.get('tokens/active', auth=(user, password)) <NEW_LINE> return tree.find('guid').text | Base api entry point | 625990642ae34c7f260ac809 |
class Odometry: <NEW_LINE> <INDENT> def __init__(self, line): <NEW_LINE> <INDENT> self.parse(line) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Odometry: %s %s %s %s" % (self.x, self.y, self.theta, self.ts) <NEW_LINE> <DEDENT> def parse(self, line): <NEW_LINE> <INDENT> self.x, self.y, self.theta, self.ts = map(float, line.split()[1:]) <NEW_LINE> <DEDENT> def has_changed(self): <NEW_LINE> <INDENT> prev = self.prev_odometry <NEW_LINE> if any([self.x != prev.x, self.y != prev.y, self.theta != prev.theta]): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Odometry data type. Contains x, y, theta, and timestamp data, all in
reference to the standard odometry frame. | 62599064a8370b77170f1af1 |
class Entity(BaseModel): <NEW_LINE> <INDENT> is_member = BooleanField() <NEW_LINE> name = CharField() <NEW_LINE> email = CharField(null=True) <NEW_LINE> phone = CharField(null=True) <NEW_LINE> reminder_date = DateField(null=True) <NEW_LINE> joined_date = DateField(null=True) <NEW_LINE> agreement_date = DateField(null=True) <NEW_LINE> is_keyholder = BooleanField(null=True) <NEW_LINE> token = CharField(null=True) <NEW_LINE> token_expiry = DateTimeField(null=True) <NEW_LINE> def active_member(self): <NEW_LINE> <INDENT> one_year_ago = (datetime.now() - relativedelta(years=1)).date() <NEW_LINE> if self.agreement_date <= one_year_ago: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True | An Entity sends money to the organisation or receives money from the
organisation. Members are a special type of entity. | 6259906499cbb53fe6832606 |
class Timeseries(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'tags': 'dict(str, str)', 'points': 'list[Point]' } <NEW_LINE> self.attribute_map = { 'tags': 'tags', 'points': 'points' } <NEW_LINE> self._tags = None <NEW_LINE> self._points = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def tags(self): <NEW_LINE> <INDENT> return self._tags <NEW_LINE> <DEDENT> @tags.setter <NEW_LINE> def tags(self, tags): <NEW_LINE> <INDENT> self._tags = tags <NEW_LINE> <DEDENT> @property <NEW_LINE> def points(self): <NEW_LINE> <INDENT> return self._points <NEW_LINE> <DEDENT> @points.setter <NEW_LINE> def points(self, points): <NEW_LINE> <INDENT> self._points = points <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259906497e22403b383c62f |
class LoginPageBaseModel(page_base.PageModel): <NEW_LINE> <INDENT> username = elements.TextInput(byset=by.By.ID, locator='username') <NEW_LINE> password = elements.PasswordInput(byset=by.By.ID, locator='password') <NEW_LINE> domain = elements.Select(byset=by.By.ID, locator='profile') <NEW_LINE> login_btn = elements.Button(byset=by.By.CLASS_NAME, locator='btn-lg') <NEW_LINE> page_body = elements.PageElement(by.By.TAG_NAME, 'body') <NEW_LINE> msg_login_failed = ('Login failed. Please verify your login ' 'information or contact the system administrator.') <NEW_LINE> msg_login_failed2 = 'The user name or password is incorrect.' | Common page model for the login page.
| 625990644428ac0f6e659c54 |
class Dog(): <NEW_LINE> <INDENT> def __init__(self, name, age): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def sit(self): <NEW_LINE> <INDENT> print(self.name.title() + " is now sitting!") <NEW_LINE> <DEDENT> def roll_over(self): <NEW_LINE> <INDENT> print (self.name.title() + ' rolled over!') | A simple attempt to model a dog. | 625990648e7ae83300eea7b0 |
class GameStats(): <NEW_LINE> <INDENT> def __init__(self, ai_settings): <NEW_LINE> <INDENT> self.ai_settings = ai_settings <NEW_LINE> self.reset_stats() <NEW_LINE> self.ships_left = self.ai_settings.ship_limit <NEW_LINE> self.game_active = True <NEW_LINE> <DEDENT> def reset_stats(self): <NEW_LINE> <INDENT> self.ships_left = self.ai_settings.ship_limit | Track statistics for Alien Invasion. | 625990647d847024c075daf9 |
class TestReferenceWithUserIDLink(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testReferenceWithUserIDLink(self): <NEW_LINE> <INDENT> pass | ReferenceWithUserIDLink unit test stubs | 62599064009cb60464d02c5b |
class TreeState(BaseModel): <NEW_LINE> <INDENT> name = models.CharField(max_length=150) <NEW_LINE> question = models.ForeignKey(Question) <NEW_LINE> num_retries = models.PositiveIntegerField( blank=True, null=True, help_text="The number of tries the user has to get out of this state" ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.question.text <NEW_LINE> <DEDENT> def add_all_unique_children(self, added): <NEW_LINE> <INDENT> transitions = self.transition_set.select_related( 'next_state__question') <NEW_LINE> for transition in transitions: <NEW_LINE> <INDENT> if transition.next_state: <NEW_LINE> <INDENT> if transition.next_state not in added: <NEW_LINE> <INDENT> added.append(transition.next_state) <NEW_LINE> transition.next_state.add_all_unique_children(added) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def has_loops_below(self): <NEW_LINE> <INDENT> return TreeState.path_has_loops([self]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def path_has_loops(klass, path): <NEW_LINE> <INDENT> last_node = path[len(path) - 1] <NEW_LINE> transitions = last_node.transition_set.all() <NEW_LINE> for transition in transitions: <NEW_LINE> <INDENT> if transition.next_state: <NEW_LINE> <INDENT> if path.__contains__(transition.next_state): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> next_path = path[:] <NEW_LINE> next_path.append(transition.next_state) <NEW_LINE> if TreeState.path_has_loops(next_path): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False | A TreeState is a location in a tree. It is associated with a question and
a set of answers (transitions) that allow traversal to other states. | 625990644e4d562566373b2a |
class InvalidEndpoint(Exception): <NEW_LINE> <INDENT> pass | Raised when the provided endpoint was deemed invalid. | 62599064462c4b4f79dbd12a |
class OrderMain(Base): <NEW_LINE> <INDENT> __tablename__ = 'OrderMain' <NEW_LINE> OMid = Column(String(64), primary_key=True) <NEW_LINE> OMno = Column(String(64), nullable=False, comment='订单编号') <NEW_LINE> OPayno = Column(String(64), comment='付款流水号,与orderpay对应') <NEW_LINE> USid = Column(String(64), nullable=False, comment='用户id') <NEW_LINE> UseCoupon = Column(Boolean, default=False, comment='是否优惠券') <NEW_LINE> OMfrom = Column(Integer, default=0, comment='来源: 0: 购物车, 10: 商品详情 20: 店主权限, 30: 猜数字奖品, 40: 新人商品, 50: 帮拆礼盒, 60: 试用商品') <NEW_LINE> PBname = Column(String(32), nullable=False, comment='品牌名') <NEW_LINE> PBid = Column(String(64), nullable=False, comment='品牌id') <NEW_LINE> OMclient = Column(Integer, default=0, comment='下单设备: 0: 微信, 10: app') <NEW_LINE> OMfreight = Column(Float, default=0, comment='运费') <NEW_LINE> OMmount = Column(DECIMAL(precision=28, scale=2), nullable=False, comment='总价') <NEW_LINE> OMtrueMount = Column(DECIMAL(precision=28, scale=2), nullable=False, comment='实际总价') <NEW_LINE> OMstatus = Column(Integer, default=0, comment='订单状态 0待付款,10待发货,20待收货, 35 待评价, 30完成 -40取消交易') <NEW_LINE> OMinRefund = Column(Boolean, default=False, comment='主单是否在售后状态') <NEW_LINE> OMmessage = Column(String(255), comment='留言') <NEW_LINE> OMrecvPhone = Column(String(11), nullable=False, comment='收货电话') <NEW_LINE> OMrecvName = Column(String(11), nullable=False, comment='收货人姓名') <NEW_LINE> OMrecvAddress = Column(String(255), nullable=False, comment='地址') <NEW_LINE> PRcreateId = Column(String(64), comment='发布者id') <NEW_LINE> OMlogisticType = Column(Integer, default=0, comment='发货类型 0 正常发货, 10线上发货(无物流)') | Main order record; when an order is placed, each brand gets a separate order, but they are paid for together | 6259906467a9b606de547634 |
class Join(Node): <NEW_LINE> <INDENT> def template(self, items): <NEW_LINE> <INDENT> for item in items: <NEW_LINE> <INDENT> if item: <NEW_LINE> <INDENT> if isinstance(item, list): <NEW_LINE> <INDENT> self.template(item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(item, str): <NEW_LINE> <INDENT> item = Text(item) <NEW_LINE> <DEDENT> self.add_child(item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def format(self, **kwargs): <NEW_LINE> <INDENT> parent = kwargs.get('parent') <NEW_LINE> if parent is None: <NEW_LINE> <INDENT> parent = ( docutils.nodes.inline('', '', classes=self.classes) if self.classes else docutils.nodes.inline('', '')) <NEW_LINE> <DEDENT> sep = self.kwargs.get('sep') <NEW_LINE> last_sep = self.kwargs.get('last_sep') <NEW_LINE> children = [] <NEW_LINE> for child in self.children: <NEW_LINE> <INDENT> for element in child: <NEW_LINE> <INDENT> element = format_node(element, **kwargs) <NEW_LINE> if element: <NEW_LINE> <INDENT> children.append(element) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for i, child in enumerate(children[:-1]): <NEW_LINE> <INDENT> parent += child <NEW_LINE> if i < len(children) - 2: <NEW_LINE> <INDENT> if sep: <NEW_LINE> <INDENT> parent += docutils.nodes.inline(sep, sep) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> current_sep = last_sep or sep <NEW_LINE> if current_sep: <NEW_LINE> <INDENT> parent += docutils.nodes.inline(current_sep, current_sep) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if children: <NEW_LINE> <INDENT> parent += children[-1] <NEW_LINE> <DEDENT> return parent | Node class for joining nodes. | 625990641f5feb6acb16430f |
class MailTemplate(models.Model): <NEW_LINE> <INDENT> name = models.CharField(_(u"Name"), max_length=255) <NEW_LINE> plain = models.TextField(_(u"Plaintext Body")) <NEW_LINE> html = models.TextField(_(u"HTML Body"), blank=True, null=True) <NEW_LINE> subject = models.CharField(_(u"Subject"), max_length=255) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _("mail template") <NEW_LINE> verbose_name_plural = _("mail templates") <NEW_LINE> ordering = ('name',) | Holds a template for the email. Both, HTML and plaintext, versions
can be stored. If both are present, the email will be sent out as HTML
with an alternate plain part. If only plaintext is entered, the email will
be sent as text-only. HTML-only emails are currently not supported because
I don't like them. | 62599064435de62698e9d52d |
class FieldSelectorResolutionTests(unittest.TestCase): <NEW_LINE> <INDENT> @unittest.skipIf(not current.deployment_settings.has_module("project"), "project module disabled") <NEW_LINE> def testResolveSelectorsWithoutComponents(self): <NEW_LINE> <INDENT> resource = current.s3db.resource("project_project") <NEW_LINE> selectors = ["id", "name", "organisation_id$name", "task.description"] <NEW_LINE> fields, joins, left, distinct = resource.resolve_selectors(selectors) <NEW_LINE> self.assertEqual(len(fields), 3) <NEW_LINE> self.assertEqual(fields[0].colname, "project_project.id") <NEW_LINE> self.assertEqual(fields[1].colname, "project_project.name") <NEW_LINE> self.assertEqual(fields[2].colname, "org_organisation.name") <NEW_LINE> self.assertEqual(joins, Storage()) <NEW_LINE> self.assertTrue(isinstance(left, Storage)) <NEW_LINE> self.assertEqual(left.keys(), ["org_organisation"]) <NEW_LINE> self.assertEqual(len(left["org_organisation"]), 1) <NEW_LINE> self.assertEqual(str(left["org_organisation"][0]), "org_organisation ON " "(project_project.organisation_id = org_organisation.id)") <NEW_LINE> self.assertTrue(distinct) <NEW_LINE> <DEDENT> @unittest.skipIf(not current.deployment_settings.has_module("project"), "project module disabled") <NEW_LINE> def testResolveSelectorsWithComponents(self): <NEW_LINE> <INDENT> resource = current.s3db.resource("project_project") <NEW_LINE> selectors = ["id", "name", "organisation_id$name", "task.description"] <NEW_LINE> fields, joins, left, distinct = resource.resolve_selectors(selectors, skip_components=False) <NEW_LINE> self.assertEqual(len(fields), 4) <NEW_LINE> self.assertEqual(fields[0].colname, "project_project.id") <NEW_LINE> self.assertEqual(fields[1].colname, "project_project.name") <NEW_LINE> self.assertEqual(fields[2].colname, "org_organisation.name") <NEW_LINE> self.assertEqual(fields[3].colname, "project_task.description") <NEW_LINE> self.assertEqual(joins, Storage()) <NEW_LINE> self.assertTrue(isinstance(left, Storage)) <NEW_LINE> self.assertEqual(left.keys(), [ "org_organisation", "project_task"]) <NEW_LINE> self.assertEqual(len(left["org_organisation"]), 1) <NEW_LINE> self.assertEqual(str(left["org_organisation"][0]), "org_organisation ON " "(project_project.organisation_id = org_organisation.id)") <NEW_LINE> self.assertEqual(len(left["project_task"]), 2) <NEW_LINE> self.assertEqual(str(left["project_task"][0]), "project_task_project ON " "((project_task_project.project_id = project_project.id) AND " "(project_task_project.deleted <> 'T'))") <NEW_LINE> self.assertEqual(str(left["project_task"][1]), "project_task ON " "(project_task_project.task_id = project_task.id)") <NEW_LINE> self.assertTrue(distinct) | Test field selector resolution | 6259906491f36d47f2231a21 |
class PortfolioCreateForTeam(LoginRequiredMixin, CreateView): <NEW_LINE> <INDENT> model = Portfolio <NEW_LINE> form_class = PortfolioCreateForm <NEW_LINE> http_method_names = ['get', 'post', ] <NEW_LINE> template_name = 'gallery/portfolio_form_create_for_team.html' <NEW_LINE> success_url = reverse_lazy('gallery:portfolio.tile') <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> team = get_object_or_404(Team, pk=self.kwargs.get('team_pk')) <NEW_LINE> writer = self.request.user <NEW_LINE> portfolio = form.save(commit=False) <NEW_LINE> portfolio.writer = writer <NEW_LINE> portfolio.team = team <NEW_LINE> portfolio.save() <NEW_LINE> form.save_m2m() <NEW_LINE> messages.success(self.request, _('Your portfolio was created successfully.')) <NEW_LINE> return super(PortfolioCreateForTeam, self).form_valid(form) <NEW_LINE> <DEDENT> def form_invalid(self, form): <NEW_LINE> <INDENT> return super(PortfolioCreateForTeam, self).form_invalid(form) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(PortfolioCreateForTeam, self).get_context_data(**kwargs) <NEW_LINE> team = get_object_or_404(Team, pk=self.kwargs.get('team_pk')) <NEW_LINE> context['team'] = team <NEW_LINE> return context | Create Portfolio for Project | 6259906445492302aabfdc00 |
class StationItem(ButtonListItem): <NEW_LINE> <INDENT> BUTTON_PLAY = "play" <NEW_LINE> BUTTON_RENAME = "rename" <NEW_LINE> BUTTON_REMOVE = "remove" <NEW_LINE> def __init__(self, freq, name): <NEW_LINE> <INDENT> self.__title = self.escape_xml(name) <NEW_LINE> self.__freq = freq <NEW_LINE> ButtonListItem.__init__(self) <NEW_LINE> self.set_colors(theme.color_mb_listitem_text, theme.color_mb_listitem_subtext) <NEW_LINE> self.set_font(theme.font_mb_tiny) <NEW_LINE> self.set_grip(theme.mb_item_grip) <NEW_LINE> self.set_buttons((self.BUTTON_PLAY, theme.mb_item_btn_play), (self.BUTTON_REMOVE, theme.mb_item_btn_remove)) <NEW_LINE> <DEDENT> def render_this(self, cnv): <NEW_LINE> <INDENT> self.render_bg(cnv) <NEW_LINE> self.render_grip(cnv) <NEW_LINE> self.render_label(cnv, 32, self.__title, self.__freq) <NEW_LINE> self.render_selection_frame(cnv) <NEW_LINE> self.render_buttons(cnv) | List item for radio stations. | 62599064442bda511e95d8ec |
class WaterSideEconomizerDBTemperatureMaximum(BSElement): <NEW_LINE> <INDENT> element_type = "xs:decimal" | The control temperature of the outside air dry-bulb temperature above which the water-side economizer is disabled. (°F) | 625990640c0af96317c578f1 |
class Solution1: <NEW_LINE> <INDENT> def minSubArrayLen(self, s: int, nums: List[int]) -> int: <NEW_LINE> <INDENT> if not nums: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> n = len(nums) <NEW_LINE> min_len = n+1 <NEW_LINE> left = 0 <NEW_LINE> sum_val = 0 <NEW_LINE> for right in range(n): <NEW_LINE> <INDENT> sum_val += nums[right] <NEW_LINE> while sum_val >= s: <NEW_LINE> <INDENT> min_len = min(min_len, right - left + 1) <NEW_LINE> sum_val -= nums[left] <NEW_LINE> left += 1 <NEW_LINE> <DEDENT> <DEDENT> if min_len == n + 1: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return min_len | Time:O(n)
space:O(1) | 625990648e71fb1e983bd1ed |
class TestIOReaderWriter(unittest.TestCase): <NEW_LINE> <INDENT> def test_io_dump(self): <NEW_LINE> <INDENT> vcheck = {"x": 25282, "y": 43770, "spatialReference": {"wkid": 4326}} <NEW_LINE> if IS_PY3: <NEW_LINE> <INDENT> with tempfile.TemporaryDirectory() as d: <NEW_LINE> <INDENT> fp = os.path.join(d, "test.json") <NEW_LINE> with open(fp, "w") as write_file: <NEW_LINE> <INDENT> esri.dump(gj_pt, write_file) <NEW_LINE> <DEDENT> with open(fp, 'r') as r: <NEW_LINE> <INDENT> data = r.read() <NEW_LINE> data = json.loads(data) <NEW_LINE> self.assertTrue(data == vcheck) <NEW_LINE> <DEDENT> self.assertTrue(os.path.isfile(fp)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> d = tempfile.gettempdir() <NEW_LINE> fp = os.path.join(d, "test.json") <NEW_LINE> with open(fp, "w") as write_file: <NEW_LINE> <INDENT> esri.dump(gj_pt, write_file) <NEW_LINE> <DEDENT> with open(fp, 'r') as r: <NEW_LINE> <INDENT> self.assertTrue(json.loads(r.read()) == vcheck) <NEW_LINE> <DEDENT> self.assertTrue(os.path.isfile(fp)) <NEW_LINE> os.remove(fp) <NEW_LINE> <DEDENT> <DEDENT> def test_io_load(self): <NEW_LINE> <INDENT> vcheck = {'type': 'Point', 'coordinates': (25282, 43770)} <NEW_LINE> if IS_PY3: <NEW_LINE> <INDENT> with tempfile.TemporaryDirectory() as d: <NEW_LINE> <INDENT> fp = os.path.join(d, "test.json") <NEW_LINE> with open(fp, 'w') as w: <NEW_LINE> <INDENT> esri.dump(gj_pt, w) <NEW_LINE> <DEDENT> with open(fp, 'r') as r: <NEW_LINE> <INDENT> self.assertEqual( esri.load(r), { 'spatialReference': { 'wkid': 4326}, 'x': 25282, 'y': 43770}) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> d = tempfile.gettempdir() <NEW_LINE> fp = os.path.join(d, "test.json") <NEW_LINE> with open(fp, 'w') as w: <NEW_LINE> <INDENT> esri.dump(gj_pt, w) <NEW_LINE> <DEDENT> with open(fp, 'r') as r: <NEW_LINE> <INDENT> self.assertEqual( esri.load(r), { 'spatialReference': { 'wkid': 4326}, 'x': 25282, 'y': 43770}) | Tests the load/dump methods | 6259906497e22403b383c631 |
class DummyUpload(object): <NEW_LINE> <INDENT> def __init__(self, path, name): <NEW_LINE> <INDENT> self.stream = open(path, 'rb') <NEW_LINE> self.name = name <NEW_LINE> self.size = os.path.getsize(path) <NEW_LINE> <DEDENT> def read(self, number_of_bytes=None): <NEW_LINE> <INDENT> return self.stream.read(number_of_bytes) <NEW_LINE> <DEDENT> def seek(self, offset): <NEW_LINE> <INDENT> return self.stream.seek(offset) | Upload and read file. | 625990644428ac0f6e659c56 |
class AbinitEvent(yaml.YAMLObject): <NEW_LINE> <INDENT> color = None <NEW_LINE> def __init__(self, src_file, src_line, message): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> self.src_file = src_file <NEW_LINE> self.src_line = src_line <NEW_LINE> <DEDENT> @pmg_serialize <NEW_LINE> def as_dict(self): <NEW_LINE> <INDENT> src_file = getattr(self, "src_file", "Unknown") <NEW_LINE> src_line = getattr(self, "src_line", 0) <NEW_LINE> return dict(message=self.message, src_file=src_file, src_line=src_line, yaml_tag=self.yaml_tag) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, d): <NEW_LINE> <INDENT> cls = as_event_class(d.get("yaml_tag")) <NEW_LINE> return cls(**{k: v for k, v in d.items() if k != "yaml_tag" and not k.startswith("@")}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def header(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return "<%s at %s:%s>" % (self.name, self.src_file, self.src_line) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return "<%s at %s:%s>" % (self.name, "Unknown", 0) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.header <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "\n".join((self.header, self.message)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if other is None: return False <NEW_LINE> return self.message == other.message <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.__class__.__name__ <NEW_LINE> <DEDENT> @property <NEW_LINE> def baseclass(self): <NEW_LINE> <INDENT> for cls in _BASE_CLASSES: <NEW_LINE> <INDENT> if isinstance(self, cls): <NEW_LINE> <INDENT> return cls <NEW_LINE> <DEDENT> <DEDENT> raise ValueError("Cannot determine the base class of %s" % self.__class__.__name__) <NEW_LINE> <DEDENT> def correct(self, task): <NEW_LINE> <INDENT> return 0 | Example (YAML syntax)::
Normal warning without any handler:
--- !Warning
message: |
This is a normal warning that won't
trigger any handler in the python code!
src_file: routine_name
src_line: 112
...
Critical warning that will trigger some action in the python code.
--- !ScfConvergeWarning
message: |
The human-readable message goes here!
src_file: foo.F90
src_line: 112
tolname: tolwfr
actual_tol: 1.0e-8
required_tol: 1.0e-10
nstep: 50
...
The algorithm to extract the YAML sections is very simple.
1) We use YamlTokenizer to extract the documents from the output file
2) If we have a tag that ends with "Warning", "Error", "Bug", or "Comment",
we know we have encountered a new ABINIT event
3) We parse the document with yaml.load(doc.text) and we get the object
Note that:
# --- and ... become reserved words (when they are placed at
the beginning of a line) since they are used to mark the beginning and
the end of YAML documents.
# All the possible events should subclass `AbinitEvent` and define
the class attribute yaml_tag so that yaml.load will know how to
build the instance. | 625990647d847024c075dafb |
class ParenteiaObjetos(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.parenteia_objetos" <NEW_LINE> bl_label = "Parenteia objetos" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> ParenteiaObjetosDef() <NEW_LINE> return {'FINISHED'} | Tooltip | 62599064a17c0f6771d5d738 |
class Insert(ValuesBase): <NEW_LINE> <INDENT> __visit_name__ = 'insert' <NEW_LINE> _supports_multi_parameters = True <NEW_LINE> def __init__(self, table, values=None, inline=False, bind=None, prefixes=None, returning=None, return_defaults=False, **dialect_kw): <NEW_LINE> <INDENT> ValuesBase.__init__(self, table, values, prefixes) <NEW_LINE> self._bind = bind <NEW_LINE> self.select = self.select_names = None <NEW_LINE> self.inline = inline <NEW_LINE> self._returning = returning <NEW_LINE> self._validate_dialect_kwargs(dialect_kw) <NEW_LINE> self._return_defaults = return_defaults <NEW_LINE> <DEDENT> def get_children(self, **kwargs): <NEW_LINE> <INDENT> if self.select is not None: <NEW_LINE> <INDENT> return self.select, <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> <DEDENT> @_generative <NEW_LINE> def from_select(self, names, select): <NEW_LINE> <INDENT> if self.parameters: <NEW_LINE> <INDENT> raise exc.InvalidRequestError( "This construct already inserts value expressions") <NEW_LINE> <DEDENT> self.parameters, self._has_multi_parameters = self._process_colparams(dict((n, Null()) for n in names)) <NEW_LINE> self.select_names = names <NEW_LINE> self.inline = True <NEW_LINE> self.select = _interpret_as_select(select) <NEW_LINE> <DEDENT> def _copy_internals(self, clone=_clone, **kw): <NEW_LINE> <INDENT> self.parameters = self.parameters.copy() <NEW_LINE> if self.select is not None: <NEW_LINE> <INDENT> self.select = _clone(self.select) | Represent an INSERT construct.
The :class:`.Insert` object is created using the
:func:`~.expression.insert()` function.
.. seealso::
:ref:`coretutorial_insert_expressions` | 6259906463d6d428bbee3e1b |
class MicrosoftPartnerSdkContractsV1CollectionsResourceCollectionMicrosoftPartnerSdkContractsAnalyticsCustomerLicensesDeploymentInsights(Model): <NEW_LINE> <INDENT> _validation = { 'total_count': {'readonly': True}, 'items': {'readonly': True}, 'attributes': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'total_count': {'key': 'totalCount', 'type': 'int'}, 'items': {'key': 'items', 'type': '[MicrosoftPartnerSdkContractsAnalyticsCustomerLicensesDeploymentInsights]'}, 'links': {'key': 'links', 'type': 'MicrosoftPartnerSdkContractsV1CommonResourceLinks'}, 'attributes': {'key': 'attributes', 'type': 'MicrosoftPartnerSdkContractsV1CommonResourceAttributes'}, } <NEW_LINE> def __init__(self, links=None): <NEW_LINE> <INDENT> super(MicrosoftPartnerSdkContractsV1CollectionsResourceCollectionMicrosoftPartnerSdkContractsAnalyticsCustomerLicensesDeploymentInsights, self).__init__() <NEW_LINE> self.total_count = None <NEW_LINE> self.items = None <NEW_LINE> self.links = links <NEW_LINE> self.attributes = None | Contains a collection of resources with JSON properties to represent the
output.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar total_count: Gets the total count.
:vartype total_count: int
:ivar items: Gets the collection items.
:vartype items:
list[~microsoft.store.partnercenterservices.models.MicrosoftPartnerSdkContractsAnalyticsCustomerLicensesDeploymentInsights]
:param links: Gets or sets the links.
:type links:
~microsoft.store.partnercenterservices.models.MicrosoftPartnerSdkContractsV1CommonResourceLinks
:ivar attributes: Gets the attributes.
:vartype attributes:
~microsoft.store.partnercenterservices.models.MicrosoftPartnerSdkContractsV1CommonResourceAttributes | 6259906467a9b606de547635 |
class Visualizer(pg.QtGui.QMainWindow, RemoteObject): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Visualizer, self).__init__() <NEW_LINE> RemoteObject.__init__(self, rep_port=REPLY_PORT) <NEW_LINE> self._setup_ui() <NEW_LINE> self.data_file = None <NEW_LINE> self.buffer = np.zeros((32,32,3), dtype=np.uint8) <NEW_LINE> <DEDENT> def set_data_file(self, path): <NEW_LINE> <INDENT> self.release_file() <NEW_LINE> try: <NEW_LINE> <INDENT> self.data_file = open(path, 'rb') <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> self.data_file = path <NEW_LINE> <DEDENT> <DEDENT> def release_file(self): <NEW_LINE> <INDENT> if self.data_file: <NEW_LINE> <INDENT> if not isinstance(self.data_file, str): <NEW_LINE> <INDENT> self.data_file.close() <NEW_LINE> self.data_file = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _setup_ui(self): <NEW_LINE> <INDENT> self.resize(800,600) <NEW_LINE> self.cw = pg.QtGui.QWidget() <NEW_LINE> self.setCentralWidget(self.cw) <NEW_LINE> self.layout = pg.QtGui.QVBoxLayout() <NEW_LINE> self.cw.setLayout(self.layout) <NEW_LINE> self.label_bits = pg.QtGui.QLabel(text="0"*32) <NEW_LINE> self.layout.addWidget(self.label_bits) <NEW_LINE> self.gw = pg.GraphicsLayoutWidget() <NEW_LINE> self.layout.addWidget(self.gw) <NEW_LINE> self.vb = self.gw.addPlot() <NEW_LINE> self.vb.showAxis('top', True) <NEW_LINE> self.vb.showAxis('left', False) <NEW_LINE> self.img = pg.ImageItem() <NEW_LINE> self.vb.addItem(self.img) <NEW_LINE> self.update_timer = pg.QtCore.QTimer() <NEW_LINE> self.update_timer.timeout.connect(self._update) <NEW_LINE> self.update_timer.start(100) <NEW_LINE> self.setWindowTitle("Visualizer") <NEW_LINE> self.icon = pg.QtGui.QIcon() <NEW_LINE> self.icon.addFile("res/visualizer.png", pg.QtCore.QSize(16,16)) <NEW_LINE> self.setWindowIcon(self.icon) <NEW_LINE> with open("res/darkorange_stylesheet.css", 'r') as f: <NEW_LINE> <INDENT> stylesheet = f.read() <NEW_LINE> self.setStyleSheet(stylesheet) <NEW_LINE> <DEDENT> <DEDENT> def _update(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._update_buffer() <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> logging.warning("IOError: {}".format(e)) <NEW_LINE> <DEDENT> self._check_rep() <NEW_LINE> <DEDENT> def _update_buffer(self): <NEW_LINE> <INDENT> if not self.data_file: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if isinstance(self.data_file, str): <NEW_LINE> <INDENT> self.set_data_file(self.data_file) <NEW_LINE> return <NEW_LINE> <DEDENT> self.data_file.seek(-4,2) <NEW_LINE> data = np.fromfile(self.data_file, dtype=np.uint32, count=1) <NEW_LINE> binary = np.binary_repr(data, width=32) <NEW_LINE> row = np.array(list(binary)[::-1], dtype=np.uint8) <NEW_LINE> self.buffer = np.roll(self.buffer, -1, axis=1) <NEW_LINE> self.buffer[:,-1,2] = row <NEW_LINE> self.label_bits.setText(binary) <NEW_LINE> self.img.setImage(self.buffer) <NEW_LINE> <DEDENT> def closeEvent(self, evnt): <NEW_LINE> <INDENT> self.release_file() | docstring for Visualizer | 6259906491af0d3eaad3b54e |
class code(object): <NEW_LINE> <INDENT> def __init__(self, code: int): <NEW_LINE> <INDENT> if code > 999: <NEW_LINE> <INDENT> raise Exception( "Numeric code must be an integer less than 999 for a code hook." ) <NEW_LINE> <DEDENT> self._code = code <NEW_LINE> <DEDENT> def __call__(self, func: Callable): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def wrapped_command(_self, info): <NEW_LINE> <INDENT> return func(_self, info) <NEW_LINE> <DEDENT> wrapped_command._type = 'CODE' <NEW_LINE> wrapped_command._match = {'verb': '{:03d}'.format(self._code)} <NEW_LINE> return wrapped_command | TODO: Documentation | 625990648da39b475be0490f |
@functional_datapipe('filter') <NEW_LINE> class FilterIterDataPipe(MapIterDataPipe[T_co]): <NEW_LINE> <INDENT> def __init__(self, datapipe: IterDataPipe[T_co], filter_fn: Callable[..., bool], fn_args: Optional[Tuple] = None, fn_kwargs: Optional[Dict] = None, ) -> None: <NEW_LINE> <INDENT> super().__init__(datapipe, fn=filter_fn, fn_args=fn_args, fn_kwargs=fn_kwargs) <NEW_LINE> <DEDENT> def __iter__(self) -> Iterator[T_co]: <NEW_LINE> <INDENT> res: bool <NEW_LINE> for data in self.datapipe: <NEW_LINE> <INDENT> res = self.fn(data, *self.args, **self.kwargs) <NEW_LINE> if not isinstance(res, bool): <NEW_LINE> <INDENT> raise ValueError("Boolean output is required for " "`filter_fn` of FilterIterDataPipe") <NEW_LINE> <DEDENT> if res: <NEW_LINE> <INDENT> yield data <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> raise(NotImplementedError) | :class:`FilterIterDataPipe`.
Iterable DataPipe to filter elements from datapipe according to filter_fn.
args:
datapipe: Iterable DataPipe being filtered
filter_fn: Customized function mapping an element to a boolean.
fn_args: Positional arguments for `filter_fn`
fn_kwargs: Keyword arguments for `filter_fn` | 625990644e4d562566373b2d |
@gin.configurable <NEW_LINE> class DriftingLinearEnvironment(nsse.NonStationaryStochasticEnvironment): <NEW_LINE> <INDENT> def __init__(self, observation_distribution: types.Distribution, observation_to_reward_distribution: types.Distribution, drift_distribution: types.Distribution, additive_reward_distribution: types.Distribution): <NEW_LINE> <INDENT> super(DriftingLinearEnvironment, self).__init__( DriftingLinearDynamics( observation_distribution, observation_to_reward_distribution, drift_distribution, additive_reward_distribution)) | Implements a drifting linear environment. | 62599064097d151d1a2c2791 |
class RPRecord(DNSRecord): <NEW_LINE> <INDENT> def __init__(self, zone, fqdn, *args, **kwargs): <NEW_LINE> <INDENT> if 'create' in kwargs: <NEW_LINE> <INDENT> super(RPRecord, self).__init__(zone, fqdn, kwargs['create']) <NEW_LINE> del kwargs['create'] <NEW_LINE> self._build(kwargs) <NEW_LINE> self.logger = logging.getLogger(str(self.__class__)) <NEW_LINE> self._record_type = 'RPRecord' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(RPRecord, self).__init__(zone, fqdn) <NEW_LINE> self.logger = logging.getLogger(str(self.__class__)) <NEW_LINE> self._record_type = 'RPRecord' <NEW_LINE> self._mbox = self._txtdname = None <NEW_LINE> if 'record_id' in kwargs: <NEW_LINE> <INDENT> self._get_record(kwargs['record_id']) <NEW_LINE> <DEDENT> elif 'mbox' in kwargs or 'txtdname' in kwargs or 'ttl' in kwargs: <NEW_LINE> <INDENT> self._post(*args, **kwargs) <NEW_LINE> <DEDENT> elif len(args) + len(kwargs) > 1: <NEW_LINE> <INDENT> self._post(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _post(self, mbox, txtdname, ttl=0): <NEW_LINE> <INDENT> if '@' in mbox: <NEW_LINE> <INDENT> mbox = mbox.replace('@', '.') <NEW_LINE> <DEDENT> self._mbox = mbox <NEW_LINE> self._txtdname = txtdname <NEW_LINE> self._ttl = ttl <NEW_LINE> self.api_args = {'rdata': {'mbox': self._mbox, 'txtdname': self._txtdname}, 'ttl': self._ttl} <NEW_LINE> self._create_record(self.api_args) <NEW_LINE> <DEDENT> def rdata(self): <NEW_LINE> <INDENT> guts = super(RPRecord, self).rdata() <NEW_LINE> shell = {'rp_rdata': guts} <NEW_LINE> return shell <NEW_LINE> <DEDENT> @property <NEW_LINE> def mbox(self): <NEW_LINE> <INDENT> return self._mbox <NEW_LINE> <DEDENT> @mbox.setter <NEW_LINE> def mbox(self, value): <NEW_LINE> <INDENT> self._mbox = value <NEW_LINE> self.api_args['rdata']['mbox'] = self._mbox <NEW_LINE> self._update_record(self.api_args) <NEW_LINE> <DEDENT> @property <NEW_LINE> def txtdname(self): <NEW_LINE> <INDENT> return self._txtdname <NEW_LINE> <DEDENT> @txtdname.setter <NEW_LINE> def txtdname(self, value): <NEW_LINE> <INDENT> self._txtdname = value <NEW_LINE> self.api_args['rdata']['txtdname'] = self._txtdname <NEW_LINE> self._update_record(self.api_args) | The Responsible Person record allows an email address and some optional
human readable text to be associated with a host. Due to privacy and spam
considerations, :class:`RPRecords` are not widely used on public servers
but can provide very useful contact data during diagnosis and debugging
network problems. | 62599064dd821e528d6da514 |
@attrs <NEW_LINE> class Configuration(object): <NEW_LINE> <INDENT> source_project_credentials = attrib() <NEW_LINE> target_project_credentials = attrib() <NEW_LINE> source_storage_client = attrib() <NEW_LINE> target_storage_client = attrib() <NEW_LINE> target_logging_client = attrib() <NEW_LINE> source_project = attrib() <NEW_LINE> target_project = attrib() <NEW_LINE> bucket_name = attrib() <NEW_LINE> temp_bucket_name = attrib() <NEW_LINE> use_bucket_lock = attrib() <NEW_LINE> lock_file_name = attrib() <NEW_LINE> @classmethod <NEW_LINE> def from_conf(cls, conf): <NEW_LINE> <INDENT> temp_bucket_name = conf.bucket_name + '-temp' <NEW_LINE> if conf.tempBucketName: <NEW_LINE> <INDENT> temp_bucket_name = conf.tempBucketName <NEW_LINE> <DEDENT> return cls( source_project_credentials=service_account.Credentials. from_service_account_file( conf.gcp_source_project_service_account_key), target_project_credentials=service_account.Credentials. from_service_account_file( conf.gcp_target_project_service_account_key), source_storage_client=storage.Client.from_service_account_json( conf.gcp_source_project_service_account_key), target_storage_client=storage.Client.from_service_account_json( conf.gcp_target_project_service_account_key), target_logging_client=logging.Client.from_service_account_json( conf.gcp_target_project_service_account_key), source_project=conf.source_project, target_project=conf.target_project, bucket_name=conf.bucket_name, temp_bucket_name=temp_bucket_name, use_bucket_lock=conf.useBucketLock, lock_file_name=conf.lock_file_name) | Class to hold all of the config values set up on initial script run. | 62599064d6c5a102081e384b |
class Role(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=32, verbose_name='名称') <NEW_LINE> permissions = models.ManyToManyField('Permission', verbose_name='角色拥有的权限', blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name | Role table | 6259906444b2445a339b74f3
class TSQR(object): <NEW_LINE> <INDENT> def __init__(self, matrix, block_size, sc): <NEW_LINE> <INDENT> self.matrix = matrix <NEW_LINE> self.block_size = block_size <NEW_LINE> self.sc = sc <NEW_LINE> <DEDENT> def tsqr(self): <NEW_LINE> <INDENT> partitioned_rdd = partition_rdd(self.matrix.matrix, self.matrix.m, self.block_size, self.sc).map(lambda x: np.array(x)) <NEW_LINE> blocked_matrices = partitioned_rdd.map(lambda x: np.array(x)) <NEW_LINE> qr_blocks_mappers = blocked_matrices.map(lambda x: np.linalg.qr(x, 'r')) <NEW_LINE> each_r_dim = qr_blocks_mappers.map(lambda x: np.shape(x)).take(1)[0] <NEW_LINE> num_rows = qr_blocks_mappers.count() <NEW_LINE> flat_list = qr_blocks_mappers.flatMap(lambda x: -1*x).flatMap(lambda x: x).collect() <NEW_LINE> reshaped_matrix = np.reshape(np.array(flat_list), (each_r_dim[0]*num_rows, each_r_dim[1])) <NEW_LINE> r = np.linalg.qr(reshaped_matrix, 'r') <NEW_LINE> return r | Based on http://simons.berkeley.edu/sites/default/files/docs/782/gleichslides.pdf | 625990644f6381625f19a037 |
@default_for_key('top_10_accuracy', k=10) <NEW_LINE> @default_for_key('top_5_accuracy') <NEW_LINE> @default_for_key('top_10_acc', k=10) <NEW_LINE> @default_for_key('top_5_acc') <NEW_LINE> @running_mean <NEW_LINE> @mean <NEW_LINE> class TopKCategoricalAccuracy(Metric): <NEW_LINE> <INDENT> def __init__(self, pred_key=torchbearer.Y_PRED, target_key=torchbearer.Y_TRUE, k=5, ignore_index=-100): <NEW_LINE> <INDENT> super(TopKCategoricalAccuracy, self).__init__('top_' + str(k) + '_acc') <NEW_LINE> self.pred_key = pred_key <NEW_LINE> self.target_key = target_key <NEW_LINE> self.k = k <NEW_LINE> self.ignore_index = ignore_index <NEW_LINE> <DEDENT> def process(self, *args): <NEW_LINE> <INDENT> state = args[0] <NEW_LINE> y_pred = state[self.pred_key] <NEW_LINE> y_true = state[self.target_key] <NEW_LINE> mask = y_true.eq(self.ignore_index).eq(0) <NEW_LINE> y_pred = y_pred[mask] <NEW_LINE> y_true = y_true[mask] <NEW_LINE> sorted_indices = torch.topk(y_pred, self.k, dim=1)[1] <NEW_LINE> expanded_y = y_true.view(-1, 1).expand(-1, self.k) <NEW_LINE> return torch.sum(torch.eq(sorted_indices, expanded_y), dim=1).float() | Top K Categorical accuracy metric. Uses torch.topk to determine the top k predictions and compares to targets.
Decorated with a mean, running_mean and std. Default for keys: 'top_5_acc', 'top_10_acc'.
Args:
pred_key (StateKey): The key in state which holds the predicted values
target_key (StateKey): The key in state which holds the target values
ignore_index (int): Specifies a target value that is ignored and does not contribute to the metric output.
See `<https://pytorch.org/docs/stable/nn.html#crossentropyloss>`_ | 62599064a8370b77170f1af5 |
class ESP8266ROMFirmwareImage(BaseFirmwareImage): <NEW_LINE> <INDENT> ROM_LOADER = ESP8266ROM <NEW_LINE> def __init__(self, load_file=None): <NEW_LINE> <INDENT> super(ESP8266ROMFirmwareImage, self).__init__() <NEW_LINE> self.flash_mode = 0 <NEW_LINE> self.flash_size_freq = 0 <NEW_LINE> self.version = 1 <NEW_LINE> if load_file is not None: <NEW_LINE> <INDENT> segments = self.load_common_header(load_file, ESPLoader.ESP_IMAGE_MAGIC) <NEW_LINE> for _ in range(segments): <NEW_LINE> <INDENT> self.load_segment(load_file) <NEW_LINE> <DEDENT> self.checksum = self.read_checksum(load_file) <NEW_LINE> <DEDENT> <DEDENT> def default_output_name(self, input_file): <NEW_LINE> <INDENT> return input_file + '-' <NEW_LINE> <DEDENT> def save(self, basename): <NEW_LINE> <INDENT> irom_segment = self.get_irom_segment() <NEW_LINE> if irom_segment is not None: <NEW_LINE> <INDENT> with open("%s0x%05x.bin" % (basename, irom_segment.addr - ESP8266ROM.IROM_MAP_START), "wb") as f: <NEW_LINE> <INDENT> f.write(irom_segment.data) <NEW_LINE> <DEDENT> <DEDENT> normal_segments = self.get_non_irom_segments() <NEW_LINE> with open("%s0x00000.bin" % basename, 'wb') as f: <NEW_LINE> <INDENT> self.write_common_header(f, normal_segments) <NEW_LINE> checksum = ESPLoader.ESP_CHECKSUM_MAGIC <NEW_LINE> for segment in normal_segments: <NEW_LINE> <INDENT> checksum = self.save_segment(f, segment, checksum) <NEW_LINE> <DEDENT> self.append_checksum(f, checksum) | 'Version 1' firmware image, segments loaded directly by the ROM bootloader. | 6259906455399d3f05627c46 |
class Send(Get): <NEW_LINE> <INDENT> class Help: <NEW_LINE> <INDENT> synopsis = "send an email" <NEW_LINE> <DEDENT> xmlns = namespaces.email <NEW_LINE> dst = Attribute( "Destination to store exception object", type="reference", required=False ) <NEW_LINE> src = Attribute("Source email", type="index", default=None) <NEW_LINE> smtp = Attribute("SMTP server", default="") <NEW_LINE> failsilently = Attribute( "Should mail exceptions be ignored?", type="boolean", default=True ) <NEW_LINE> def logic(self, context): <NEW_LINE> <INDENT> fail_silently = self.failsilently(context) <NEW_LINE> _email = self.src(context) <NEW_LINE> if _email is None: <NEW_LINE> <INDENT> email = self.get_email(context) <NEW_LINE> with self.call(context, app=email.app, email=email): <NEW_LINE> <INDENT> yield logic.DeferNodeContents(email.email_element) <NEW_LINE> <DEDENT> <DEDENT> dst = self.dst(context) <NEW_LINE> if dst is not None: <NEW_LINE> <INDENT> context[self.dst(context)] = email <NEW_LINE> <DEDENT> if context.get(".debug", False): <NEW_LINE> <INDENT> context[".console"].obj(context, email) <NEW_LINE> <DEDENT> mail_server = self.archive.get_mailserver(self.smtp(context)) <NEW_LINE> try: <NEW_LINE> <INDENT> mail_server.send(email) <NEW_LINE> log.info( 'sent email to "{}", subject "{}"'.format( email.to_text, email.subject or "" ) ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.error( 'failed to send email to "%s", with subject "%s" (%s)', email.to_text, email.subject or "", e, ) <NEW_LINE> if not fail_silently: <NEW_LINE> <INDENT> self.throw( "email.send-failed", "Moya was unable to send email '{}' ({})".format(_email, e), diagnosis="Check the [smtp:] section in settings, and that the mail server is running.", info={"email": _email, "pyerror": e}, ) | Send an email. | 6259906499cbb53fe683260a |
class All2AllTanh(All2All): <NEW_LINE> <INDENT> __id__ = "b3a2bd5c-3c01-46ef-978a-fef22e008f31" <NEW_LINE> A = 1.7159 <NEW_LINE> B = 0.6666 <NEW_LINE> C = 9.0 <NEW_LINE> MAPPING = {"all2all_tanh"} <NEW_LINE> def initialize(self, device, **kwargs): <NEW_LINE> <INDENT> self.activation_mode = "ACTIVATION_TANH" <NEW_LINE> retval = super(All2AllTanh, self).initialize(device=device, **kwargs) <NEW_LINE> self.output.max_supposed = All2AllTanh.A <NEW_LINE> return retval <NEW_LINE> <DEDENT> def numpy_run(self): <NEW_LINE> <INDENT> super(All2AllTanh, self).numpy_run() <NEW_LINE> self.output.map_write() <NEW_LINE> mem = self.output.mem <NEW_LINE> mem *= All2AllTanh.B <NEW_LINE> numpy.tanh(mem, mem) <NEW_LINE> mem *= All2AllTanh.A | All2All with scaled tanh() activation f(x) = 1.7159 * tanh(0.6666 * x).
| 6259906445492302aabfdc02 |
class ModelBuilder: <NEW_LINE> <INDENT> def __init__(self, model_path: str = None, save: bool = None): <NEW_LINE> <INDENT> self.path = model_path <NEW_LINE> self.save = save <NEW_LINE> self.reg = LinearRegression() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"ModelBuilder(path: {self.path}, save: {self.save}, reg: {self.reg})" <NEW_LINE> <DEDENT> def predict_test(self, X) -> np.ndarray: <NEW_LINE> <INDENT> return self.reg.predict(X) <NEW_LINE> <DEDENT> def predict_from_dump(self, X) -> np.ndarray: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def save_model(self): <NEW_LINE> <INDENT> joblib.dump(self.reg, "{}/model.joblib".format(self.path)) <NEW_LINE> <DEDENT> def print_accuracy(self, X_test, y_test): <NEW_LINE> <INDENT> print("\n• Accuracy : {}%".format(self.reg.score(X_test, y_test) * 100)) <NEW_LINE> <DEDENT> def load_model(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return joblib.load("{}/model.joblib".format(self.path)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("File not found") <NEW_LINE> <DEDENT> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.reg.fit(X, y) | Class to train an ML model and print its results | 62599064442bda511e95d8ed
class Register(Signup): <NEW_LINE> <INDENT> def done(self): <NEW_LINE> <INDENT> u = User.by_name(self.username) <NEW_LINE> if u: <NEW_LINE> <INDENT> msg = 'That user already exists.' <NEW_LINE> self.render('signup.html', error_username = msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> u = User.register(self.username, self.password, self.email) <NEW_LINE> u.put() <NEW_LINE> self.login(u) <NEW_LINE> self.redirect('/') | Register : creates new user for blog | 62599064f7d966606f74944d |
class TestDockerNetDef(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testDockerNetDef(self): <NEW_LINE> <INDENT> pass | DockerNetDef unit test stubs | 62599064a17c0f6771d5d739 |
class Pop3(UpdateMonitorMixin, Resource): <NEW_LINE> <INDENT> def __init__(self, pop3s): <NEW_LINE> <INDENT> super(Pop3, self).__init__(pop3s) <NEW_LINE> self._meta_data['required_json_kind'] = 'tm:ltm:monitor:pop3:pop3state' | BIG-IP® Pop3 monitor resource. | 62599064cc0a2c111447c663 |
class VizQuadType(object): <NEW_LINE> <INDENT> VIZ_QUAD_GENERIC_2D = 0 <NEW_LINE> VIZ_QUAD_GENERIC_3D = 1 <NEW_LINE> VIZ_QUAD_MAT_MARKER = 2 <NEW_LINE> VIZ_QUAD_PLANNER_OBSTACLE = 3 <NEW_LINE> VIZ_QUAD_PLANNER_OBSTACLE_REPLAN = 4 <NEW_LINE> VIZ_QUAD_ROBOT_BOUNDING_BOX = 5 <NEW_LINE> VIZ_QUAD_POSE_MARKER = 6 <NEW_LINE> NUM_VIZ_QUAD_TYPES = 7 | Automatically-generated uint_8 enumeration. | 62599064e64d504609df9f61 |
class SudokuTreeNodeTests(unittest.TestCase): <NEW_LINE> <INDENT> def setup_empty(self): <NEW_LINE> <INDENT> self.board = Board() <NEW_LINE> self.number = random.randint(1, 9) <NEW_LINE> <DEDENT> def setup_solved(self): <NEW_LINE> <INDENT> x = Actual() <NEW_LINE> x.create() <NEW_LINE> self.board = x.board <NEW_LINE> self.number = random.randint(1, 9) <NEW_LINE> <DEDENT> def test_get_row_empty(self): <NEW_LINE> <INDENT> self.setup_empty() <NEW_LINE> node = SudokuTreeNode(self.number, 0, 5, self.board, 0) <NEW_LINE> checker = [0, 0, 0, 0, 0, 0] <NEW_LINE> self.assertEqual(node.get_row(), checker) | unittests created for all the methods of class SudokuTreeNode
Two setup methods are created so that the code runs faster: a solved board is not created
when it is not needed | 6259906445492302aabfdc03 |
class SparseArray(object): <NEW_LINE> <INDENT> def __init__(self,_set=None): <NEW_LINE> <INDENT> self._set = _set if _set else {0}^{0} <NEW_LINE> <DEDENT> def __setitem__(self,offset, val): <NEW_LINE> <INDENT> if val: <NEW_LINE> <INDENT> self._set.add(offset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._set.discard(offset) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self,offset): <NEW_LINE> <INDENT> return int(bool(offset in self._set)) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return SparseArray(self._set.copy()) | a mutable sequence facade for set() | 62599064e5267d203ee6cf52
class UDPStatusGetter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.key = cfg.CONF.health_manager.heartbeat_key <NEW_LINE> self.ip = cfg.CONF.health_manager.bind_ip <NEW_LINE> self.port = cfg.CONF.health_manager.bind_port <NEW_LINE> self.sockaddr = None <NEW_LINE> LOG.info('attempting to listen on %(ip)s port %(port)s', {'ip': self.ip, 'port': self.port}) <NEW_LINE> self.sock = None <NEW_LINE> self.update(self.key, self.ip, self.port) <NEW_LINE> self.health_executor = futures.ProcessPoolExecutor( max_workers=CONF.health_manager.health_update_threads) <NEW_LINE> self.stats_executor = futures.ProcessPoolExecutor( max_workers=CONF.health_manager.stats_update_threads) <NEW_LINE> self.repo = repositories.Repositories().amphorahealth <NEW_LINE> <DEDENT> def update(self, key, ip, port): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> for addrinfo in socket.getaddrinfo(ip, port, 0, socket.SOCK_DGRAM): <NEW_LINE> <INDENT> ai_family = addrinfo[0] <NEW_LINE> self.sockaddr = addrinfo[4] <NEW_LINE> if self.sock is not None: <NEW_LINE> <INDENT> self.sock.close() <NEW_LINE> <DEDENT> self.sock = socket.socket(ai_family, socket.SOCK_DGRAM) <NEW_LINE> self.sock.settimeout(1) <NEW_LINE> self.sock.bind(self.sockaddr) <NEW_LINE> if cfg.CONF.health_manager.sock_rlimit > 0: <NEW_LINE> <INDENT> rlimit = cfg.CONF.health_manager.sock_rlimit <NEW_LINE> LOG.info("setting sock rlimit to %s", rlimit) <NEW_LINE> self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, rlimit) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> if self.sock is None: <NEW_LINE> <INDENT> raise exceptions.NetworkConfig("unable to find suitable socket") <NEW_LINE> <DEDENT> <DEDENT> def dorecv(self, *args, **kw): <NEW_LINE> <INDENT> (data, srcaddr) = self.sock.recvfrom(UDP_MAX_SIZE) <NEW_LINE> LOG.debug('Received packet from %s', srcaddr) <NEW_LINE> try: <NEW_LINE> <INDENT> obj = status_message.unwrap_envelope(data, self.key) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.warning('Health Manager experienced an exception processing a ' 'heartbeat message from %s. Ignoring this packet. ' 'Exception: %s', srcaddr, e) <NEW_LINE> raise exceptions.InvalidHMACException() <NEW_LINE> <DEDENT> obj['recv_time'] = time.time() <NEW_LINE> return obj, srcaddr[0] <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obj, srcaddr = self.dorecv() <NEW_LINE> <DEDENT> except socket.timeout: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except exceptions.InvalidHMACException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.warning('Health Manager experienced an exception processing a' 'heartbeat packet. Ignoring this packet. ' 'Exception: %s', e) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.health_executor.submit(update_health, obj, srcaddr) <NEW_LINE> self.stats_executor.submit(update_stats, obj, srcaddr) | This class defines methods that will gather heartbeats
The heartbeats are transmitted via UDP and this class will bind to a port
and absorb them | 62599064baa26c4b54d509cc |
class ngamsInitCmdTest(ngamsTestSuite): <NEW_LINE> <INDENT> def test_handleCmdInit_1(self): <NEW_LINE> <INDENT> self.prepExtSrv(8888, 1, 1, 1) <NEW_LINE> info(1,"TODO: Change some cfg. parameter") <NEW_LINE> tmpStatFile = sendExtCmd(getHostName(), 8888, NGAMS_INIT_CMD) <NEW_LINE> refStatFile = "ref/ngamsInitCmdTest_test_handleCmdInit_1_1_ref" <NEW_LINE> self.checkFilesEq(refStatFile, tmpStatFile, "Incorrect status returned for INIT Command") <NEW_LINE> info(1,"TODO: Check that server has initialized with new parameter") | Synopsis:
Test Suite for the INIT Command.
Description:
The purpose of the Test Suite is to exercise the INIT Command.
Missing Test Cases:
- Missing Test Cases for abnormal conditions.
- Test normal case when loading cfg. from the DB. | 625990647b25080760ed8875 |
class ParenthesesVector(TokenVector, ValueType, ListType): <NEW_LINE> <INDENT> def parse(self, expect_single: bool=False) -> object: <NEW_LINE> <INDENT> if not self.tokens: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if not any(isinstance(token, LexicalSeparator) for token in self.tokens): <NEW_LINE> <INDENT> return super().parse() <NEW_LINE> <DEDENT> groups = [TokenVector()] <NEW_LINE> for token in self.tokens: <NEW_LINE> <INDENT> if isinstance(token, LexicalSeparator): <NEW_LINE> <INDENT> groups.append(TokenVector()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> groups[-1].append(token) <NEW_LINE> <DEDENT> <DEDENT> return [group.parse() for group in groups] | Describes a vector of tokens bounded in parentheses, such as those in a method call's arguments or those
signifying order-of-operations in arithmetic operations | 62599064796e427e5384fe9e
class DataLoader(object): <NEW_LINE> <INDENT> IID = False <NEW_LINE> MAX_NUM_CLASSES_PER_CLIENT = 5 <NEW_LINE> NUM_CLASSES = 10 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> img_rows, img_cols = 28, 28 <NEW_LINE> (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data() <NEW_LINE> if tf.keras.backend.image_data_format() == 'channels_first': <NEW_LINE> <INDENT> x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols) <NEW_LINE> x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols) <NEW_LINE> input_shape = (1, img_rows, img_cols) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1) <NEW_LINE> x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1) <NEW_LINE> input_shape = (img_rows, img_cols, 1) <NEW_LINE> <DEDENT> self.x_train = x_train.astype('float32') <NEW_LINE> self.x_test = x_test.astype('float32') <NEW_LINE> self.x_train /= 255 <NEW_LINE> self.x_test /= 255 <NEW_LINE> print('x_train shape:', x_train.shape) <NEW_LINE> print(x_train.shape[0], 'train samples') <NEW_LINE> print(x_test.shape[0], 'test samples') <NEW_LINE> self.y_train = tf.keras.utils.to_categorical(y_train, DataLoader.NUM_CLASSES) <NEW_LINE> self.y_test = tf.keras.utils.to_categorical(y_test, DataLoader.NUM_CLASSES) | Generic dataloading object | 6259906499cbb53fe683260b |
class TypeDetailView(ListView): <NEW_LINE> <INDENT> paginate_by = settings.GEARTRACKER_PAGINATE_BY <NEW_LINE> def category(self, **kwargs): <NEW_LINE> <INDENT> return get_object_or_404(Category, slug=self.kwargs['category']) <NEW_LINE> <DEDENT> def type(self, **kwargs): <NEW_LINE> <INDENT> return get_object_or_404(Type, category=self.category, slug=self.kwargs['slug']) <NEW_LINE> <DEDENT> def get_queryset(self, **kwargs): <NEW_LINE> <INDENT> return Item.objects.published().filter(type=self.type) <NEW_LINE> <DEDENT> def get_template_names(self, **kwargs): <NEW_LINE> <INDENT> return 'geartracker/type_detail.html' <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(TypeDetailView, self).get_context_data(**kwargs) <NEW_LINE> context['category'] = self.category <NEW_LINE> context['type'] = self.type <NEW_LINE> return context | Display all items of a given type. | 6259906476e4537e8c3f0cab |
class AzureDataLakeStorageRESTAPI(object): <NEW_LINE> <INDENT> def __init__( self, url, **kwargs ): <NEW_LINE> <INDENT> base_url = '{url}' <NEW_LINE> self._config = AzureDataLakeStorageRESTAPIConfiguration(url, **kwargs) <NEW_LINE> self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._serialize.client_side_validation = False <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self.service = ServiceOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.file_system = FileSystemOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.path = PathOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> <DEDENT> def _send_request(self, http_request, **kwargs): <NEW_LINE> <INDENT> path_format_arguments = { 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), } <NEW_LINE> http_request.url = self._client.format_url(http_request.url, **path_format_arguments) <NEW_LINE> stream = kwargs.pop("stream", True) <NEW_LINE> pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) <NEW_LINE> return pipeline_response.http_response <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._client.close() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._client.__enter__() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, *exc_details): <NEW_LINE> <INDENT> self._client.__exit__(*exc_details) | Azure Data Lake Storage provides storage for Hadoop and other big data workloads.
:ivar service: ServiceOperations operations
:vartype service: azure.storage.filedatalake.operations.ServiceOperations
:ivar file_system: FileSystemOperations operations
:vartype file_system: azure.storage.filedatalake.operations.FileSystemOperations
:ivar path: PathOperations operations
:vartype path: azure.storage.filedatalake.operations.PathOperations
:param url: The URL of the service account, container, or blob that is the target of the desired operation.
:type url: str | 625990642ae34c7f260ac80f |
class Preference(xmlnode.XMLNode): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> xmlnode.XMLNode.__init__(self) <NEW_LINE> self._tag = 'preference' <NEW_LINE> self._attr['name'] = None <NEW_LINE> self._attr['value'] = None | An xmlnode.XMLNode subclass representing a single environment preference:
::
>>> from music21 import environment
>>> a = environment.Preference() | 625990642ae34c7f260ac810 |
class TestCharactersLeftInRow(TestCase): <NEW_LINE> <INDENT> def test_with_characters(self): <NEW_LINE> <INDENT> row = ["foo", "bar", "baz"] <NEW_LINE> output = table.characters_left_in_row(row) <NEW_LINE> self.assertTrue(output) <NEW_LINE> <DEDENT> def test_with_no_characters(self): <NEW_LINE> <INDENT> row = [] <NEW_LINE> output = table.characters_left_in_row(row) <NEW_LINE> self.assertTrue(not output) <NEW_LINE> <DEDENT> def test_with_blank_string(self): <NEW_LINE> <INDENT> row = [""] <NEW_LINE> output = table.characters_left_in_row(row) <NEW_LINE> self.assertTrue(not output) | tests for table.characters_left_in_row | 625990647d43ff2487427fa4 |
class NSNitroNserrCrlShmemAllocFail(NSNitroSsl2Errors): <NEW_LINE> <INDENT> pass | Nitro error code 3648
CRL node allocation in the shared memory failed | 62599064f548e778e596ccb2
class Unique(object): <NEW_LINE> <INDENT> field_flags = ('unique', ) <NEW_LINE> def __init__(self, get_session, model, column, message=None): <NEW_LINE> <INDENT> self.get_session = get_session <NEW_LINE> self.model = model <NEW_LINE> self.column = column <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __call__(self, form, field): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obj = self.get_session().query(self.model) .filter(self.column == field.data).one() <NEW_LINE> if not hasattr(form, '_obj') or not form._obj == obj: <NEW_LINE> <INDENT> if self.message is None: <NEW_LINE> <INDENT> self.message = field.gettext('Already exists.') <NEW_LINE> <DEDENT> raise ValidationError(self.message) <NEW_LINE> <DEDENT> <DEDENT> except NoResultFound: <NEW_LINE> <INDENT> pass | Checks field value unicity against specified table field.
:param get_session:
A function that returns a SQLAlchemy Session.
:param model:
The model to check unicity against.
:param column:
The unique column.
:param message:
The error message. | 6259906463d6d428bbee3e1d |
class CreateListenerRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(CreateListenerRequest, self).__init__( '/regions/{regionId}/listeners', 'PUT', header, version) <NEW_LINE> self.parameters = parameters | Create a listener | 625990640a50d4780f706954
class DecrypterTestCase(test_lib.DecrypterTestCase): <NEW_LINE> <INDENT> def testInitialize(self): <NEW_LINE> <INDENT> test_decrypter = decrypter.Decrypter() <NEW_LINE> self.assertIsNotNone(test_decrypter) <NEW_LINE> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> decrypter.Decrypter(key=b'test1') | Tests for the decrypter interface. | 625990647047854f46340ade |
class UserProfileSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.UserProfile <NEW_LINE> fields = ('id', 'email', 'name', 'password') <NEW_LINE> extra_kwargs = { 'password' : { 'write_only': True, 'style': {'input_type': 'password'} } } <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> user = models.UserProfile.objects.create_user( email = validated_data['email'], name = validated_data['name'], password = validated_data['password'] ) <NEW_LINE> return user <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> if 'password' in validated_data: <NEW_LINE> <INDENT> password = validated_data.pop('password') <NEW_LINE> instance.set_password(password) <NEW_LINE> <DEDENT> return super().update(instance, validated_data) | Serializes a user profile object | 62599064462c4b4f79dbd130
class DictionaryImportResolver(idl.parser.ImportResolverBase): <NEW_LINE> <INDENT> def __init__(self, import_dict): <NEW_LINE> <INDENT> self._import_dict = import_dict <NEW_LINE> super(DictionaryImportResolver, self).__init__() <NEW_LINE> <DEDENT> def resolve(self, base_file, imported_file_name): <NEW_LINE> <INDENT> if not imported_file_name in self._import_dict: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return "imported_%s" % (imported_file_name) <NEW_LINE> <DEDENT> def open(self, resolved_file_name): <NEW_LINE> <INDENT> assert resolved_file_name.startswith("imported_") <NEW_LINE> imported_file_name = resolved_file_name.replace("imported_", "") <NEW_LINE> return io.StringIO(self._import_dict[imported_file_name]) | An import resolver resolves files from a dictionary. | 62599064f548e778e596ccb3 |
class LabelEncoder(base.BaseEstimator): <NEW_LINE> <INDENT> def __init__(self, min_obs=10): <NEW_LINE> <INDENT> self.min_obs = min_obs <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ('LabelEncoder(min_obs={})').format(self.min_obs) <NEW_LINE> <DEDENT> def _get_label_encoder_and_max(self, x): <NEW_LINE> <INDENT> x[pd.isnull(x)] = NAN_INT <NEW_LINE> label_count = {} <NEW_LINE> for label in x: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> label_count[label] += 1 <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> label_count[label] = 1 <NEW_LINE> <DEDENT> <DEDENT> label_encoder = {} <NEW_LINE> label_index = 1 <NEW_LINE> labels_not_encoded = 0 <NEW_LINE> for label in label_count.keys(): <NEW_LINE> <INDENT> if label_count[label] >= self.min_obs: <NEW_LINE> <INDENT> label_encoder[label] = label_index <NEW_LINE> label_index += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> labels_not_encoded += 1 <NEW_LINE> <DEDENT> <DEDENT> max_label = label_index - 1 <NEW_LINE> if labels_not_encoded == 0: <NEW_LINE> <INDENT> for label in label_encoder: <NEW_LINE> <INDENT> if label_encoder[label] == max_label: <NEW_LINE> <INDENT> label_encoder[label] = 0 <NEW_LINE> max_label -= 1 <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return label_encoder, max_label <NEW_LINE> <DEDENT> def _transform_col(self, x, col): <NEW_LINE> <INDENT> label_encoder = self.label_encoders[col] <NEW_LINE> x[pd.isnull(x)] = NAN_INT <NEW_LINE> labels = np.zeros((x.shape[0], ), dtype=np.int64) <NEW_LINE> for label in label_encoder: <NEW_LINE> <INDENT> labels[x == label] = label_encoder[label] <NEW_LINE> <DEDENT> return labels <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> self.label_encoders = [None] * X.shape[1] <NEW_LINE> self.label_maxes = [None] * X.shape[1] <NEW_LINE> for col in range(X.shape[1]): <NEW_LINE> <INDENT> self.label_encoders[col], self.label_maxes[col] = self._get_label_encoder_and_max(X[:, col]) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def transform(self, X): <NEW_LINE> <INDENT> for col in range(X.shape[1]): <NEW_LINE> <INDENT> X[:, col] = self._transform_col(X[:, col], col) <NEW_LINE> <DEDENT> return X <NEW_LINE> <DEDENT> def fit_transform(self, X, y=None): <NEW_LINE> <INDENT> self.label_encoders = [None] * X.shape[1] <NEW_LINE> self.label_maxes = [None] * X.shape[1] <NEW_LINE> for col in range(X.shape[1]): <NEW_LINE> <INDENT> self.label_encoders[col], self.label_maxes[col] = self._get_label_encoder_and_max(X[:, col]) <NEW_LINE> X[:, col] = self._transform_col(X[:, col], col) <NEW_LINE> <DEDENT> return X | Label Encoder that groups infrequent values into one label.
Attributes:
min_obs (int): minimum number of observations to assign a label.
label_encoders (list of dict): label encoders for columns
label_maxes (list of int): maximum of labels for columns | 6259906445492302aabfdc05 |
@zope.interface.implementer(interfaces.IAddForm) <NEW_LINE> class AddForm(Form): <NEW_LINE> <INDENT> ignoreContext = True <NEW_LINE> ignoreReadonly = True <NEW_LINE> _finishedAdd = False <NEW_LINE> @button.buttonAndHandler(_('Add'), name='add') <NEW_LINE> def handleAdd(self, action): <NEW_LINE> <INDENT> data, errors = self.extractData() <NEW_LINE> if errors: <NEW_LINE> <INDENT> self.status = self.formErrorsMessage <NEW_LINE> return <NEW_LINE> <DEDENT> obj = self.createAndAdd(data) <NEW_LINE> if obj is not None: <NEW_LINE> <INDENT> self._finishedAdd = True <NEW_LINE> <DEDENT> <DEDENT> def createAndAdd(self, data): <NEW_LINE> <INDENT> obj = self.create(data) <NEW_LINE> zope.event.notify(zope.lifecycleevent.ObjectCreatedEvent(obj)) <NEW_LINE> self.add(obj) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def create(self, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def add(self, object): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def nextURL(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> if self._finishedAdd: <NEW_LINE> <INDENT> self.request.response.redirect(self.nextURL()) <NEW_LINE> return "" <NEW_LINE> <DEDENT> return super(AddForm, self).render() | A field and button based add form. | 625990644a966d76dd5f061f |
class Phonebook: <NEW_LINE> <INDENT> def __init__(self, filename, num_digits, alphabet): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.num_digits = num_digits <NEW_LINE> self.alphabet = alphabet <NEW_LINE> self._numbers = PrefixSet() <NEW_LINE> self._new_numbers = [] <NEW_LINE> self._import(filename) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_nums(f): <NEW_LINE> <INDENT> return (line for line in (line.strip() for line in f) if line) <NEW_LINE> <DEDENT> def _import(self, filename): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(filename) as f: <NEW_LINE> <INDENT> for num in self._parse_nums(iter(f)): <NEW_LINE> <INDENT> assert len(num) <= self.num_digits <NEW_LINE> assert all(digit in self.alphabet for digit in num) <NEW_LINE> assert num not in self._numbers <NEW_LINE> self._numbers.add(num) <NEW_LINE> assert num in self._numbers <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def create_num(self): <NEW_LINE> <INDENT> def generate_available_number(digits): <NEW_LINE> <INDENT> if len(digits) == self.num_digits: <NEW_LINE> <INDENT> return digits <NEW_LINE> <DEDENT> for digit in sorted(self.alphabet, key=lambda k: random()): <NEW_LINE> <INDENT> candidate = digits + [digit] <NEW_LINE> if candidate not in self._numbers: <NEW_LINE> <INDENT> ans = generate_available_number(candidate) <NEW_LINE> if ans is not None: <NEW_LINE> <INDENT> return ans <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> num = "".join(generate_available_number([])) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise RuntimeError("Phonebook is full!") from None <NEW_LINE> <DEDENT> assert len(num) == self.num_digits <NEW_LINE> assert all(digit in self.alphabet for digit in num) <NEW_LINE> assert num not in self._numbers <NEW_LINE> self._numbers.add(num) <NEW_LINE> assert num in self._numbers <NEW_LINE> self._new_numbers.append(num) <NEW_LINE> return num <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> with open(self.filename, "a") as f: <NEW_LINE> <INDENT> for nun in self._new_numbers: <NEW_LINE> <INDENT> print(nun, file=f) <NEW_LINE> <DEDENT> <DEDENT> self._new_numbers = [] | Phonebook storing and creating reachable phone numbers.
The phone numbers are of length 'num_digits' (although shorter phone
numbers are accepted) and comprise characters from the given 'alphabet'. | 625990643539df3088ecd9c7 |
class UngenerateCertificatesTest(CertificateManagementTest): <NEW_LINE> <INDENT> command = 'ungenerated_certs' <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.course = self.courses[0] <NEW_LINE> <DEDENT> @override_settings(CERT_QUEUE='test-queue') <NEW_LINE> @patch('capa.xqueue_interface.XQueueInterface.send_to_queue', spec=True) <NEW_LINE> def test_ungenerated_certificate(self, mock_send_to_queue): <NEW_LINE> <INDENT> mock_send_to_queue.return_value = (0, "Successfully queued") <NEW_LINE> key = self.course.location.course_key <NEW_LINE> self._create_cert(key, self.user, CertificateStatuses.unavailable) <NEW_LINE> with mock_passing_grade(): <NEW_LINE> <INDENT> args = '-c {} --insecure'.format(str(key)) <NEW_LINE> call_command(self.command, *args.split(' ')) <NEW_LINE> <DEDENT> assert mock_send_to_queue.called <NEW_LINE> certificate = GeneratedCertificate.eligible_certificates.get( user=self.user, course_id=key ) <NEW_LINE> assert certificate.status == CertificateStatuses.generating | Tests for generating certificates. | 6259906491af0d3eaad3b552 |
class SuperCall(object): <NEW_LINE> <INDENT> def __init__(self, class_, object_): <NEW_LINE> <INDENT> object.__init__(self) <NEW_LINE> self.__dict__['_class'] = class_ <NEW_LINE> self.__dict__['_object'] = object_ <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(self._class, self._object).__call__(*args, **kwargs) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return getattr(super(self._class, self._object), name) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return getattr(self._object, name) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, name, value): setattr(self._object, name, value) <NEW_LINE> def __getstate__(self): return self._class, self._object <NEW_LINE> def __setstate__(self, args): <NEW_LINE> <INDENT> self.__dict__['_class'] = args[0] <NEW_LINE> self.__dict__['_object'] = args[1] <NEW_LINE> <DEDENT> def __dir__(self): return dir(super(self._class, self._object)) <NEW_LINE> def __repr__(self): return repr(super(self._class, self._object)) <NEW_LINE> def copy(self): <NEW_LINE> <INDENT> from copy import deepcopy <NEW_LINE> class_ = deepcopy(self.__dict__['_class']) <NEW_LINE> object_ = deepcopy(self.__dict__['_object']) <NEW_LINE> return self.__class__(class_, object_) | Obviates issues when using a "super" functional.
Since functionals of a job-folder are deepcopied, the following line
will not result in calling the next class in the __mro__.
>>> jobfolder.functional = super(Functional, functional)
Indeed, this line will first call the __getitem__, __setitem__ (or
__deepcopy__) of the super object. In general, this means we end-up with
``jobfolder.function == functional``.
This class obviates this difficulty.
>>> jobfolder.functional = SuperCall(Functional, functional) | 62599064dd821e528d6da516 |
class SavedWindowState(GObject.GObject): <NEW_LINE> <INDENT> __gtype_name__ = 'SavedWindowState' <NEW_LINE> width = GObject.property( type=int, nick='Current window width', default=-1) <NEW_LINE> height = GObject.property( type=int, nick='Current window height', default=-1) <NEW_LINE> is_maximized = GObject.property( type=bool, nick='Is window maximized', default=False) <NEW_LINE> is_fullscreen = GObject.property( type=bool, nick='Is window fullscreen', default=False) <NEW_LINE> def bind(self, window): <NEW_LINE> <INDENT> window.connect('size-allocate', self.on_size_allocate) <NEW_LINE> window.connect('window-state-event', self.on_window_state_event) <NEW_LINE> bind_flags = Gio.SettingsBindFlags.DEFAULT <NEW_LINE> self.settings = load_settings_schema(WINDOW_STATE_SCHEMA) <NEW_LINE> self.settings.bind('width', self, 'width', bind_flags) <NEW_LINE> self.settings.bind('height', self, 'height', bind_flags) <NEW_LINE> self.settings.bind('is-maximized', self, 'is-maximized', bind_flags) <NEW_LINE> self.settings.bind('is-fullscreen', self, 'is-fullscreen', bind_flags) <NEW_LINE> window.set_default_size(self.props.width, self.props.height) <NEW_LINE> if self.props.is_maximized: <NEW_LINE> <INDENT> window.maximize() <NEW_LINE> <DEDENT> if self.props.is_fullscreen: <NEW_LINE> <INDENT> window.fullscreen() <NEW_LINE> <DEDENT> <DEDENT> def on_size_allocate(self, window, allocation): <NEW_LINE> <INDENT> if not (self.props.is_maximized or self.props.is_fullscreen): <NEW_LINE> <INDENT> width, height = window.get_size() <NEW_LINE> self.props.width = width <NEW_LINE> self.props.height = height <NEW_LINE> <DEDENT> <DEDENT> def on_window_state_event(self, window, event): <NEW_LINE> <INDENT> state = event.window.get_state() <NEW_LINE> self.props.is_maximized = state & Gdk.WindowState.MAXIMIZED <NEW_LINE> self.props.is_fullscreen = state & Gdk.WindowState.FULLSCREEN | Utility class for saving and restoring GtkWindow state | 625990643cc13d1c6d466e6d |
class Employee(): <NEW_LINE> <INDENT> def __init__(self, firstname, lastname, salary): <NEW_LINE> <INDENT> self.firstname = firstname <NEW_LINE> self.lastname = lastname <NEW_LINE> self.salary = salary <NEW_LINE> <DEDENT> def give_raise(self, amount = 5000): <NEW_LINE> <INDENT> self.salary += amount | Simulate an employee | 62599064498bea3a75a59195 |
class Boton(object): <NEW_LINE> <INDENT> def __init__(self, pin, nombre='boton1'): <NEW_LINE> <INDENT> self.gpio = mraa.Gpio(pin) <NEW_LINE> self.gpio.dir(mraa.DIR_IN) <NEW_LINE> self.gpio.mode(mraa.MODE_PULLUP) <NEW_LINE> self.nombre = nombre <NEW_LINE> self.pin = pin <NEW_LINE> <DEDENT> def leer_estado(self): <NEW_LINE> <INDENT> return self.gpio.read() | Class to create a button with a pull-up pin | 625990649c8ee82313040d1d
class InvoiceDetail(LoginRequiredMixin, DetailView): <NEW_LINE> <INDENT> model = Invoice <NEW_LINE> template_name = 'coin/invoice.html' <NEW_LINE> context_object_name = 'invoice_detail' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return Invoice.objects.filter( Q(invoiced_to=self.request.user) | Q(issuer=self.request.user)) | Returns details of given invoice. | 62599064379a373c97d9a748 |
class IntegrateAndFire(): <NEW_LINE> <INDENT> _R_M = 50e6 <NEW_LINE> _C_M = 200e-12 <NEW_LINE> _V_E = -70e-3 <NEW_LINE> _V_TH = -40e-3 <NEW_LINE> _V_RESET = -80e-3 <NEW_LINE> _T_TOT = 1e-1/3 <NEW_LINE> _DELTA_T = 10e-6 <NEW_LINE> _T_REF = 3e-3 <NEW_LINE> t = np.arange(0.0, _T_TOT, _DELTA_T) <NEW_LINE> _I_m = None <NEW_LINE> i = np.arange(0.0, 10e-9, 100e-12) <NEW_LINE> iff = lambda self, t: (t/100e-12) <NEW_LINE> t_2 = np.linspace(0, 10e-9, 1000) <NEW_LINE> VMF = lambda self, v: (v % (self._V_TH - self._V_RESET)) * (self._V_TH - self._V_RESET) + self._V_RESET <NEW_LINE> def __init__(self, i_m=None): <NEW_LINE> <INDENT> self._I_m = i_m <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def voltage_Model(vm, t, self): <NEW_LINE> <INDENT> dVdt = (-1 * (1 / self._R_M) * (vm - self._V_E) + self._I_m) / self._C_M <NEW_LINE> return dVdt <NEW_LINE> <DEDENT> getVoltageModel = lambda self: odeint(self.voltage_Model, self._V_E, self.t, args=(self,)) <NEW_LINE> getVoltageModelThresholded = lambda self: list(map(lambda v_m: self.VMF(v_m), self.getVoltageModel())) <NEW_LINE> @staticmethod <NEW_LINE> def voltage_Model_Sweep(vm, t, self): <NEW_LINE> <INDENT> dVdt = (-1 * (1 / self._R_M) * (vm - self._V_E) + t / self._C_M) <NEW_LINE> return dVdt <NEW_LINE> <DEDENT> getVoltageModel_Sweep = lambda self: odeint(self.voltage_Model_Sweep, self._V_E, self.t_2, args=(self,)) <NEW_LINE> getVoltageModelThresholded_Sweep = lambda self: list(map(lambda v_m: self.VMF(v_m), self.getVoltageModel_Sweep())) <NEW_LINE> @staticmethod <NEW_LINE> def frequency_Model(vm, i, self): <NEW_LINE> <INDENT> i_gt_i_th = lambda i: (self._T_REF - self._R_M * self._C_M * np.log(1 - (self._V_TH / (i * self._R_M)))) ** -1 <NEW_LINE> i_f = 0 if i <= ((self._V_TH - self._V_E) / self._R_M) else i_gt_i_th(i) <NEW_LINE> return i_f <NEW_LINE> <DEDENT> getFrequencyModel = lambda self: odeint(self.frequency_Model, self._V_E, self.i, args=(self,)) | Standard Integrate-and-Fire Model | 625990647d43ff2487427fa5 |
class HostCertWindow(TitledPage): <NEW_LINE> <INDENT> def __init__(self, parent, title): <NEW_LINE> <INDENT> TitledPage.__init__(self, parent, title) <NEW_LINE> self.emailId = wx.NewId() <NEW_LINE> self.hostId = wx.NewId() <NEW_LINE> self.text = wx.StaticText(self, -1, "The e-mail address will be used for verification, please make sure it is valid.") <NEW_LINE> self.emailText = wx.StaticText(self, -1, "E-mail:") <NEW_LINE> self.hostText = wx.StaticText(self, -1, "Machine Name:") <NEW_LINE> self.emailCtrl = wx.TextCtrl(self, self.emailId, validator = HostCertValidator()) <NEW_LINE> self.hostName = SystemConfig.instance().GetHostname(); <NEW_LINE> self.hostCtrl = wx.TextCtrl(self, self.hostId, self.hostName, validator = HostCertValidator()) <NEW_LINE> self.SetEvents() <NEW_LINE> self.Layout() <NEW_LINE> <DEDENT> def SetEvents(self): <NEW_LINE> <INDENT> wx.EVT_TEXT(self.emailCtrl, self.emailId, self.EnterText) <NEW_LINE> wx.EVT_TEXT(self.hostCtrl , self.hostId , self.EnterText) <NEW_LINE> <DEDENT> def EnterText(self, event): <NEW_LINE> <INDENT> item = event.GetEventObject() <NEW_LINE> item.SetBackgroundColour((254, 254, 254)) <NEW_LINE> item.Refresh() <NEW_LINE> <DEDENT> def Layout(self): <NEW_LINE> <INDENT> self.sizer.Add(self.text, 0, wx.ALL, 5) <NEW_LINE> self.sizer.Add(wx.Size(10, 10)) <NEW_LINE> gridSizer = wx.FlexGridSizer(2, 2, 6, 6) <NEW_LINE> gridSizer.Add(self.hostText) <NEW_LINE> gridSizer.Add(self.hostCtrl, 0, wx.EXPAND) <NEW_LINE> gridSizer.Add(self.emailText) <NEW_LINE> gridSizer.Add(self.emailCtrl, 0, wx.EXPAND) <NEW_LINE> gridSizer.AddGrowableCol(1) <NEW_LINE> self.sizer.Add(gridSizer, 0, wx.ALL | wx.EXPAND, 5) | Includes information for requesting a host certificate. | 6259906445492302aabfdc06 |
class Operations(object): <NEW_LINE> <INDENT> models = models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self.api_version = "2017-09-01-preview" <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def list( self, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> url = '/providers/Microsoft.DataFactory/operations' <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send(request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> raise models.ErrorResponseException(self._deserialize, response) <NEW_LINE> <DEDENT> deserialized = None <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> deserialized = self._deserialize('OperationListResponse', response) <NEW_LINE> <DEDENT> if raw: <NEW_LINE> <INDENT> client_raw_response = ClientRawResponse(deserialized, response) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized | Operations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version. Constant value: "2017-09-01-preview". | 6259906416aa5153ce401c07 |
class LazyLoaderVirtualEnabledTest(TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.opts = salt.config.minion_config(None) <NEW_LINE> cls.opts['disable_modules'] = ['pillar'] <NEW_LINE> cls.opts['grains'] = grains(cls.opts) <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.loader = LazyLoader(_module_dirs(copy.deepcopy(self.opts), 'modules', 'module'), copy.deepcopy(self.opts), tag='module') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> del self.loader <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> del cls.opts <NEW_LINE> <DEDENT> def test_basic(self): <NEW_LINE> <INDENT> self.assertEqual(self.loader._dict, {}) <NEW_LINE> self.assertTrue(inspect.isfunction(self.loader['test.ping'])) <NEW_LINE> for key, val in six.iteritems(self.loader._dict): <NEW_LINE> <INDENT> self.assertEqual(key.split('.', 1)[0], 'test') <NEW_LINE> <DEDENT> self.assertFalse('test.missing_func' in self.loader._dict) <NEW_LINE> <DEDENT> def test_badkey(self): <NEW_LINE> <INDENT> with self.assertRaises(KeyError): <NEW_LINE> <INDENT> self.loader[None] <NEW_LINE> <DEDENT> with self.assertRaises(KeyError): <NEW_LINE> <INDENT> self.loader[1] <NEW_LINE> <DEDENT> <DEDENT> def test_disable(self): <NEW_LINE> <INDENT> self.assertNotIn('pillar.items', self.loader) <NEW_LINE> <DEDENT> def test_len_load(self): <NEW_LINE> <INDENT> self.assertEqual(self.loader._dict, {}) <NEW_LINE> len(self.loader) <NEW_LINE> self.assertNotEqual(self.loader._dict, {}) <NEW_LINE> <DEDENT> def test_iter_load(self): <NEW_LINE> <INDENT> self.assertEqual(self.loader._dict, {}) <NEW_LINE> for key, func in six.iteritems(self.loader): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self.assertNotEqual(self.loader._dict, {}) <NEW_LINE> <DEDENT> def test_context(self): <NEW_LINE> <INDENT> self.assertEqual(self.loader._dict, {}) <NEW_LINE> func = self.loader['test.ping'] <NEW_LINE> with patch.dict(func.__globals__['__context__'], {'foo': 'bar'}): <NEW_LINE> <INDENT> self.assertEqual(self.loader['test.echo'].__globals__['__context__']['foo'], 'bar') <NEW_LINE> self.assertEqual(self.loader['grains.get'].__globals__['__context__']['foo'], 'bar') <NEW_LINE> <DEDENT> <DEDENT> def test_globals(self): <NEW_LINE> <INDENT> func_globals = self.loader['test.ping'].__globals__ <NEW_LINE> self.assertEqual(func_globals['__grains__'], self.opts.get('grains', {})) <NEW_LINE> self.assertEqual(func_globals['__pillar__'], self.opts.get('pillar', {})) <NEW_LINE> for key, val in six.iteritems(func_globals['__opts__']): <NEW_LINE> <INDENT> if key in salt.config.DEFAULT_MASTER_OPTS and key not in salt.config.DEFAULT_MINION_OPTS: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if key not in salt.config.DEFAULT_MASTER_OPTS and key not in salt.config.DEFAULT_MINION_OPTS: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.assertEqual(self.opts[key], val) <NEW_LINE> <DEDENT> <DEDENT> def test_pack(self): <NEW_LINE> <INDENT> self.loader.pack['__foo__'] = 'bar' <NEW_LINE> func_globals = self.loader['test.ping'].__globals__ <NEW_LINE> self.assertEqual(func_globals['__foo__'], 'bar') <NEW_LINE> <DEDENT> def test_virtual(self): <NEW_LINE> <INDENT> self.assertNotIn('test_virtual.ping', self.loader) | Test the base loader of salt. | 625990645166f23b2e244afb |
class UpdateEvent(Model): <NEW_LINE> <INDENT> def __init__(self, event_type=None, event_content=None): <NEW_LINE> <INDENT> self.swagger_types = { 'event_type': str, 'event_content': object } <NEW_LINE> self.attribute_map = { 'event_type': 'event_type', 'event_content': 'event_content' } <NEW_LINE> self._event_type = event_type <NEW_LINE> self._event_content = event_content <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt): <NEW_LINE> <INDENT> return deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def event_type(self): <NEW_LINE> <INDENT> return self._event_type <NEW_LINE> <DEDENT> @event_type.setter <NEW_LINE> def event_type(self, event_type): <NEW_LINE> <INDENT> self._event_type = event_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def event_content(self): <NEW_LINE> <INDENT> return self._event_content <NEW_LINE> <DEDENT> @event_content.setter <NEW_LINE> def event_content(self, event_content): <NEW_LINE> <INDENT> self._event_content = event_content | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 625990648e7ae83300eea7b8 |
class AddField(FieldOperation): <NEW_LINE> <INDENT> def __init__(self, model_name, name, field, preserve_default=True): <NEW_LINE> <INDENT> self.preserve_default = preserve_default <NEW_LINE> super().__init__(model_name, name, field) <NEW_LINE> <DEDENT> def deconstruct(self): <NEW_LINE> <INDENT> kwargs = { 'model_name': self.model_name, 'name': self.name, 'field': self.field, } <NEW_LINE> if self.preserve_default is not True: <NEW_LINE> <INDENT> kwargs['preserve_default'] = self.preserve_default <NEW_LINE> <DEDENT> return ( self.__class__.__name__, [], kwargs ) <NEW_LINE> <DEDENT> def state_forwards(self, app_label, state): <NEW_LINE> <INDENT> if not self.preserve_default: <NEW_LINE> <INDENT> field = self.field.clone() <NEW_LINE> field.default = NOT_PROVIDED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> field = self.field <NEW_LINE> <DEDENT> state.models[app_label, self.model_name_lower].fields[self.name] = field <NEW_LINE> delay = not field.is_relation <NEW_LINE> state.reload_model(app_label, self.model_name_lower, delay=delay) <NEW_LINE> <DEDENT> def database_forwards(self, app_label, schema_editor, from_state, to_state): <NEW_LINE> <INDENT> to_model = to_state.apps.get_model(app_label, self.model_name) <NEW_LINE> if self.allow_migrate_model(schema_editor.connection.alias, to_model): <NEW_LINE> <INDENT> from_model = from_state.apps.get_model(app_label, self.model_name) <NEW_LINE> field = to_model._meta.get_field(self.name) <NEW_LINE> if not self.preserve_default: <NEW_LINE> <INDENT> field.default = self.field.default <NEW_LINE> <DEDENT> schema_editor.add_field( from_model, field, ) <NEW_LINE> if not self.preserve_default: <NEW_LINE> <INDENT> field.default = NOT_PROVIDED <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def database_backwards(self, app_label, schema_editor, from_state, to_state): <NEW_LINE> <INDENT> from_model = from_state.apps.get_model(app_label, self.model_name) <NEW_LINE> if self.allow_migrate_model(schema_editor.connection.alias, from_model): <NEW_LINE> <INDENT> schema_editor.remove_field(from_model, from_model._meta.get_field(self.name)) <NEW_LINE> <DEDENT> <DEDENT> def describe(self): <NEW_LINE> <INDENT> return "Add field %s to %s" % (self.name, self.model_name) <NEW_LINE> <DEDENT> def reduce(self, operation, app_label): <NEW_LINE> <INDENT> if isinstance(operation, FieldOperation) and self.is_same_field_operation(operation): <NEW_LINE> <INDENT> if isinstance(operation, AlterField): <NEW_LINE> <INDENT> return [ AddField( model_name=self.model_name, name=operation.name, field=operation.field, ), ] <NEW_LINE> <DEDENT> elif isinstance(operation, RemoveField): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> elif isinstance(operation, RenameField): <NEW_LINE> <INDENT> return [ AddField( model_name=self.model_name, name=operation.new_name, field=self.field, ), ] <NEW_LINE> <DEDENT> <DEDENT> return super().reduce(operation, app_label) | Add a field to a model. | 62599064a17c0f6771d5d73b |
class settings(ProtectedPage): <NEW_LINE> <INDENT> def GET(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open('./data/proto.json', 'r') as f: <NEW_LINE> <INDENT> settings = json.load(f) <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> settings = {} <NEW_LINE> <DEDENT> return template_render.proto(settings) | Load an html page for entering plugin settings. | 6259906466673b3332c31b27 |
class Wikipedia(Engine): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> Engine.__init__(self, **kwargs) <NEW_LINE> <DEDENT> def _search(self, query): <NEW_LINE> <INDENT> if not query.top: <NEW_LINE> <INDENT> raise QueryParamException(self.name, "Total result amount (query.top) not specified") <NEW_LINE> <DEDENT> return self._request(query) <NEW_LINE> <DEDENT> def _request(self, query): <NEW_LINE> <INDENT> search_params = {'format': 'xml', 'search': query.terms, 'action': 'opensearch', 'limit': query.top} <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.get(API_ENDPOINT, params=search_params) <NEW_LINE> <DEDENT> except requests.exceptions.ConnectionError: <NEW_LINE> <INDENT> raise EngineConnectionException(self.name, "Unable to send request, check connectivity") <NEW_LINE> <DEDENT> if response.status_code != 200: <NEW_LINE> <INDENT> raise EngineConnectionException(self.name, "", code=response.status_code) <NEW_LINE> <DEDENT> return Wikipedia._parse_xml_response(query, response) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_xml_response(query, results): <NEW_LINE> <INDENT> response = Response(query.terms) <NEW_LINE> xml_doc = xml.dom.minidom.parseString(results.content) <NEW_LINE> results = xml_doc.getElementsByTagName('Item') <NEW_LINE> for result in results: <NEW_LINE> <INDENT> title = result.getElementsByTagName('Text')[0].firstChild.data <NEW_LINE> url = result.getElementsByTagName('Url')[0].firstChild.data <NEW_LINE> summary = result.getElementsByTagName('Description')[0].firstChild.data <NEW_LINE> response.add_result(title=title, url=url, summary=summary) <NEW_LINE> <DEDENT> return response | Wikipedia search engine. | 625990645fdd1c0f98e5f6ae |
class AircraftSerializer(serializers.HyperlinkedModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Aircraft <NEW_LINE> fields = ('man_type', 'tail_number', 'license_type', 'id', 'photo') <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> user = self.context['request'].user <NEW_LINE> aircraft = Aircraft.objects.create(user=user, **validated_data) <NEW_LINE> return aircraft | Serializer to access Aircraft | 625990647047854f46340adf |
class RedLockFactory(object): <NEW_LINE> <INDENT> def __init__(self, connection_details): <NEW_LINE> <INDENT> self.redis_nodes = [] <NEW_LINE> for conn in connection_details: <NEW_LINE> <INDENT> if isinstance(conn, redis.StrictRedis): <NEW_LINE> <INDENT> node = conn <NEW_LINE> <DEDENT> elif 'url' in conn: <NEW_LINE> <INDENT> url = conn.pop('url') <NEW_LINE> node = redis.StrictRedis.from_url(url, **conn) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node = redis.StrictRedis(**conn) <NEW_LINE> <DEDENT> node._release_script = node.register_script(RELEASE_LUA_SCRIPT) <NEW_LINE> self.redis_nodes.append(node) <NEW_LINE> self.quorum = len(self.redis_nodes) // 2 + 1 <NEW_LINE> <DEDENT> <DEDENT> def create_lock(self, resource, **kwargs): <NEW_LINE> <INDENT> lock = RedLock(resource=resource, created_by_factory=True, **kwargs) <NEW_LINE> lock.redis_nodes = self.redis_nodes <NEW_LINE> lock.quorum = self.quorum <NEW_LINE> lock.factory = self <NEW_LINE> return lock | A Factory class that helps reuse multiple Redis connections. | 62599064cc0a2c111447c665 |
class RingSensor(RingEntityMixin, SensorEntity): <NEW_LINE> <INDENT> def __init__(self, config_entry_id, device, sensor_type): <NEW_LINE> <INDENT> super().__init__(config_entry_id, device) <NEW_LINE> self._sensor_type = sensor_type <NEW_LINE> self._extra = None <NEW_LINE> self._icon = f"mdi:{SENSOR_TYPES.get(sensor_type)[3]}" <NEW_LINE> self._kind = SENSOR_TYPES.get(sensor_type)[4] <NEW_LINE> self._name = f"{self._device.name} {SENSOR_TYPES.get(sensor_type)[0]}" <NEW_LINE> self._unique_id = f"{device.id}-{sensor_type}" <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> if self._sensor_type == "volume": <NEW_LINE> <INDENT> return self._device.volume <NEW_LINE> <DEDENT> if self._sensor_type == "battery": <NEW_LINE> <INDENT> return self._device.battery_life <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_class(self): <NEW_LINE> <INDENT> return SENSOR_TYPES[self._sensor_type][5] <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> if self._sensor_type == "battery" and self._device.battery_life is not None: <NEW_LINE> <INDENT> return icon_for_battery_level( battery_level=self._device.battery_life, charging=False ) <NEW_LINE> <DEDENT> return self._icon <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return SENSOR_TYPES.get(self._sensor_type)[2] | A sensor implementation for Ring device. | 625990642c8b7c6e89bd4f1b |
class AggregateSavingRule(HARKobject): <NEW_LINE> <INDENT> def __init__(self,intercept,slope): <NEW_LINE> <INDENT> self.intercept = intercept <NEW_LINE> self.slope = slope <NEW_LINE> self.distance_criteria = ['slope','intercept'] <NEW_LINE> <DEDENT> def __call__(self,Mnow): <NEW_LINE> <INDENT> Aagg = np.exp(self.intercept + self.slope*np.log(Mnow)) <NEW_LINE> return Aagg | A class to represent agent beliefs about aggregate saving at the end of this period (AaggNow) as
a function of (normalized) aggregate market resources at the beginning of the period (MaggNow). | 62599064f548e778e596ccb5 |
class Memory(ProcfsMetric): <NEW_LINE> <INDENT> TOTAL, FREE, _AVAIL, _BUFFERS, CACHED = xrange(5) <NEW_LINE> NAME_FIELD, VALUE_FIELD, UNITS_FIELD = xrange(3) <NEW_LINE> TOTAL_FIELD_NAME = 'MemTotal:' <NEW_LINE> FREE_FIELD_NAME = 'MemFree:' <NEW_LINE> CACHED_FIELD_NAME = 'Cached:' <NEW_LINE> SUFFIX = 'kB' <NEW_LINE> Mem = namedtuple('Mem', [ 'total', 'free', 'cached', 'used', 'load', 'usable', 'free_and_cached_ma', ]) <NEW_LINE> def __init__(self, path, alpha): <NEW_LINE> <INDENT> super(Memory, self).__init__(path) <NEW_LINE> self._free_and_cached_ma = EWMA(alpha) <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> mem = Memory._parse(self.read_nfirst_lines(5)) <NEW_LINE> self._free_and_cached_ma.update(mem.free + mem.cached) <NEW_LINE> load_ma, usable_ma = self._calc_load_ma(mem) <NEW_LINE> return Memory.Mem( mem.total, mem.free, mem.cached, mem.used, load_ma, usable_ma, self._free_and_cached_ma.int_of_value, ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse(lines): <NEW_LINE> <INDENT> splitted = [ln.split() for ln in lines] <NEW_LINE> Memory._assert_splitted(splitted) <NEW_LINE> vals = [int(val[Memory.VALUE_FIELD]) for val in splitted] <NEW_LINE> total_kb = vals[Memory.TOTAL] <NEW_LINE> free_kb = vals[Memory.FREE] <NEW_LINE> cached_kb = vals[Memory.CACHED] <NEW_LINE> used_kb = vals[Memory.TOTAL] - vals[Memory.FREE] <NEW_LINE> return Memory.Mem( total_kb << 10, free_kb << 10, cached_kb << 10, used_kb << 10, .0, .0, .0, ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _assert_splitted(splitted): <NEW_LINE> <INDENT> assert splitted[Memory.TOTAL][Memory.NAME_FIELD] == Memory.TOTAL_FIELD_NAME <NEW_LINE> assert splitted[Memory.FREE][Memory.NAME_FIELD] == Memory.FREE_FIELD_NAME <NEW_LINE> assert splitted[Memory.CACHED][Memory.NAME_FIELD] == Memory.CACHED_FIELD_NAME <NEW_LINE> assert splitted[Memory.TOTAL][Memory.UNITS_FIELD] == Memory.SUFFIX <NEW_LINE> assert splitted[Memory.FREE][Memory.UNITS_FIELD] == Memory.SUFFIX <NEW_LINE> assert splitted[Memory.CACHED][Memory.UNITS_FIELD] == Memory.SUFFIX <NEW_LINE> <DEDENT> def _calc_load_ma(self, mem): <NEW_LINE> <INDENT> if not mem.total: <NEW_LINE> <INDENT> return .0, .0 <NEW_LINE> <DEDENT> free = self._free_and_cached_ma.value <NEW_LINE> load = mem.total - free <NEW_LINE> return clamp(load / float(mem.total), .0, 1.0), clamp(free / float(mem.total), .0, 1.0) | Memory metrics from /proc/meminfo. | 625990648da39b475be04915
class VarLibCFFDictMergeError(VarLibMergeError): <NEW_LINE> <INDENT> def __init__(self, key, value, values): <NEW_LINE> <INDENT> error_msg = ( f"For the Private Dict key '{key}', the default font value list:" f"\n\t{value}\nhad a different number of values than a region font:" ) <NEW_LINE> for region_value in values: <NEW_LINE> <INDENT> error_msg += f"\n\t{region_value}" <NEW_LINE> <DEDENT> self.args = (error_msg,) | Raised when a CFF PrivateDict cannot be merged. | 62599064097d151d1a2c2797 |
class VirtualEnvironment(object): <NEW_LINE> <INDENT> def __init__(self, location, *args, **kwargs): <NEW_LINE> <INDENT> self.location = Path(location) <NEW_LINE> self.pip_source_dir = kwargs.pop("pip_source_dir") <NEW_LINE> self._system_site_packages = kwargs.pop("system_site_packages", False) <NEW_LINE> home, lib, inc, bin = _virtualenv.path_locations(self.location) <NEW_LINE> if hasattr(sys, "pypy_version_info"): <NEW_LINE> <INDENT> lib = os.path.join(home, 'lib-python', sys.version[:3]) <NEW_LINE> <DEDENT> self.lib = Path(lib) <NEW_LINE> self.bin = Path(bin) <NEW_LINE> super(VirtualEnvironment, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<VirtualEnvironment {}>".format(self.location) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create(cls, location, clear=False, pip_source_dir=None): <NEW_LINE> <INDENT> obj = cls(location, pip_source_dir=pip_source_dir) <NEW_LINE> obj._create(clear=clear) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def _create(self, clear=False): <NEW_LINE> <INDENT> _virtualenv.create_environment( self.location, clear=clear, never_download=True, no_pip=True, ) <NEW_LINE> cmd = [self.bin.join("python"), "setup.py", "develop"] <NEW_LINE> p = subprocess.Popen( cmd, cwd=self.pip_source_dir, stderr=subprocess.STDOUT, stdout=DEVNULL, ) <NEW_LINE> p.communicate() <NEW_LINE> if p.returncode != 0: <NEW_LINE> <INDENT> raise Exception(p.stderr) <NEW_LINE> raise subprocess.CalledProcessError( p.returncode, cmd[0], output=p.stdout, ) <NEW_LINE> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self._create(clear=True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def system_site_packages(self): <NEW_LINE> <INDENT> return self._system_site_packages <NEW_LINE> <DEDENT> @system_site_packages.setter <NEW_LINE> def system_site_packages(self, value): <NEW_LINE> <INDENT> marker = self.lib.join("no-global-site-packages.txt") <NEW_LINE> if value: <NEW_LINE> <INDENT> marker.rm() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> marker.touch() <NEW_LINE> <DEDENT> self._system_site_packages = value | An abstraction around virtual environments, currently it only uses
virtualenv but in the future it could use pyvenv. | 625990643539df3088ecd9c9 |
class ROUGEScorer(Scorer): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ROUGEScorer, self).__init__("ROUGE") <NEW_LINE> <DEDENT> def summarize(self, writer, step, score): <NEW_LINE> <INDENT> self._summarize_value(writer, step, "external_evaluation/ROUGE-1", score["rouge-1"]) <NEW_LINE> self._summarize_value(writer, step, "external_evaluation/ROUGE-2", score["rouge-2"]) <NEW_LINE> self._summarize_value(writer, step, "external_evaluation/ROUGE-L", score["rouge-l"]) <NEW_LINE> <DEDENT> def log(self, score): <NEW_LINE> <INDENT> tf.logging.info("Evaluation score: ROUGE-1 = %f; ROUGE-2 = %f; ROUGE-L = %s", score["rouge-1"], score["rouge-2"], score["rouge-l"]) <NEW_LINE> <DEDENT> def __call__(self, labels_file, predictions_path): <NEW_LINE> <INDENT> from rouge import FilesRouge <NEW_LINE> files_rouge = FilesRouge(predictions_path, labels_file) <NEW_LINE> rouge_scores = files_rouge.get_scores(avg=True) <NEW_LINE> return {k:v["f"] for k, v in six.iteritems(rouge_scores)} | ROUGE scorer based on https://github.com/pltrdy/rouge. | 625990643539df3088ecd9ca |
class GaussianNoise(Operation): <NEW_LINE> <INDENT> def __init__(self, probability, mean, std): <NEW_LINE> <INDENT> Operation.__init__(self, probability) <NEW_LINE> self.mean = mean <NEW_LINE> self.std = std <NEW_LINE> <DEDENT> def perform_operation(self, image): <NEW_LINE> <INDENT> w, h = image.size <NEW_LINE> c = len(image.getbands()) <NEW_LINE> noise = np.random.normal(self.mean, self.std, (h, w, c)) <NEW_LINE> return Image.fromarray(np.uint8(np.array(image) + noise)) | The class `:class noise` is used to apply random noise to images passed
to its :func:`perform_operation` function. | 62599064435de62698e9d535 |
class BaseStudentEngagementTaskMapTest(InitializeOpaqueKeysMixin, MapperTestMixin, TestCase): <NEW_LINE> <INDENT> DEFAULT_USER_ID = 10 <NEW_LINE> DEFAULT_TIMESTAMP = "2013-12-17T15:38:32.805444" <NEW_LINE> DEFAULT_DATE = "2013-12-17" <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(BaseStudentEngagementTaskMapTest, self).setUp() <NEW_LINE> self.initialize_ids() <NEW_LINE> self.video_id = 'i4x-foo-bar-baz' <NEW_LINE> self.event_templates = { 'play_video': { "username": "test_user", "host": "test_host", "event_source": "browser", "event_type": "play_video", "context": { "course_id": self.course_id, "org_id": self.org_id, "user_id": self.DEFAULT_USER_ID, }, "time": "{0}+00:00".format(self.DEFAULT_TIMESTAMP), "ip": "127.0.0.1", "event": '{"id": "%s", "currentTime": "23.4398", "code": "87389iouhdfh"}' % self.video_id, "agent": "blah, blah, blah", "page": None }, 'problem_check': { "username": "test_user", "host": "test_host", "event_source": "server", "event_type": "problem_check", "context": { "course_id": self.course_id, "org_id": self.org_id, "user_id": self.DEFAULT_USER_ID, }, "time": "{0}+00:00".format(self.DEFAULT_TIMESTAMP), "ip": "127.0.0.1", "event": { "problem_id": self.problem_id, "success": "incorrect", }, "agent": "blah, blah, blah", "page": None } } <NEW_LINE> self.default_event_template = 'problem_check' <NEW_LINE> self.default_key = (self.DEFAULT_DATE, self.course_id, 'test_user') <NEW_LINE> <DEDENT> def create_task(self, interval=None, interval_type=None): <NEW_LINE> <INDENT> if not interval: <NEW_LINE> <INDENT> interval = self.DEFAULT_DATE <NEW_LINE> <DEDENT> self.task = StudentEngagementTask( interval=luigi.DateIntervalParameter().parse(interval), output_root='/fake/output', interval_type=interval_type, ) <NEW_LINE> self.task.init_local() <NEW_LINE> <DEDENT> def assert_date_mappings(self, expected_end_date, actual_event_date): <NEW_LINE> <INDENT> self.assert_single_map_output( self.create_event_log_line(time="{}T15:38:32.805444".format(actual_event_date)), (expected_end_date, self.course_id, 'test_user'), (self.problem_id, 'problem_check', '{}', actual_event_date) ) | Base class for test analysis of detailed student engagement | 6259906492d797404e3896f3 |