Dataset columns (name, type, and min..max length or value):

    body_hash               stringlengths   64 .. 64
    body                    stringlengths   23 .. 109k
    docstring               stringlengths   1 .. 57k
    path                    stringlengths   4 .. 198
    name                    stringlengths   1 .. 115
    repository_name         stringlengths   7 .. 111
    repository_stars        float64         0 .. 191k
    lang                    stringclasses   1 value
    body_without_docstring  stringlengths   14 .. 108k
    unified                 stringlengths   45 .. 133k
8860e188554203beabc2e92904b07383ced04bc27175a2b983a3d2428e08dca6
def updateMetaData(self): 'Override.' return True
Override.
extra_foam/gui/ctrl_widgets/image_ctrl_widget.py
updateMetaData
zhujun98/EXtra-foam
0
python
def updateMetaData(self): return True
def updateMetaData(self): return True<|docstring|>Override.<|endoftext|>
92c5bc7bf6fd086acf6ef6f428392d4aa0fbeff2f2b1b68343f9c9c58a9c179e
def loadMetaData(self): 'Override.' pass
Override.
extra_foam/gui/ctrl_widgets/image_ctrl_widget.py
loadMetaData
zhujun98/EXtra-foam
0
python
def loadMetaData(self): pass
def loadMetaData(self): pass<|docstring|>Override.<|endoftext|>
8bcaf35aad18c205c8843a7c974d0ec495dc273122840b869593f7b0245509ff
def is_new_to_wildlifelicensing(request=None):
    '''
    Verify request user holds minimum details to use Wildlife Licensing.
    '''
    from wildlifecompliance.management.securebase_manager import SecureBaseUtils
    has_user_details = True if (
        request.user.first_name
        and request.user.last_name
        and request.user.dob
        and request.user.residential_address
        and (request.user.phone_number or request.user.mobile_number)
        and request.user.identification
    ) else False
    if not SecureBaseUtils.is_wildlifelicensing_request(request):
        has_user_details = True
    if is_internal(request):
        has_user_details = True
    return not has_user_details
Verify request user holds minimum details to use Wildlife Licensing.
wildlifecompliance/helpers.py
is_new_to_wildlifelicensing
jawaidm/wildlifecompliance
1
python
def is_new_to_wildlifelicensing(request=None): '\n \n ' from wildlifecompliance.management.securebase_manager import SecureBaseUtils has_user_details = (True if (request.user.first_name and request.user.last_name and request.user.dob and request.user.residential_address and (request.user.phone_number or request.user.mobile_number) and request.user.identification) else False) if (not SecureBaseUtils.is_wildlifelicensing_request(request)): has_user_details = True if is_internal(request): has_user_details = True return (not has_user_details)
def is_new_to_wildlifelicensing(request=None): '\n \n ' from wildlifecompliance.management.securebase_manager import SecureBaseUtils has_user_details = (True if (request.user.first_name and request.user.last_name and request.user.dob and request.user.residential_address and (request.user.phone_number or request.user.mobile_number) and request.user.identification) else False) if (not SecureBaseUtils.is_wildlifelicensing_request(request)): has_user_details = True if is_internal(request): has_user_details = True return (not has_user_details)<|docstring|>Verify request user holds minimum details to use Wildlife Licensing.<|endoftext|>
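Illustrative note: the helper above returns True when a user's profile is missing required details. A minimal usage sketch, assuming a standard Django view and an invented URL name (`first_time_form`), neither of which comes from the source repository:

```python
# Hypothetical usage sketch; the view and URL name are invented for illustration.
from django.shortcuts import redirect

def licensing_dashboard(request):
    if is_new_to_wildlifelicensing(request):
        # Send users with incomplete profiles to a profile-completion form first.
        return redirect('first_time_form')
    # ... otherwise render the normal dashboard
```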
52148cf684ca840ea83ead84c79c3d3ab1d734bd763c0d2c8b93b0145e76ef5d
def belongs_to(user, group_name):
    '''
    Check if the user belongs to the given group.
    :param user:
    :param group_name:
    :return:
    '''
    return user.groups.filter(name=group_name).exists()
Check if the user belongs to the given group. :param user: :param group_name: :return:
wildlifecompliance/helpers.py
belongs_to
jawaidm/wildlifecompliance
1
python
def belongs_to(user, group_name): '\n Check if the user belongs to the given group.\n :param user:\n :param group_name:\n :return:\n ' return user.groups.filter(name=group_name).exists()
def belongs_to(user, group_name): '\n Check if the user belongs to the given group.\n :param user:\n :param group_name:\n :return:\n ' return user.groups.filter(name=group_name).exists()<|docstring|>Check if the user belongs to the given group. :param user: :param group_name: :return:<|endoftext|>
67228f2c2cf5d3d75a60644f25e7aa3d4e1a6cde496305efc5873b8ceb993a8d
def belongs_to_list(user, group_names):
    '''
    Check if the user belongs to the given list of groups.
    :param user:
    :param list_of_group_names:
    :return:
    '''
    return user.groups.filter(name__in=group_names).exists()
Check if the user belongs to the given list of groups. :param user: :param list_of_group_names: :return:
wildlifecompliance/helpers.py
belongs_to_list
jawaidm/wildlifecompliance
1
python
def belongs_to_list(user, group_names): '\n Check if the user belongs to the given list of groups.\n :param user:\n :param list_of_group_names:\n :return:\n ' return user.groups.filter(name__in=group_names).exists()
def belongs_to_list(user, group_names): '\n Check if the user belongs to the given list of groups.\n :param user:\n :param list_of_group_names:\n :return:\n ' return user.groups.filter(name__in=group_names).exists()<|docstring|>Check if the user belongs to the given list of groups. :param user: :param list_of_group_names: :return:<|endoftext|>
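Illustrative note: `belongs_to` and `belongs_to_list` are thin wrappers over Django's `user.groups` relation. A minimal sketch of calling them; the group names are invented for illustration:

```python
# Sketch only; group names are placeholders, not from the source project.
def user_flags(user):
    return {
        'is_officer': belongs_to(user, 'Licensing Officers'),
        'is_staff_like': belongs_to_list(user, ['Licensing Officers', 'Assessors']),
    }
```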
84c23ecefe1a55b3030292926b89f5cf62b3ee0842180b48a6c48ea0769c347c
def is_wildlifecompliance_payment_officer(request):
    '''
    Check user for request has payment officer permissions.

    :return: boolean
    '''
    PAYMENTS_GROUP_NAME = 'Wildlife Compliance - Payment Officers'
    is_payment_officer = (
        request.user.is_authenticated()
        and is_model_backend(request)
        and in_dbca_domain(request)
        and request.user.groups.filter(name__in=[PAYMENTS_GROUP_NAME]).exists()
    )
    return is_payment_officer
Check user for request has payment officer permissions. :return: boolean
wildlifecompliance/helpers.py
is_wildlifecompliance_payment_officer
jawaidm/wildlifecompliance
1
python
def is_wildlifecompliance_payment_officer(request): '\n Check user for request has payment officer permissions.\n\n :return: boolean\n ' PAYMENTS_GROUP_NAME = 'Wildlife Compliance - Payment Officers' is_payment_officer = (request.user.is_authenticated() and is_model_backend(request) and in_dbca_domain(request) and request.user.groups.filter(name__in=[PAYMENTS_GROUP_NAME]).exists()) return is_payment_officer
def is_wildlifecompliance_payment_officer(request): '\n Check user for request has payment officer permissions.\n\n :return: boolean\n ' PAYMENTS_GROUP_NAME = 'Wildlife Compliance - Payment Officers' is_payment_officer = (request.user.is_authenticated() and is_model_backend(request) and in_dbca_domain(request) and request.user.groups.filter(name__in=[PAYMENTS_GROUP_NAME]).exists()) return is_payment_officer<|docstring|>Check user for request has payment officer permissions. :return: boolean<|endoftext|>
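Illustrative note: a hedged sketch of gating a view on the payment-officer check above; the view name and response are placeholders, not part of the source repository:

```python
# Hypothetical view guard; not part of the source repository.
from django.http import HttpResponseForbidden

def payment_reports(request):
    if not is_wildlifecompliance_payment_officer(request):
        return HttpResponseForbidden('Payment officer access required.')
    # ... build and return the report response here
```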
5d63dda6a1f505007a827594d95179399bc008d45a36a9e320856d424bb62584
def is_reception(request):
    '''
    A check whether request is performed by Wildlife Licensing Reception.
    '''
    from wildlifecompliance.components.licences.models import WildlifeLicenceReceptionEmail
    is_reception_email = WildlifeLicenceReceptionEmail.objects.filter(
        email=request.user.email).exists()
    return (request.user.is_authenticated() and is_reception_email)
A check whether request is performed by Wildlife Licensing Reception.
wildlifecompliance/helpers.py
is_reception
jawaidm/wildlifecompliance
1
python
def is_reception(request): '\n \n ' from wildlifecompliance.components.licences.models import WildlifeLicenceReceptionEmail is_reception_email = WildlifeLicenceReceptionEmail.objects.filter(email=request.user.email).exists() return (request.user.is_authenticated() and is_reception_email)
def is_reception(request): '\n \n ' from wildlifecompliance.components.licences.models import WildlifeLicenceReceptionEmail is_reception_email = WildlifeLicenceReceptionEmail.objects.filter(email=request.user.email).exists() return (request.user.is_authenticated() and is_reception_email)<|docstring|>A check whether request is performed by Wildlife Licensing Reception.<|endoftext|>
24f7f1ffebd025f015802b0541452979802efba5d2d64f4d89b43fa06ee64677
@cli.group('replicas')
@click.option('--count', type=int, expose_value=True,
              help='Number of replicas the indices should have.')
@click.pass_context
def replicas(ctx, count):
    'Replica Count Per-shard'
    if (count == None):
        click.echo('{0}'.format(ctx.get_help()))
        click.echo(click.style('Missing required parameter --count', fg='red', bold=True))
        sys.exit(1)
Replica Count Per-shard
curator/cli/replicas.py
replicas
ferki/curator
0
python
@cli.group('replicas') @click.option('--count', type=int, expose_value=True, help='Number of replicas the indices should have.') @click.pass_context def replicas(ctx, count): if (count == None): click.echo('{0}'.format(ctx.get_help())) click.echo(click.style('Missing required parameter --count', fg='red', bold=True)) sys.exit(1)
@cli.group('replicas') @click.option('--count', type=int, expose_value=True, help='Number of replicas the indices should have.') @click.pass_context def replicas(ctx, count): if (count == None): click.echo('{0}'.format(ctx.get_help())) click.echo(click.style('Missing required parameter --count', fg='red', bold=True)) sys.exit(1)<|docstring|>Replica Count Per-shard<|endoftext|>
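Illustrative note: the record above shows a common Click pattern, validating a group-level option by hand and printing help when it is missing. A self-contained sketch of the same pattern (the command names are invented; this is not the curator CLI itself):

```python
# Standalone sketch of the same Click pattern; names are illustrative.
import sys
import click

@click.group('demo')
@click.option('--count', type=int, help='Number of replicas the indices should have.')
@click.pass_context
def demo(ctx, count):
    'Replica count per shard (demo).'
    if count is None:
        click.echo(ctx.get_help())
        click.echo(click.style('Missing required parameter --count', fg='red', bold=True))
        sys.exit(1)
    ctx.obj = {'count': count}

@demo.command('show')
@click.pass_context
def show(ctx):
    click.echo('count = {0}'.format(ctx.obj['count']))

if __name__ == '__main__':
    demo()
```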
1e5bd9f5db73e705ee70c161737987ecec4b2c05ec2a4ca141c7074710a085a5
def __init__(__self__, *, family: pulumi.Input[str], description: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None): '\n The set of arguments for constructing a ClusterParameterGroup resource.\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[\'ClusterParameterGroupParameterArgs\']]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' pulumi.set(__self__, 'family', family) if (description is not None): pulumi.set(__self__, 'description', description) if (name is not None): pulumi.set(__self__, 'name', name) if (name_prefix is not None): pulumi.set(__self__, 'name_prefix', name_prefix) if (parameters is not None): pulumi.set(__self__, 'parameters', parameters) if (tags is not None): pulumi.set(__self__, 'tags', tags)
The set of arguments for constructing a ClusterParameterGroup resource. :param pulumi.Input[str] family: The family of the documentDB cluster parameter group. :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi". :param pulumi.Input[str] name: The name of the documentDB parameter. :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`. :param pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
__init__
jen20/pulumi-aws
0
python
def __init__(__self__, *, family: pulumi.Input[str], description: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None): '\n The set of arguments for constructing a ClusterParameterGroup resource.\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[\'ClusterParameterGroupParameterArgs\']]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' pulumi.set(__self__, 'family', family) if (description is not None): pulumi.set(__self__, 'description', description) if (name is not None): pulumi.set(__self__, 'name', name) if (name_prefix is not None): pulumi.set(__self__, 'name_prefix', name_prefix) if (parameters is not None): pulumi.set(__self__, 'parameters', parameters) if (tags is not None): pulumi.set(__self__, 'tags', tags)
def __init__(__self__, *, family: pulumi.Input[str], description: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None): '\n The set of arguments for constructing a ClusterParameterGroup resource.\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[\'ClusterParameterGroupParameterArgs\']]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' pulumi.set(__self__, 'family', family) if (description is not None): pulumi.set(__self__, 'description', description) if (name is not None): pulumi.set(__self__, 'name', name) if (name_prefix is not None): pulumi.set(__self__, 'name_prefix', name_prefix) if (parameters is not None): pulumi.set(__self__, 'parameters', parameters) if (tags is not None): pulumi.set(__self__, 'tags', tags)<|docstring|>The set of arguments for constructing a ClusterParameterGroup resource. :param pulumi.Input[str] family: The family of the documentDB cluster parameter group. :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi". :param pulumi.Input[str] name: The name of the documentDB parameter. :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`. :param pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.<|endoftext|>
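Illustrative note: a construction sketch based only on the signature shown above, assuming the `pulumi_aws` package layout implied by the record's path; the values are examples:

```python
# Sketch: building the args object from the signature above (values are examples).
import pulumi_aws as aws

args = aws.docdb.ClusterParameterGroupArgs(
    family='docdb3.6',
    description='docdb cluster parameter group',
    parameters=[aws.docdb.ClusterParameterGroupParameterArgs(name='tls', value='enabled')],
    tags={'Environment': 'example'},
)
```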
fcebe3e5868e3971f8220d829c52637b53042fccdf9a1c0bd3495f5f15ce642d
@property @pulumi.getter def family(self) -> pulumi.Input[str]: '\n The family of the documentDB cluster parameter group.\n ' return pulumi.get(self, 'family')
The family of the documentDB cluster parameter group.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
family
jen20/pulumi-aws
0
python
@property @pulumi.getter def family(self) -> pulumi.Input[str]: '\n \n ' return pulumi.get(self, 'family')
@property @pulumi.getter def family(self) -> pulumi.Input[str]: '\n \n ' return pulumi.get(self, 'family')<|docstring|>The family of the documentDB cluster parameter group.<|endoftext|>
dc3888658fdd6360c92bae24f944e137469b0c94d211e25a7e6ddee476dc142c
@property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: '\n The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n ' return pulumi.get(self, 'description')
The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
description
jen20/pulumi-aws
0
python
@property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'description')
@property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'description')<|docstring|>The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".<|endoftext|>
beb351adfc8efcbd2dd2b6955740f14de3409cdcc537231abc06ae8154b219ce
@property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: '\n The name of the documentDB parameter.\n ' return pulumi.get(self, 'name')
The name of the documentDB parameter.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
name
jen20/pulumi-aws
0
python
@property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'name')
@property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'name')<|docstring|>The name of the documentDB parameter.<|endoftext|>
f1b492a53529f0cb4485884fa763ebcbf854a87cd8b4f10afa7904a89fe340d5
@property @pulumi.getter(name='namePrefix') def name_prefix(self) -> Optional[pulumi.Input[str]]: '\n Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n ' return pulumi.get(self, 'name_prefix')
Creates a unique name beginning with the specified prefix. Conflicts with `name`.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
name_prefix
jen20/pulumi-aws
0
python
@property @pulumi.getter(name='namePrefix') def name_prefix(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'name_prefix')
@property @pulumi.getter(name='namePrefix') def name_prefix(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'name_prefix')<|docstring|>Creates a unique name beginning with the specified prefix. Conflicts with `name`.<|endoftext|>
4c9a40aa5e3fdc2b064cf484395734e9ca2c9ca62655aa4b8e19fd755d430757
@property @pulumi.getter def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]]]: '\n A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n ' return pulumi.get(self, 'parameters')
A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
parameters
jen20/pulumi-aws
0
python
@property @pulumi.getter def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]]]: '\n \n ' return pulumi.get(self, 'parameters')
@property @pulumi.getter def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterParameterGroupParameterArgs']]]]: '\n \n ' return pulumi.get(self, 'parameters')<|docstring|>A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.<|endoftext|>
93f559ff90e2f960b3004f862bd462633ee849c089398c8d34d32d46319efd91
@property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]: '\n A map of tags to assign to the resource.\n ' return pulumi.get(self, 'tags')
A map of tags to assign to the resource.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
tags
jen20/pulumi-aws
0
python
@property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]: '\n \n ' return pulumi.get(self, 'tags')
@property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]: '\n \n ' return pulumi.get(self, 'tags')<|docstring|>A map of tags to assign to the resource.<|endoftext|>
f9f16ac3d72edba6e07a8bfd68aa3efa902555d1dfa42040453f18738b2bd0d1
@overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]=None, description: Optional[pulumi.Input[str]]=None, family: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, __props__=None, __name__=None, __opts__=None): '\n Manages a DocumentDB Cluster Parameter Group\n\n ## Example Usage\n\n ```python\n import pulumi\n import pulumi_aws as aws\n\n example = aws.docdb.ClusterParameterGroup("example",\n description="docdb cluster parameter group",\n family="docdb3.6",\n parameters=[aws.docdb.ClusterParameterGroupParameterArgs(\n name="tls",\n value="enabled",\n )])\n ```\n\n ## Import\n\n DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g.\n\n ```sh\n $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1\n ```\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType[\'ClusterParameterGroupParameterArgs\']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' ...
Manages a DocumentDB Cluster Parameter Group ## Example Usage ```python import pulumi import pulumi_aws as aws example = aws.docdb.ClusterParameterGroup("example", description="docdb cluster parameter group", family="docdb3.6", parameters=[aws.docdb.ClusterParameterGroupParameterArgs( name="tls", value="enabled", )]) ``` ## Import DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g. ```sh $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1 ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi". :param pulumi.Input[str] family: The family of the documentDB cluster parameter group. :param pulumi.Input[str] name: The name of the documentDB parameter. :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
__init__
jen20/pulumi-aws
0
python
@overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]=None, description: Optional[pulumi.Input[str]]=None, family: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, __props__=None, __name__=None, __opts__=None): '\n Manages a DocumentDB Cluster Parameter Group\n\n ## Example Usage\n\n ```python\n import pulumi\n import pulumi_aws as aws\n\n example = aws.docdb.ClusterParameterGroup("example",\n description="docdb cluster parameter group",\n family="docdb3.6",\n parameters=[aws.docdb.ClusterParameterGroupParameterArgs(\n name="tls",\n value="enabled",\n )])\n ```\n\n ## Import\n\n DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g.\n\n ```sh\n $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1\n ```\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType[\'ClusterParameterGroupParameterArgs\']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' ...
@overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]=None, description: Optional[pulumi.Input[str]]=None, family: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, __props__=None, __name__=None, __opts__=None): '\n Manages a DocumentDB Cluster Parameter Group\n\n ## Example Usage\n\n ```python\n import pulumi\n import pulumi_aws as aws\n\n example = aws.docdb.ClusterParameterGroup("example",\n description="docdb cluster parameter group",\n family="docdb3.6",\n parameters=[aws.docdb.ClusterParameterGroupParameterArgs(\n name="tls",\n value="enabled",\n )])\n ```\n\n ## Import\n\n DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g.\n\n ```sh\n $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1\n ```\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType[\'ClusterParameterGroupParameterArgs\']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' ...<|docstring|>Manages a DocumentDB Cluster Parameter Group ## Example Usage ```python import pulumi import pulumi_aws as aws example = aws.docdb.ClusterParameterGroup("example", description="docdb cluster parameter group", family="docdb3.6", parameters=[aws.docdb.ClusterParameterGroupParameterArgs( name="tls", value="enabled", )]) ``` ## Import DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g. ```sh $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1 ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi". :param pulumi.Input[str] family: The family of the documentDB cluster parameter group. :param pulumi.Input[str] name: The name of the documentDB parameter. :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.<|endoftext|>
d608c30d22baac5fd65395c0c9bd56f561be79e52eefc991080e6043499cb439
@overload def __init__(__self__, resource_name: str, args: ClusterParameterGroupArgs, opts: Optional[pulumi.ResourceOptions]=None): '\n Manages a DocumentDB Cluster Parameter Group\n\n ## Example Usage\n\n ```python\n import pulumi\n import pulumi_aws as aws\n\n example = aws.docdb.ClusterParameterGroup("example",\n description="docdb cluster parameter group",\n family="docdb3.6",\n parameters=[aws.docdb.ClusterParameterGroupParameterArgs(\n name="tls",\n value="enabled",\n )])\n ```\n\n ## Import\n\n DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g.\n\n ```sh\n $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1\n ```\n\n :param str resource_name: The name of the resource.\n :param ClusterParameterGroupArgs args: The arguments to use to populate this resource\'s properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n ' ...
Manages a DocumentDB Cluster Parameter Group ## Example Usage ```python import pulumi import pulumi_aws as aws example = aws.docdb.ClusterParameterGroup("example", description="docdb cluster parameter group", family="docdb3.6", parameters=[aws.docdb.ClusterParameterGroupParameterArgs( name="tls", value="enabled", )]) ``` ## Import DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g. ```sh $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1 ``` :param str resource_name: The name of the resource. :param ClusterParameterGroupArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
__init__
jen20/pulumi-aws
0
python
@overload def __init__(__self__, resource_name: str, args: ClusterParameterGroupArgs, opts: Optional[pulumi.ResourceOptions]=None): '\n Manages a DocumentDB Cluster Parameter Group\n\n ## Example Usage\n\n ```python\n import pulumi\n import pulumi_aws as aws\n\n example = aws.docdb.ClusterParameterGroup("example",\n description="docdb cluster parameter group",\n family="docdb3.6",\n parameters=[aws.docdb.ClusterParameterGroupParameterArgs(\n name="tls",\n value="enabled",\n )])\n ```\n\n ## Import\n\n DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g.\n\n ```sh\n $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1\n ```\n\n :param str resource_name: The name of the resource.\n :param ClusterParameterGroupArgs args: The arguments to use to populate this resource\'s properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n ' ...
@overload def __init__(__self__, resource_name: str, args: ClusterParameterGroupArgs, opts: Optional[pulumi.ResourceOptions]=None): '\n Manages a DocumentDB Cluster Parameter Group\n\n ## Example Usage\n\n ```python\n import pulumi\n import pulumi_aws as aws\n\n example = aws.docdb.ClusterParameterGroup("example",\n description="docdb cluster parameter group",\n family="docdb3.6",\n parameters=[aws.docdb.ClusterParameterGroupParameterArgs(\n name="tls",\n value="enabled",\n )])\n ```\n\n ## Import\n\n DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g.\n\n ```sh\n $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1\n ```\n\n :param str resource_name: The name of the resource.\n :param ClusterParameterGroupArgs args: The arguments to use to populate this resource\'s properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n ' ...<|docstring|>Manages a DocumentDB Cluster Parameter Group ## Example Usage ```python import pulumi import pulumi_aws as aws example = aws.docdb.ClusterParameterGroup("example", description="docdb cluster parameter group", family="docdb3.6", parameters=[aws.docdb.ClusterParameterGroupParameterArgs( name="tls", value="enabled", )]) ``` ## Import DocumentDB Cluster Parameter Groups can be imported using the `name`, e.g. ```sh $ pulumi import aws:docdb/clusterParameterGroup:ClusterParameterGroup cluster_pg production-pg-1 ``` :param str resource_name: The name of the resource. :param ClusterParameterGroupArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource.<|endoftext|>
a05fe3eeb243687e88e4101e0d6e83409eeb70b640c9d98c9ada5ed2b226d088
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]=None, arn: Optional[pulumi.Input[str]]=None, description: Optional[pulumi.Input[str]]=None, family: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None) -> 'ClusterParameterGroup': '\n Get an existing ClusterParameterGroup resource\'s state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] arn: The ARN of the documentDB cluster parameter group.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType[\'ClusterParameterGroupParameterArgs\']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() __props__['arn'] = arn __props__['description'] = description __props__['family'] = family __props__['name'] = name __props__['name_prefix'] = name_prefix __props__['parameters'] = parameters __props__['tags'] = tags return ClusterParameterGroup(resource_name, opts=opts, __props__=__props__)
Get an existing ClusterParameterGroup resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] arn: The ARN of the documentDB cluster parameter group. :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi". :param pulumi.Input[str] family: The family of the documentDB cluster parameter group. :param pulumi.Input[str] name: The name of the documentDB parameter. :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
get
jen20/pulumi-aws
0
python
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]=None, arn: Optional[pulumi.Input[str]]=None, description: Optional[pulumi.Input[str]]=None, family: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None) -> 'ClusterParameterGroup': '\n Get an existing ClusterParameterGroup resource\'s state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] arn: The ARN of the documentDB cluster parameter group.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType[\'ClusterParameterGroupParameterArgs\']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() __props__['arn'] = arn __props__['description'] = description __props__['family'] = family __props__['name'] = name __props__['name_prefix'] = name_prefix __props__['parameters'] = parameters __props__['tags'] = tags return ClusterParameterGroup(resource_name, opts=opts, __props__=__props__)
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]=None, arn: Optional[pulumi.Input[str]]=None, description: Optional[pulumi.Input[str]]=None, family: Optional[pulumi.Input[str]]=None, name: Optional[pulumi.Input[str]]=None, name_prefix: Optional[pulumi.Input[str]]=None, parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None) -> 'ClusterParameterGroup': '\n Get an existing ClusterParameterGroup resource\'s state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] arn: The ARN of the documentDB cluster parameter group.\n :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n :param pulumi.Input[str] family: The family of the documentDB cluster parameter group.\n :param pulumi.Input[str] name: The name of the documentDB parameter.\n :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType[\'ClusterParameterGroupParameterArgs\']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.\n ' opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() __props__['arn'] = arn __props__['description'] = description __props__['family'] = family __props__['name'] = name __props__['name_prefix'] = name_prefix __props__['parameters'] = parameters __props__['tags'] = tags return ClusterParameterGroup(resource_name, opts=opts, __props__=__props__)<|docstring|>Get an existing ClusterParameterGroup resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] arn: The ARN of the documentDB cluster parameter group. :param pulumi.Input[str] description: The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi". :param pulumi.Input[str] family: The family of the documentDB cluster parameter group. :param pulumi.Input[str] name: The name of the documentDB parameter. :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterParameterGroupParameterArgs']]]] parameters: A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource.<|endoftext|>
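Illustrative note: a hedged sketch of using the `get` lookup above to adopt an existing parameter group; the resource name and id are placeholders borrowed from the import example in the docstring:

```python
# Sketch: adopting an existing group by provider id (placeholder values).
import pulumi_aws as aws

existing = aws.docdb.ClusterParameterGroup.get(
    resource_name='imported-pg',
    id='production-pg-1',
)
```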
9659c08d4406d0d16c7b4155b88ad71bc71e6de666cb74ccbf8db484bf1f33af
@property @pulumi.getter def arn(self) -> pulumi.Output[str]: '\n The ARN of the documentDB cluster parameter group.\n ' return pulumi.get(self, 'arn')
The ARN of the documentDB cluster parameter group.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
arn
jen20/pulumi-aws
0
python
@property @pulumi.getter def arn(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'arn')
@property @pulumi.getter def arn(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'arn')<|docstring|>The ARN of the documentDB cluster parameter group.<|endoftext|>
46c44c2c1e15ef5bc8d2772ca06c909cfd9474f090a1b61b67a3d952258a22b6
@property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: '\n The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".\n ' return pulumi.get(self, 'description')
The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
description
jen20/pulumi-aws
0
python
@property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: '\n \n ' return pulumi.get(self, 'description')
@property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: '\n \n ' return pulumi.get(self, 'description')<|docstring|>The description of the documentDB cluster parameter group. Defaults to "Managed by Pulumi".<|endoftext|>
a614c72469d84521879d8754a8a13f0373741eaeae2b57851425eaca3dfa5d06
@property @pulumi.getter def family(self) -> pulumi.Output[str]: '\n The family of the documentDB cluster parameter group.\n ' return pulumi.get(self, 'family')
The family of the documentDB cluster parameter group.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
family
jen20/pulumi-aws
0
python
@property @pulumi.getter def family(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'family')
@property @pulumi.getter def family(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'family')<|docstring|>The family of the documentDB cluster parameter group.<|endoftext|>
5d92bbb764d6f52e07844ffc9205465a5dd29ed3e442c23b6b424efb3e689547
@property @pulumi.getter def name(self) -> pulumi.Output[str]: '\n The name of the documentDB parameter.\n ' return pulumi.get(self, 'name')
The name of the documentDB parameter.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
name
jen20/pulumi-aws
0
python
@property @pulumi.getter def name(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'name')
@property @pulumi.getter def name(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'name')<|docstring|>The name of the documentDB parameter.<|endoftext|>
5ea02d2cfd78240c2047d5e469988b82276474d9b14bac6004d7a098437918c5
@property @pulumi.getter(name='namePrefix') def name_prefix(self) -> pulumi.Output[str]: '\n Creates a unique name beginning with the specified prefix. Conflicts with `name`.\n ' return pulumi.get(self, 'name_prefix')
Creates a unique name beginning with the specified prefix. Conflicts with `name`.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
name_prefix
jen20/pulumi-aws
0
python
@property @pulumi.getter(name='namePrefix') def name_prefix(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'name_prefix')
@property @pulumi.getter(name='namePrefix') def name_prefix(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'name_prefix')<|docstring|>Creates a unique name beginning with the specified prefix. Conflicts with `name`.<|endoftext|>
c05c9de4e1e4495324612c6e776008dfd69ffdb111db17076763f3bf5e29d492
@property @pulumi.getter def parameters(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterParameterGroupParameter']]]: '\n A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.\n ' return pulumi.get(self, 'parameters')
A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
parameters
jen20/pulumi-aws
0
python
@property @pulumi.getter def parameters(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterParameterGroupParameter']]]: '\n \n ' return pulumi.get(self, 'parameters')
@property @pulumi.getter def parameters(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterParameterGroupParameter']]]: '\n \n ' return pulumi.get(self, 'parameters')<|docstring|>A list of documentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources.<|endoftext|>
f00e5ea8a558dd35375d601d76efbd8ba3270f53689afc47a3b121fae9db0f54
@property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[(str, str)]]]: '\n A map of tags to assign to the resource.\n ' return pulumi.get(self, 'tags')
A map of tags to assign to the resource.
sdk/python/pulumi_aws/docdb/cluster_parameter_group.py
tags
jen20/pulumi-aws
0
python
@property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[(str, str)]]]: '\n \n ' return pulumi.get(self, 'tags')
@property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[(str, str)]]]: '\n \n ' return pulumi.get(self, 'tags')<|docstring|>A map of tags to assign to the resource.<|endoftext|>
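Illustrative note: the properties above return `pulumi.Output` values; a short sketch of exporting them from a program (the export names are illustrative, and a normal Pulumi program context is assumed):

```python
# Sketch: exporting the resource's outputs; assumes a normal Pulumi program context.
import pulumi
import pulumi_aws as aws

example = aws.docdb.ClusterParameterGroup('example', family='docdb3.6')
pulumi.export('pg_arn', example.arn)
pulumi.export('pg_name', example.name)
```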
4f59022283ca279ed2a32c8100aa6a503878e2138654fe687fbe4d7b41ff4c22
def get_row(m, i): ' Returns array of 9 SudokuSets. ' return m[(i * 9):((i * 9) + 9)]
Returns array of 9 SudokuSets.
solver.py
get_row
joneser005/sudoku
0
python
def get_row(m, i): ' ' return m[(i * 9):((i * 9) + 9)]
def get_row(m, i): ' ' return m[(i * 9):((i * 9) + 9)]<|docstring|>Returns array of 9 SudokuSets.<|endoftext|>
ac418a6db96f161c95a71d00f90de96eb7a1326b96ac6a96ff8d401a3f7fbf71
def get_col(m, i): ' Returns array of 9 SudokuSets. ' result = [] for j in range(9): result.append(m[((j * 9) + i)]) return result
Returns array of 9 SudokuSets.
solver.py
get_col
joneser005/sudoku
0
python
def get_col(m, i): ' ' result = [] for j in range(9): result.append(m[((j * 9) + i)]) return result
def get_col(m, i): ' ' result = [] for j in range(9): result.append(m[((j * 9) + i)]) return result<|docstring|>Returns array of 9 SudokuSets.<|endoftext|>
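Illustrative note: `get_row` and `get_col` index a flat, row-major list of 81 cells. A small self-contained check of the indexing, using plain ints in place of SudokuSets:

```python
# Indexing check on a flat 9x9 grid of ints (stand-ins for SudokuSets).
m = list(range(81))
assert get_row(m, 0) == list(range(0, 9))
assert get_row(m, 8) == list(range(72, 81))
assert get_col(m, 0) == [0, 9, 18, 27, 36, 45, 54, 63, 72]
```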
27d3a49a46f215562ab9ddf953379e5a3df889f2c523e13160abe61afec6d94f
def get_box(m, bi): ' \n Returns array of 9 SudokuSets.\n \n 0 1 2\n 3 4 5\n 6 7 8\n x offset = (i%3)*3\n y offset = int(i/3) # possible values are 0,1,2\n m[0] => (0-2)+0 + (0-2)+9 + (0-2)+18\n m[2] => (0-2)+6+(0*9) + (0-2)+6+9 + (0-2)+6+18\n m[4] => (0-2)+0 + (0-2)+9 + (0-2)+18\n m[i] => \n ' result = [] xoff = ((bi % 3) * 3) yoff = int((bi / 3)) for i in range((xoff + (yoff * 27)), ((xoff + (yoff * 27)) + 3)): result.append(m[i]) for i in range(((xoff + (yoff * 27)) + 9), (((xoff + (yoff * 27)) + 9) + 3)): result.append(m[i]) for i in range(((xoff + (yoff * 27)) + 18), (((xoff + (yoff * 27)) + 18) + 3)): result.append(m[i]) return result
Returns array of 9 SudokuSets. 0 1 2 3 4 5 6 7 8 x offset = (i%3)*3 y offset = int(i/3) # possible values are 0,1,2 m[0] => (0-2)+0 + (0-2)+9 + (0-2)+18 m[2] => (0-2)+6+(0*9) + (0-2)+6+9 + (0-2)+6+18 m[4] => (0-2)+0 + (0-2)+9 + (0-2)+18 m[i] =>
solver.py
get_box
joneser005/sudoku
0
python
def get_box(m, bi): ' \n Returns array of 9 SudokuSets.\n \n 0 1 2\n 3 4 5\n 6 7 8\n x offset = (i%3)*3\n y offset = int(i/3) # possible values are 0,1,2\n m[0] => (0-2)+0 + (0-2)+9 + (0-2)+18\n m[2] => (0-2)+6+(0*9) + (0-2)+6+9 + (0-2)+6+18\n m[4] => (0-2)+0 + (0-2)+9 + (0-2)+18\n m[i] => \n ' result = [] xoff = ((bi % 3) * 3) yoff = int((bi / 3)) for i in range((xoff + (yoff * 27)), ((xoff + (yoff * 27)) + 3)): result.append(m[i]) for i in range(((xoff + (yoff * 27)) + 9), (((xoff + (yoff * 27)) + 9) + 3)): result.append(m[i]) for i in range(((xoff + (yoff * 27)) + 18), (((xoff + (yoff * 27)) + 18) + 3)): result.append(m[i]) return result
def get_box(m, bi): ' \n Returns array of 9 SudokuSets.\n \n 0 1 2\n 3 4 5\n 6 7 8\n x offset = (i%3)*3\n y offset = int(i/3) # possible values are 0,1,2\n m[0] => (0-2)+0 + (0-2)+9 + (0-2)+18\n m[2] => (0-2)+6+(0*9) + (0-2)+6+9 + (0-2)+6+18\n m[4] => (0-2)+0 + (0-2)+9 + (0-2)+18\n m[i] => \n ' result = [] xoff = ((bi % 3) * 3) yoff = int((bi / 3)) for i in range((xoff + (yoff * 27)), ((xoff + (yoff * 27)) + 3)): result.append(m[i]) for i in range(((xoff + (yoff * 27)) + 9), (((xoff + (yoff * 27)) + 9) + 3)): result.append(m[i]) for i in range(((xoff + (yoff * 27)) + 18), (((xoff + (yoff * 27)) + 18) + 3)): result.append(m[i]) return result<|docstring|>Returns array of 9 SudokuSets. 0 1 2 3 4 5 6 7 8 x offset = (i%3)*3 y offset = int(i/3) # possible values are 0,1,2 m[0] => (0-2)+0 + (0-2)+9 + (0-2)+18 m[2] => (0-2)+6+(0*9) + (0-2)+6+9 + (0-2)+6+18 m[4] => (0-2)+0 + (0-2)+9 + (0-2)+18 m[i] =><|endoftext|>
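Illustrative note: the docstring's offset arithmetic maps box index bi to xoff = (bi % 3) * 3 and yoff = bi // 3, so each box covers three 3-cell runs spaced 9 apart. A worked check on the same flat grid of ints:

```python
# Worked check of the box offsets on a row-major flat grid of ints.
m = list(range(81))
assert get_box(m, 0) == [0, 1, 2, 9, 10, 11, 18, 19, 20]      # top-left box
assert get_box(m, 4) == [30, 31, 32, 39, 40, 41, 48, 49, 50]  # centre box
assert get_box(m, 8) == [60, 61, 62, 69, 70, 71, 78, 79, 80]  # bottom-right box
```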
3b83cd183d9e81ca404d720c23278dbb385955e4ccbdd7ab055d8460e5905cfc
def makekey(s): " Returns string from set of ints, ordered low to high.\n Example: if set is (5,2,6), result will be '256'. " so = sorted(s) s = '' for x in so: s += str(x) return s
Returns string from set of ints, ordered low to high. Example: if set is (5,2,6), result will be '256'.
solver.py
makekey
joneser005/sudoku
0
python
def makekey(s): " Returns string from set of ints, ordered low to high.\n Example: if set is (5,2,6), result will be '256'. " so = sorted(s) s = for x in so: s += str(x) return s
def makekey(s): " Returns string from set of ints, ordered low to high.\n Example: if set is (5,2,6), result will be '256'. " so = sorted(s) s = for x in so: s += str(x) return s<|docstring|>Returns string from set of ints, ordered low to high. Example: if set is (5,2,6), result will be '256'.<|endoftext|>
76e4d4d59165acf313a12f565719305d52d189bd638976a7354d88f52130a7c8
def get_symcc_build_dir(target_directory): 'Return path to uninstrumented target directory.' return os.path.join(target_directory, 'uninstrumented')
Return path to uninstrumented target directory.
fuzzers/symcc_aflplusplus_single/fuzzer.py
get_symcc_build_dir
andreafioraldi/fuzzbench
800
python
def get_symcc_build_dir(target_directory): return os.path.join(target_directory, 'uninstrumented')
def get_symcc_build_dir(target_directory): return os.path.join(target_directory, 'uninstrumented')<|docstring|>Return path to uninstrumented target directory.<|endoftext|>
97847e273ab9e3f0ad29bf400930f1cb27b4055fb1e98437468e6f7dc5b3975d
def build():
    'Build an AFL version and SymCC version of the benchmark'
    print('Step 1: Building with AFL and SymCC')
    build_directory = os.environ['OUT']
    src = os.getenv('SRC')
    work = os.getenv('WORK')
    with utils.restore_directory(src), utils.restore_directory(work):
        aflplusplus_fuzzer.build('tracepc', 'symcc')
    print('Step 2: Completed AFL build')
    shutil.copy('/afl/afl-fuzz', build_directory)
    shutil.copy('/afl/afl-showmap', build_directory)
    print('Step 3: Copying SymCC files')
    symcc_build_dir = get_symcc_build_dir(os.environ['OUT'])
    shutil.copy('/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so',
                symcc_build_dir)
    shutil.copy('/usr/lib/libz3.so', os.path.join(symcc_build_dir, 'libz3.so'))
    shutil.copy('/libcxx_native_build/lib/libc++.so.1', symcc_build_dir)
    shutil.copy('/libcxx_native_build/lib/libc++abi.so.1', symcc_build_dir)
    shutil.copy('/rust/bin/symcc_fuzzing_helper', symcc_build_dir)
Build an AFL version and SymCC version of the benchmark
fuzzers/symcc_aflplusplus_single/fuzzer.py
build
andreafioraldi/fuzzbench
800
python
def build(): print('Step 1: Building with AFL and SymCC') build_directory = os.environ['OUT'] src = os.getenv('SRC') work = os.getenv('WORK') with utils.restore_directory(src), utils.restore_directory(work): aflplusplus_fuzzer.build('tracepc', 'symcc') print('Step 2: Completed AFL build') shutil.copy('/afl/afl-fuzz', build_directory) shutil.copy('/afl/afl-showmap', build_directory) print('Step 3: Copying SymCC files') symcc_build_dir = get_symcc_build_dir(os.environ['OUT']) shutil.copy('/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so', symcc_build_dir) shutil.copy('/usr/lib/libz3.so', os.path.join(symcc_build_dir, 'libz3.so')) shutil.copy('/libcxx_native_build/lib/libc++.so.1', symcc_build_dir) shutil.copy('/libcxx_native_build/lib/libc++abi.so.1', symcc_build_dir) shutil.copy('/rust/bin/symcc_fuzzing_helper', symcc_build_dir)
def build(): print('Step 1: Building with AFL and SymCC') build_directory = os.environ['OUT'] src = os.getenv('SRC') work = os.getenv('WORK') with utils.restore_directory(src), utils.restore_directory(work): aflplusplus_fuzzer.build('tracepc', 'symcc') print('Step 2: Completed AFL build') shutil.copy('/afl/afl-fuzz', build_directory) shutil.copy('/afl/afl-showmap', build_directory) print('Step 3: Copying SymCC files') symcc_build_dir = get_symcc_build_dir(os.environ['OUT']) shutil.copy('/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so', symcc_build_dir) shutil.copy('/usr/lib/libz3.so', os.path.join(symcc_build_dir, 'libz3.so')) shutil.copy('/libcxx_native_build/lib/libc++.so.1', symcc_build_dir) shutil.copy('/libcxx_native_build/lib/libc++abi.so.1', symcc_build_dir) shutil.copy('/rust/bin/symcc_fuzzing_helper', symcc_build_dir)<|docstring|>Build an AFL version and SymCC version of the benchmark<|endoftext|>
d71050f363eb32dfa93ede9a74fcf180c04bf61582e54ad3374392c1662fd497
def launch_afl_thread(input_corpus, output_corpus, target_binary, additional_flags):
    ' Simple wrapper for running AFL. '
    afl_thread = threading.Thread(
        target=afl_fuzzer.run_afl_fuzz,
        args=(input_corpus, output_corpus, target_binary, additional_flags))
    afl_thread.start()
    return afl_thread
Simple wrapper for running AFL.
fuzzers/symcc_aflplusplus_single/fuzzer.py
launch_afl_thread
andreafioraldi/fuzzbench
800
python
def launch_afl_thread(input_corpus, output_corpus, target_binary, additional_flags): ' ' afl_thread = threading.Thread(target=afl_fuzzer.run_afl_fuzz, args=(input_corpus, output_corpus, target_binary, additional_flags)) afl_thread.start() return afl_thread
def launch_afl_thread(input_corpus, output_corpus, target_binary, additional_flags): ' ' afl_thread = threading.Thread(target=afl_fuzzer.run_afl_fuzz, args=(input_corpus, output_corpus, target_binary, additional_flags)) afl_thread.start() return afl_thread<|docstring|>Simple wrapper for running AFL.<|endoftext|>
246ea4c68e6915b834fb2e3bfdb24a785d7f837959131c33cb1d2c4228cc408d
def fuzz(input_corpus, output_corpus, target_binary): '\n Launches a master and a secondary instance of AFL, as well as\n the symcc helper.\n ' target_binary_dir = os.path.dirname(target_binary) symcc_workdir = get_symcc_build_dir(target_binary_dir) target_binary_name = os.path.basename(target_binary) symcc_target_binary = os.path.join(symcc_workdir, target_binary_name) os.environ['AFL_DISABLE_TRIM'] = '1' print('[run_fuzzer] Running AFL for SymCC') afl_fuzzer.prepare_fuzz_environment(input_corpus) launch_afl_thread(input_corpus, output_corpus, target_binary, ['-S', 'afl-secondary']) time.sleep(5) print('Starting the SymCC helper') new_environ = os.environ.copy() new_environ['LD_LIBRARY_PATH'] = symcc_workdir cmd = [os.path.join(symcc_workdir, 'symcc_fuzzing_helper'), '-o', output_corpus, '-a', 'afl-secondary', '-n', 'symcc', '-m', '--', symcc_target_binary, '@@'] subprocess.Popen(cmd, env=new_environ)
Launches a master and a secondary instance of AFL, as well as the symcc helper.
fuzzers/symcc_aflplusplus_single/fuzzer.py
fuzz
andreafioraldi/fuzzbench
800
python
def fuzz(input_corpus, output_corpus, target_binary): '\n Launches a master and a secondary instance of AFL, as well as\n the symcc helper.\n ' target_binary_dir = os.path.dirname(target_binary) symcc_workdir = get_symcc_build_dir(target_binary_dir) target_binary_name = os.path.basename(target_binary) symcc_target_binary = os.path.join(symcc_workdir, target_binary_name) os.environ['AFL_DISABLE_TRIM'] = '1' print('[run_fuzzer] Running AFL for SymCC') afl_fuzzer.prepare_fuzz_environment(input_corpus) launch_afl_thread(input_corpus, output_corpus, target_binary, ['-S', 'afl-secondary']) time.sleep(5) print('Starting the SymCC helper') new_environ = os.environ.copy() new_environ['LD_LIBRARY_PATH'] = symcc_workdir cmd = [os.path.join(symcc_workdir, 'symcc_fuzzing_helper'), '-o', output_corpus, '-a', 'afl-secondary', '-n', 'symcc', '-m', '--', symcc_target_binary, '@@'] subprocess.Popen(cmd, env=new_environ)
def fuzz(input_corpus, output_corpus, target_binary): '\n Launches a master and a secondary instance of AFL, as well as\n the symcc helper.\n ' target_binary_dir = os.path.dirname(target_binary) symcc_workdir = get_symcc_build_dir(target_binary_dir) target_binary_name = os.path.basename(target_binary) symcc_target_binary = os.path.join(symcc_workdir, target_binary_name) os.environ['AFL_DISABLE_TRIM'] = '1' print('[run_fuzzer] Running AFL for SymCC') afl_fuzzer.prepare_fuzz_environment(input_corpus) launch_afl_thread(input_corpus, output_corpus, target_binary, ['-S', 'afl-secondary']) time.sleep(5) print('Starting the SymCC helper') new_environ = os.environ.copy() new_environ['LD_LIBRARY_PATH'] = symcc_workdir cmd = [os.path.join(symcc_workdir, 'symcc_fuzzing_helper'), '-o', output_corpus, '-a', 'afl-secondary', '-n', 'symcc', '-m', '--', symcc_target_binary, '@@'] subprocess.Popen(cmd, env=new_environ)<|docstring|>Launches a master and a secondary instance of AFL, as well as the symcc helper.<|endoftext|>
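The interesting part of this record is how the SymCC helper is spawned: a copied environment whose `LD_LIBRARY_PATH` points at the SymCC work directory, plus an argument list naming the AFL secondary instance to pull inputs from. A hedged sketch of that launch, with all paths hypothetical stand-ins for what `get_symcc_build_dir()` and the benchmark layout would provide:

```python
import os
import subprocess

# Placeholder locations; in the record above they come from
# get_symcc_build_dir() and the benchmark's OUT directory.
symcc_workdir = '/out/symcc'
output_corpus = '/out/corpus'
symcc_target_binary = os.path.join(symcc_workdir, 'target')

new_environ = os.environ.copy()
new_environ['LD_LIBRARY_PATH'] = symcc_workdir   # so libSymRuntime.so is found

cmd = [os.path.join(symcc_workdir, 'symcc_fuzzing_helper'),
       '-o', output_corpus,        # shared AFL output directory
       '-a', 'afl-secondary',      # AFL instance to pull test cases from
       '-n', 'symcc',              # name of the helper's own queue
       '-m', '--', symcc_target_binary, '@@']
proc = subprocess.Popen(cmd, env=new_environ)
```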
9c5dac2aa26ae2b03d437be07836f5e28905597f352ac1fbaa4977a803735547
def __init__(self, jsondict=None, strict=True): ' Initialize all valid properties.\n\n :raises: FHIRValidationError on validation errors, unless strict is False\n :param dict jsondict: A JSON dictionary to use for initialization\n :param bool strict: If True (the default), invalid variables will raise a TypeError\n ' self.beneficiary = None " Plan Beneficiary.\n Type `FHIRReference` referencing `['Patient']` (represented as `dict` in JSON). " self.contract = None " Contract details.\n List of `FHIRReference` items referencing `['Contract']` (represented as `dict` in JSON). " self.dependent = None ' Dependent number.\n Type `str`. ' self.grouping = None ' Additional coverage classifications.\n Type `CoverageGrouping` (represented as `dict` in JSON). ' self.identifier = None ' The primary coverage ID.\n List of `Identifier` items (represented as `dict` in JSON). ' self.network = None ' Insurer network.\n Type `str`. ' self.order = None ' Relative order of the coverage.\n Type `int`. ' self.payor = None " Identifier for the plan or agreement issuer.\n List of `FHIRReference` items referencing `['Organization'], ['Patient'], ['RelatedPerson']` (represented as `dict` in JSON). " self.period = None ' Coverage start and end dates.\n Type `Period` (represented as `dict` in JSON). ' self.policyHolder = None " Owner of the policy.\n Type `FHIRReference` referencing `['Patient'], ['RelatedPerson'], ['Organization']` (represented as `dict` in JSON). " self.relationship = None ' Beneficiary relationship to the Subscriber.\n Type `CodeableConcept` (represented as `dict` in JSON). ' self.sequence = None ' The plan instance or sequence counter.\n Type `str`. ' self.status = None ' active | cancelled | draft | entered-in-error.\n Type `str`. ' self.subscriber = None " Subscriber to the policy.\n Type `FHIRReference` referencing `['Patient'], ['RelatedPerson']` (represented as `dict` in JSON). " self.subscriberId = None ' ID assigned to the Subscriber.\n Type `str`. ' self.type = None ' Type of coverage such as medical or accident.\n Type `CodeableConcept` (represented as `dict` in JSON). ' super(Coverage, self).__init__(jsondict=jsondict, strict=strict)
Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError
fhir/resources/STU3/coverage.py
__init__
mmabey/fhir.resources
0
python
def __init__(self, jsondict=None, strict=True): ' Initialize all valid properties.\n\n :raises: FHIRValidationError on validation errors, unless strict is False\n :param dict jsondict: A JSON dictionary to use for initialization\n :param bool strict: If True (the default), invalid variables will raise a TypeError\n ' self.beneficiary = None " Plan Beneficiary.\n Type `FHIRReference` referencing `['Patient']` (represented as `dict` in JSON). " self.contract = None " Contract details.\n List of `FHIRReference` items referencing `['Contract']` (represented as `dict` in JSON). " self.dependent = None ' Dependent number.\n Type `str`. ' self.grouping = None ' Additional coverage classifications.\n Type `CoverageGrouping` (represented as `dict` in JSON). ' self.identifier = None ' The primary coverage ID.\n List of `Identifier` items (represented as `dict` in JSON). ' self.network = None ' Insurer network.\n Type `str`. ' self.order = None ' Relative order of the coverage.\n Type `int`. ' self.payor = None " Identifier for the plan or agreement issuer.\n List of `FHIRReference` items referencing `['Organization'], ['Patient'], ['RelatedPerson']` (represented as `dict` in JSON). " self.period = None ' Coverage start and end dates.\n Type `Period` (represented as `dict` in JSON). ' self.policyHolder = None " Owner of the policy.\n Type `FHIRReference` referencing `['Patient'], ['RelatedPerson'], ['Organization']` (represented as `dict` in JSON). " self.relationship = None ' Beneficiary relationship to the Subscriber.\n Type `CodeableConcept` (represented as `dict` in JSON). ' self.sequence = None ' The plan instance or sequence counter.\n Type `str`. ' self.status = None ' active | cancelled | draft | entered-in-error.\n Type `str`. ' self.subscriber = None " Subscriber to the policy.\n Type `FHIRReference` referencing `['Patient'], ['RelatedPerson']` (represented as `dict` in JSON). " self.subscriberId = None ' ID assigned to the Subscriber.\n Type `str`. ' self.type = None ' Type of coverage such as medical or accident.\n Type `CodeableConcept` (represented as `dict` in JSON). ' super(Coverage, self).__init__(jsondict=jsondict, strict=strict)
def __init__(self, jsondict=None, strict=True): ' Initialize all valid properties.\n\n :raises: FHIRValidationError on validation errors, unless strict is False\n :param dict jsondict: A JSON dictionary to use for initialization\n :param bool strict: If True (the default), invalid variables will raise a TypeError\n ' self.beneficiary = None " Plan Beneficiary.\n Type `FHIRReference` referencing `['Patient']` (represented as `dict` in JSON). " self.contract = None " Contract details.\n List of `FHIRReference` items referencing `['Contract']` (represented as `dict` in JSON). " self.dependent = None ' Dependent number.\n Type `str`. ' self.grouping = None ' Additional coverage classifications.\n Type `CoverageGrouping` (represented as `dict` in JSON). ' self.identifier = None ' The primary coverage ID.\n List of `Identifier` items (represented as `dict` in JSON). ' self.network = None ' Insurer network.\n Type `str`. ' self.order = None ' Relative order of the coverage.\n Type `int`. ' self.payor = None " Identifier for the plan or agreement issuer.\n List of `FHIRReference` items referencing `['Organization'], ['Patient'], ['RelatedPerson']` (represented as `dict` in JSON). " self.period = None ' Coverage start and end dates.\n Type `Period` (represented as `dict` in JSON). ' self.policyHolder = None " Owner of the policy.\n Type `FHIRReference` referencing `['Patient'], ['RelatedPerson'], ['Organization']` (represented as `dict` in JSON). " self.relationship = None ' Beneficiary relationship to the Subscriber.\n Type `CodeableConcept` (represented as `dict` in JSON). ' self.sequence = None ' The plan instance or sequence counter.\n Type `str`. ' self.status = None ' active | cancelled | draft | entered-in-error.\n Type `str`. ' self.subscriber = None " Subscriber to the policy.\n Type `FHIRReference` referencing `['Patient'], ['RelatedPerson']` (represented as `dict` in JSON). " self.subscriberId = None ' ID assigned to the Subscriber.\n Type `str`. ' self.type = None ' Type of coverage such as medical or accident.\n Type `CodeableConcept` (represented as `dict` in JSON). ' super(Coverage, self).__init__(jsondict=jsondict, strict=strict)<|docstring|>Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError<|endoftext|>
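Because the constructor above initialises every property to `None` before handing the optional `jsondict` to the superclass, a resource can also be built up by plain attribute assignment. A small sketch, under the assumption that the import path mirrors the record's `path` field and that the surrounding package behaves like the classic `jsondict`/`strict` FHIR client API:

```python
# Import path inferred from the record's `path` field; adjust to the
# installed package layout if it differs.
from fhir.resources.STU3.coverage import Coverage

cov = Coverage()                  # every property starts as None
cov.status = 'active'
cov.dependent = '1'
cov.subscriberId = 'SUB-12345'    # hypothetical identifier
print(cov.status, cov.subscriberId)
```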
29e33034240a75ad2541d2a3a690949c9fcae485e7f11011e6b4af1c304c74ef
def __init__(self, jsondict=None, strict=True): ' Initialize all valid properties.\n\n :raises: FHIRValidationError on validation errors, unless strict is False\n :param dict jsondict: A JSON dictionary to use for initialization\n :param bool strict: If True (the default), invalid variables will raise a TypeError\n ' self.classDisplay = None ' Display text for the class.\n Type `str`. ' self.class_fhir = None ' An identifier for the class.\n Type `str`. ' self.group = None ' An identifier for the group.\n Type `str`. ' self.groupDisplay = None ' Display text for an identifier for the group.\n Type `str`. ' self.plan = None ' An identifier for the plan.\n Type `str`. ' self.planDisplay = None ' Display text for the plan.\n Type `str`. ' self.subClass = None ' An identifier for the subsection of the class.\n Type `str`. ' self.subClassDisplay = None ' Display text for the subsection of the subclass.\n Type `str`. ' self.subGroup = None ' An identifier for the subsection of the group.\n Type `str`. ' self.subGroupDisplay = None ' Display text for the subsection of the group.\n Type `str`. ' self.subPlan = None ' An identifier for the subsection of the plan.\n Type `str`. ' self.subPlanDisplay = None ' Display text for the subsection of the plan.\n Type `str`. ' super(CoverageGrouping, self).__init__(jsondict=jsondict, strict=strict)
Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError
fhir/resources/STU3/coverage.py
__init__
mmabey/fhir.resources
0
python
def __init__(self, jsondict=None, strict=True): ' Initialize all valid properties.\n\n :raises: FHIRValidationError on validation errors, unless strict is False\n :param dict jsondict: A JSON dictionary to use for initialization\n :param bool strict: If True (the default), invalid variables will raise a TypeError\n ' self.classDisplay = None ' Display text for the class.\n Type `str`. ' self.class_fhir = None ' An identifier for the class.\n Type `str`. ' self.group = None ' An identifier for the group.\n Type `str`. ' self.groupDisplay = None ' Display text for an identifier for the group.\n Type `str`. ' self.plan = None ' An identifier for the plan.\n Type `str`. ' self.planDisplay = None ' Display text for the plan.\n Type `str`. ' self.subClass = None ' An identifier for the subsection of the class.\n Type `str`. ' self.subClassDisplay = None ' Display text for the subsection of the subclass.\n Type `str`. ' self.subGroup = None ' An identifier for the subsection of the group.\n Type `str`. ' self.subGroupDisplay = None ' Display text for the subsection of the group.\n Type `str`. ' self.subPlan = None ' An identifier for the subsection of the plan.\n Type `str`. ' self.subPlanDisplay = None ' Display text for the subsection of the plan.\n Type `str`. ' super(CoverageGrouping, self).__init__(jsondict=jsondict, strict=strict)
def __init__(self, jsondict=None, strict=True): ' Initialize all valid properties.\n\n :raises: FHIRValidationError on validation errors, unless strict is False\n :param dict jsondict: A JSON dictionary to use for initialization\n :param bool strict: If True (the default), invalid variables will raise a TypeError\n ' self.classDisplay = None ' Display text for the class.\n Type `str`. ' self.class_fhir = None ' An identifier for the class.\n Type `str`. ' self.group = None ' An identifier for the group.\n Type `str`. ' self.groupDisplay = None ' Display text for an identifier for the group.\n Type `str`. ' self.plan = None ' An identifier for the plan.\n Type `str`. ' self.planDisplay = None ' Display text for the plan.\n Type `str`. ' self.subClass = None ' An identifier for the subsection of the class.\n Type `str`. ' self.subClassDisplay = None ' Display text for the subsection of the subclass.\n Type `str`. ' self.subGroup = None ' An identifier for the subsection of the group.\n Type `str`. ' self.subGroupDisplay = None ' Display text for the subsection of the group.\n Type `str`. ' self.subPlan = None ' An identifier for the subsection of the plan.\n Type `str`. ' self.subPlanDisplay = None ' Display text for the subsection of the plan.\n Type `str`. ' super(CoverageGrouping, self).__init__(jsondict=jsondict, strict=strict)<|docstring|>Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError<|endoftext|>
90958978df33d7f3cca144d265757f2d604beca30ccbb11154ab77cfab1f3af0
def register_message(pt_type, parser): 'Register new message and a new handler.' if (pt_type not in PT_TYPES): PT_TYPES[pt_type] = parser if (pt_type not in PT_TYPES_HANDLERS): PT_TYPES_HANDLERS[pt_type] = []
Register new message and a new handler.
empower/managers/ranmanager/vbsp/__init__.py
register_message
5g-empower/empower-runtime
52
python
def register_message(pt_type, parser): if (pt_type not in PT_TYPES): PT_TYPES[pt_type] = parser if (pt_type not in PT_TYPES_HANDLERS): PT_TYPES_HANDLERS[pt_type] = []
def register_message(pt_type, parser): if (pt_type not in PT_TYPES): PT_TYPES[pt_type] = parser if (pt_type not in PT_TYPES_HANDLERS): PT_TYPES_HANDLERS[pt_type] = []<|docstring|>Register new message and a new handler.<|endoftext|>
ee675e1549b029b1d5c3b4a52fc8754a83380a6fc62ac1bca288acb1761fb91f
def register_callbacks(app, callback_str='handle_'): 'Register callbacks.' for pt_type in PT_TYPES_HANDLERS: if (not PT_TYPES[pt_type]): handler_name = (callback_str + pt_type) else: handler_name = (callback_str + PT_TYPES[pt_type][1]) if hasattr(app, handler_name): handler = getattr(app, handler_name) PT_TYPES_HANDLERS[pt_type].append(handler)
Register callbacks.
empower/managers/ranmanager/vbsp/__init__.py
register_callbacks
5g-empower/empower-runtime
52
python
def register_callbacks(app, callback_str='handle_'): for pt_type in PT_TYPES_HANDLERS: if (not PT_TYPES[pt_type]): handler_name = (callback_str + pt_type) else: handler_name = (callback_str + PT_TYPES[pt_type][1]) if hasattr(app, handler_name): handler = getattr(app, handler_name) PT_TYPES_HANDLERS[pt_type].append(handler)
def register_callbacks(app, callback_str='handle_'): for pt_type in PT_TYPES_HANDLERS: if (not PT_TYPES[pt_type]): handler_name = (callback_str + pt_type) else: handler_name = (callback_str + PT_TYPES[pt_type][1]) if hasattr(app, handler_name): handler = getattr(app, handler_name) PT_TYPES_HANDLERS[pt_type].append(handler)<|docstring|>Register callbacks.<|endoftext|>
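Handler discovery here is purely name based: `handle_` plus either the packet-type key or, when a parser tuple is registered, its second element. The sketch below mirrors that lookup with local stand-in registries instead of the real empower module, so the names and packet types are hypothetical:

```python
# Local stand-ins for the module-level registries, so the lookup logic can
# be exercised without the empower runtime.
PT_TYPES = {'hello': None, 'caps_response': (object, 'caps')}
PT_TYPES_HANDLERS = {'hello': [], 'caps_response': []}

class MyApp:
    def handle_hello(self, msg):      # matches the pt_type key 'hello'
        print('hello received')
    def handle_caps(self, msg):       # matches the parser name 'caps'
        print('capabilities received')

def register_callbacks(app, callback_str='handle_'):
    for pt_type in PT_TYPES_HANDLERS:
        if not PT_TYPES[pt_type]:
            handler_name = callback_str + pt_type
        else:
            handler_name = callback_str + PT_TYPES[pt_type][1]
        if hasattr(app, handler_name):
            PT_TYPES_HANDLERS[pt_type].append(getattr(app, handler_name))

register_callbacks(MyApp())
print({k: [h.__name__ for h in v] for k, v in PT_TYPES_HANDLERS.items()})
# {'hello': ['handle_hello'], 'caps_response': ['handle_caps']}
```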
253505f077c237184df5ca33d3a201b83a6a5a31368e24d1dd90f192a09dc8ec
def unregister_callbacks(app, callback_str='handle_'): 'Unregister callbacks.' for pt_type in PT_TYPES_HANDLERS: if (not PT_TYPES[pt_type]): handler_name = (callback_str + pt_type) else: handler_name = (callback_str + PT_TYPES[pt_type][1]) if hasattr(app, handler_name): handler = getattr(app, handler_name) PT_TYPES_HANDLERS[pt_type].remove(handler)
Unregister callbacks.
empower/managers/ranmanager/vbsp/__init__.py
unregister_callbacks
5g-empower/empower-runtime
52
python
def unregister_callbacks(app, callback_str='handle_'): for pt_type in PT_TYPES_HANDLERS: if (not PT_TYPES[pt_type]): handler_name = (callback_str + pt_type) else: handler_name = (callback_str + PT_TYPES[pt_type][1]) if hasattr(app, handler_name): handler = getattr(app, handler_name) PT_TYPES_HANDLERS[pt_type].remove(handler)
def unregister_callbacks(app, callback_str='handle_'): for pt_type in PT_TYPES_HANDLERS: if (not PT_TYPES[pt_type]): handler_name = (callback_str + pt_type) else: handler_name = (callback_str + PT_TYPES[pt_type][1]) if hasattr(app, handler_name): handler = getattr(app, handler_name) PT_TYPES_HANDLERS[pt_type].remove(handler)<|docstring|>Unregister callbacks.<|endoftext|>
6793f447409330a962753f9f90e5a95e8dd6553e168087ef39499a540b87042c
def register_callback(pt_type, handler): 'Register new message and a new handler.' if (pt_type not in PT_TYPES): raise KeyError('Packet type %u undefined') if (pt_type not in PT_TYPES_HANDLERS): PT_TYPES_HANDLERS[pt_type] = [] PT_TYPES_HANDLERS[pt_type].append(handler)
Register new message and a new handler.
empower/managers/ranmanager/vbsp/__init__.py
register_callback
5g-empower/empower-runtime
52
python
def register_callback(pt_type, handler): if (pt_type not in PT_TYPES): raise KeyError('Packet type %u undefined') if (pt_type not in PT_TYPES_HANDLERS): PT_TYPES_HANDLERS[pt_type] = [] PT_TYPES_HANDLERS[pt_type].append(handler)
def register_callback(pt_type, handler): if (pt_type not in PT_TYPES): raise KeyError('Packet type %u undefined') if (pt_type not in PT_TYPES_HANDLERS): PT_TYPES_HANDLERS[pt_type] = [] PT_TYPES_HANDLERS[pt_type].append(handler)<|docstring|>Register new message and a new handler.<|endoftext|>
438afff592d6b53d8569d7bed60150716583f93b7e9c96a9779f7d25fce429ba
def unregister_callback(pt_type, handler): 'Register new message and a new handler.' if (pt_type not in PT_TYPES): raise KeyError('Packet type %u undefined') if (pt_type not in PT_TYPES_HANDLERS): return PT_TYPES_HANDLERS[pt_type].remove(handler)
Unregister a handler for the given message type.
empower/managers/ranmanager/vbsp/__init__.py
unregister_callback
5g-empower/empower-runtime
52
python
def unregister_callback(pt_type, handler): if (pt_type not in PT_TYPES): raise KeyError('Packet type %u undefined') if (pt_type not in PT_TYPES_HANDLERS): return PT_TYPES_HANDLERS[pt_type].remove(handler)
def unregister_callback(pt_type, handler): if (pt_type not in PT_TYPES): raise KeyError('Packet type %u undefined') if (pt_type not in PT_TYPES_HANDLERS): return PT_TYPES_HANDLERS[pt_type].remove(handler)<|docstring|>Register new message and a new handler.<|endoftext|>
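For completeness, the explicit registration path can be exercised directly. The import path is assumed from the record's `path` field, and the `None` parser relies on the fact that `register_callbacks` only indexes the parser tuple when it is truthy; treat this as a sketch, not a documented empower recipe:

```python
# Import path taken from the record's `path` field (an assumption about the
# installed layout); the (struct, name) parser tuple shape is also an
# assumption based on the PT_TYPES[pt_type][1] lookup shown above.
from empower.managers.ranmanager.vbsp import (
    register_message, register_callback, unregister_callback)

def on_hello(msg):
    print('hello from device:', msg)

register_message('hello_hypothetical', None)        # hypothetical packet type
register_callback('hello_hypothetical', on_hello)   # attach the handler
# ... later, when the app is unloaded:
unregister_callback('hello_hypothetical', on_hello)
```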
e11fcb0bb1f0d8f62795221aea4939f547a4156a24cdba60d412c2444033fe0d
def decode_msg(msg_type, crud_result): 'Return the tuple (msg_type, crud_result).' if (int(msg_type) == MSG_TYPE_REQUEST): msg_type_str = 'request' if (crud_result == OP_UNDEFINED): crud_result_str = 'undefined' elif (crud_result == OP_CREATE): crud_result_str = 'create' elif (crud_result == OP_UPDATE): crud_result_str = 'update' elif (crud_result == OP_DELETE): crud_result_str = 'delete' else: crud_result_str = 'unknown' return (msg_type_str, crud_result_str) msg_type_str = 'response' if (crud_result == RESULT_SUCCESS): crud_result_str = 'success' elif (crud_result == RESULT_FAIL): crud_result_str = 'fail' else: crud_result_str = 'unknown' return (msg_type_str, crud_result_str)
Return the tuple (msg_type, crud_result).
empower/managers/ranmanager/vbsp/__init__.py
decode_msg
5g-empower/empower-runtime
52
python
def decode_msg(msg_type, crud_result): if (int(msg_type) == MSG_TYPE_REQUEST): msg_type_str = 'request' if (crud_result == OP_UNDEFINED): crud_result_str = 'undefined' elif (crud_result == OP_CREATE): crud_result_str = 'create' elif (crud_result == OP_UPDATE): crud_result_str = 'update' elif (crud_result == OP_DELETE): crud_result_str = 'delete' else: crud_result_str = 'unknown' return (msg_type_str, crud_result_str) msg_type_str = 'response' if (crud_result == RESULT_SUCCESS): crud_result_str = 'success' elif (crud_result == RESULT_FAIL): crud_result_str = 'fail' else: crud_result_str = 'unknown' return (msg_type_str, crud_result_str)
def decode_msg(msg_type, crud_result): if (int(msg_type) == MSG_TYPE_REQUEST): msg_type_str = 'request' if (crud_result == OP_UNDEFINED): crud_result_str = 'undefined' elif (crud_result == OP_CREATE): crud_result_str = 'create' elif (crud_result == OP_UPDATE): crud_result_str = 'update' elif (crud_result == OP_DELETE): crud_result_str = 'delete' else: crud_result_str = 'unknown' return (msg_type_str, crud_result_str) msg_type_str = 'response' if (crud_result == RESULT_SUCCESS): crud_result_str = 'success' elif (crud_result == RESULT_FAIL): crud_result_str = 'fail' else: crud_result_str = 'unknown' return (msg_type_str, crud_result_str)<|docstring|>Return the tuple (msg_type, crud_result).<|endoftext|>
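Since `decode_msg` refers to the constants unqualified, they are module-level names in the same file; assuming they are importable from there (and that message types are numeric, as the `int(msg_type)` cast suggests), a usage sketch looks like this:

```python
# decode_msg and the constants it references are module-level names in the
# same file, so importing them from there is a reasonable assumption.
from empower.managers.ranmanager.vbsp import (
    decode_msg, MSG_TYPE_REQUEST, OP_CREATE, RESULT_SUCCESS)

print(decode_msg(MSG_TYPE_REQUEST, OP_CREATE))           # ('request', 'create')
# Any msg_type other than MSG_TYPE_REQUEST is treated as a response
# (assuming the message type constants are integers).
print(decode_msg(MSG_TYPE_REQUEST + 1, RESULT_SUCCESS))  # ('response', 'success')
```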
ee608ebada079a6962348733cbe7bef52527145b6ce8aaa884d9ed4cdd5afca3
@cli.command() def status(): 'Show status information for all available devices.'
Show status information for all available devices.
projects/clusterctrl/src/python/clusterctrl/__main__.py
status
arrdem/source
4
python
@cli.command() def status():
@cli.command() def status(): <|docstring|>Show status information for all available devices.<|endoftext|>
0ec8ac587de3e93f8c24706514d968d103c2ff8df68e0d76c6eb5f7260580bb2
@cli.command() def maxpi(): 'Show the number of available/attached Pis.'
Show the number of available/attached Pis.
projects/clusterctrl/src/python/clusterctrl/__main__.py
maxpi
arrdem/source
4
python
@cli.command() def maxpi():
@cli.command() def maxpi(): <|docstring|>Show the number of available/attached Pis.<|endoftext|>
bb531def3c5caec36fb304731b5ed08ca47608ed5ea5598c2f2d39c286042dbb
@cli.command() def init(): 'Init ClusterHAT'
Init ClusterHAT
projects/clusterctrl/src/python/clusterctrl/__main__.py
init
arrdem/source
4
python
@cli.command() def init():
@cli.command() def init(): <|docstring|>Init ClusterHAT<|endoftext|>
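The three clusterctrl records above are docstring-only stubs attached to a `cli` group defined elsewhere in the module. A minimal, hypothetical click sketch of how such a group/command pair is typically wired (the echoed output is a placeholder, not clusterctrl behaviour):

```python
import click

@click.group()
def cli():
    """Hypothetical stand-in for the clusterctrl command group."""

@cli.command()
def status():
    """Show status information for all available devices."""
    click.echo('status: no devices detected (stub)')

if __name__ == '__main__':
    cli()
```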
d6fbb6b78a33dc076f9a2ee8798144e08ca31e30bf9d9a4658ccae08785dad2d
def recommend(self, full_graph, K, h_user, h_item): '\n Return a (n_user, K) matrix of recommended items for each user\n ' graph_slice = full_graph.edge_type_subgraph([self.user_to_item_etype]) n_users = full_graph.number_of_nodes(self.user_ntype) latest_interactions = dgl.sampling.select_topk(graph_slice, 1, self.timestamp, edge_dir='out') (user, latest_items) = latest_interactions.all_edges(form='uv', order='srcdst') assert torch.equal(user, torch.arange(n_users)) recommended_batches = [] user_batches = torch.arange(n_users).split(self.batch_size) for user_batch in user_batches: latest_item_batch = latest_items[user_batch].to(device=h_item.device) dist = (h_item[latest_item_batch] @ h_item.t()) for (i, u) in enumerate(user_batch.tolist()): interacted_items = full_graph.successors(u, etype=self.user_to_item_etype) dist[(i, interacted_items)] = (- np.inf) recommended_batches.append(dist.topk(K, 1)[1]) recommendations = torch.cat(recommended_batches, 0) return recommendations
Return a (n_user, K) matrix of recommended items for each user
examples/pytorch/pinsage/evaluation.py
recommend
alexpod1000/dgl
9,516
python
def recommend(self, full_graph, K, h_user, h_item): '\n \n ' graph_slice = full_graph.edge_type_subgraph([self.user_to_item_etype]) n_users = full_graph.number_of_nodes(self.user_ntype) latest_interactions = dgl.sampling.select_topk(graph_slice, 1, self.timestamp, edge_dir='out') (user, latest_items) = latest_interactions.all_edges(form='uv', order='srcdst') assert torch.equal(user, torch.arange(n_users)) recommended_batches = [] user_batches = torch.arange(n_users).split(self.batch_size) for user_batch in user_batches: latest_item_batch = latest_items[user_batch].to(device=h_item.device) dist = (h_item[latest_item_batch] @ h_item.t()) for (i, u) in enumerate(user_batch.tolist()): interacted_items = full_graph.successors(u, etype=self.user_to_item_etype) dist[(i, interacted_items)] = (- np.inf) recommended_batches.append(dist.topk(K, 1)[1]) recommendations = torch.cat(recommended_batches, 0) return recommendations
def recommend(self, full_graph, K, h_user, h_item): '\n \n ' graph_slice = full_graph.edge_type_subgraph([self.user_to_item_etype]) n_users = full_graph.number_of_nodes(self.user_ntype) latest_interactions = dgl.sampling.select_topk(graph_slice, 1, self.timestamp, edge_dir='out') (user, latest_items) = latest_interactions.all_edges(form='uv', order='srcdst') assert torch.equal(user, torch.arange(n_users)) recommended_batches = [] user_batches = torch.arange(n_users).split(self.batch_size) for user_batch in user_batches: latest_item_batch = latest_items[user_batch].to(device=h_item.device) dist = (h_item[latest_item_batch] @ h_item.t()) for (i, u) in enumerate(user_batch.tolist()): interacted_items = full_graph.successors(u, etype=self.user_to_item_etype) dist[(i, interacted_items)] = (- np.inf) recommended_batches.append(dist.topk(K, 1)[1]) recommendations = torch.cat(recommended_batches, 0) return recommendations<|docstring|>Return a (n_user, K) matrix of recommended items for each user<|endoftext|>
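The scoring step in `recommend` is a dot product of the user's latest item embedding against every item embedding, with already-interacted items masked to `-inf` before `topk`. A tiny self-contained PyTorch sketch of that masking-and-ranking step with made-up sizes:

```python
import torch

n_items, hidden = 6, 4
h_item = torch.randn(n_items, hidden)      # item embeddings
latest_item = 2                            # last item a user interacted with
interacted = torch.tensor([0, 2])          # items to exclude from the ranking

dist = h_item[latest_item] @ h_item.t()    # similarity to every item
dist[interacted] = -float('inf')           # never recommend already-seen items
topk = dist.topk(3).indices                # K = 3 recommendations
print(topk)
```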
69b0b44ef2013d55d20f35b3326cc92ffe4276da98f0ea1310fa7068b49e3e4b
def is_only_embed(maybe_embeds): '\n Checks whether the given value is a `tuple` or `list` containing only `embed-like`-s.\n \n Parameters\n ----------\n maybe_embeds : (`tuple` or `list`) of `EmbedBase` or `Any`\n The value to check whether is a `tuple` or `list` containing only `embed-like`-s.\n \n Returns\n -------\n is_only_embed : `bool`\n ' if (not isinstance(maybe_embeds, (list, tuple))): return False for maybe_embed in maybe_embeds: if (not isinstance(maybe_embed, EmbedBase)): return False return True
Checks whether the given value is a `tuple` or `list` containing only `embed-like`-s. Parameters ---------- maybe_embeds : (`tuple` or `list`) of `EmbedBase` or `Any` The value to check whether is a `tuple` or `list` containing only `embed-like`-s. Returns ------- is_only_embed : `bool`
hata/ext/slash/responding.py
is_only_embed
asleep-cult/hata
0
python
def is_only_embed(maybe_embeds): '\n Checks whether the given value is a `tuple` or `list` containing only `embed-like`-s.\n \n Parameters\n ----------\n maybe_embeds : (`tuple` or `list`) of `EmbedBase` or `Any`\n The value to check whether is a `tuple` or `list` containing only `embed-like`-s.\n \n Returns\n -------\n is_only_embed : `bool`\n ' if (not isinstance(maybe_embeds, (list, tuple))): return False for maybe_embed in maybe_embeds: if (not isinstance(maybe_embed, EmbedBase)): return False return True
def is_only_embed(maybe_embeds): '\n Checks whether the given value is a `tuple` or `list` containing only `embed-like`-s.\n \n Parameters\n ----------\n maybe_embeds : (`tuple` or `list`) of `EmbedBase` or `Any`\n The value to check whether is a `tuple` or `list` containing only `embed-like`-s.\n \n Returns\n -------\n is_only_embed : `bool`\n ' if (not isinstance(maybe_embeds, (list, tuple))): return False for maybe_embed in maybe_embeds: if (not isinstance(maybe_embed, EmbedBase)): return False return True<|docstring|>Checks whether the given value is a `tuple` or `list` containing only `embed-like`-s. Parameters ---------- maybe_embeds : (`tuple` or `list`) of `EmbedBase` or `Any` The value to check whether is a `tuple` or `list` containing only `embed-like`-s. Returns ------- is_only_embed : `bool`<|endoftext|>
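A quick behavioural sketch, assuming `is_only_embed` is importable from the module named in the record's `path` field and using hata's public `Embed` (a subclass of `EmbedBase`):

```python
# Import paths assumed from the record's `path` field and hata's public API;
# treat them as a sketch rather than a guaranteed layout.
from hata import Embed
from hata.ext.slash.responding import is_only_embed

print(is_only_embed([Embed('first'), Embed('second')]))  # True
print(is_only_embed([Embed('first'), 'plain text']))     # False
print(is_only_embed('not a list at all'))                # False
```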
8026955ea6a04042dfeb0edb9559c9a118a3706c22935a7f70718f072806b055
async def get_request_coros(client, interaction_event, show_for_invoking_user_only, response): '\n Gets request coroutine after an output from a command coroutine. Might return `None` if there is nothing to send.\n \n This function is a coroutine generator, which should be ued inside of an async for loop.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n response : `Any`\n Any object yielded or returned by the command coroutine.\n \n Yields\n -------\n request_coro : `None` or `coroutine`\n ' response_state = interaction_event._response_state if (response is None): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) return if (isinstance(response, (str, EmbedBase)) or is_only_embed(response)): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, response, show_for_invoking_user_only=show_for_invoking_user_only)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_DEFERRED): (yield client.interaction_response_message_edit(interaction_event, response)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_RESPONDED): (yield client.interaction_followup_message_create(interaction_event, response, show_for_invoking_user_only=show_for_invoking_user_only)) return return if is_coroutine_generator(response): response = (await process_command_gen(client, interaction_event, show_for_invoking_user_only, response)) async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): (yield request_coro) return if isinstance(response, SlashResponse): for request_coro in response.get_request_coros(client, interaction_event, show_for_invoking_user_only): (yield request_coro) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) return return
Gets the request coroutine after an output from a command coroutine. Might return `None` if there is nothing to send. This function is a coroutine generator, which should be used inside of an async for loop. Parameters ---------- client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. response : `Any` Any object yielded or returned by the command coroutine. Yields ------- request_coro : `None` or `coroutine`
hata/ext/slash/responding.py
get_request_coros
asleep-cult/hata
0
python
async def get_request_coros(client, interaction_event, show_for_invoking_user_only, response): '\n Gets request coroutine after an output from a command coroutine. Might return `None` if there is nothing to send.\n \n This function is a coroutine generator, which should be ued inside of an async for loop.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n response : `Any`\n Any object yielded or returned by the command coroutine.\n \n Yields\n -------\n request_coro : `None` or `coroutine`\n ' response_state = interaction_event._response_state if (response is None): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) return if (isinstance(response, (str, EmbedBase)) or is_only_embed(response)): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, response, show_for_invoking_user_only=show_for_invoking_user_only)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_DEFERRED): (yield client.interaction_response_message_edit(interaction_event, response)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_RESPONDED): (yield client.interaction_followup_message_create(interaction_event, response, show_for_invoking_user_only=show_for_invoking_user_only)) return return if is_coroutine_generator(response): response = (await process_command_gen(client, interaction_event, show_for_invoking_user_only, response)) async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): (yield request_coro) return if isinstance(response, SlashResponse): for request_coro in response.get_request_coros(client, interaction_event, show_for_invoking_user_only): (yield request_coro) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) return return
async def get_request_coros(client, interaction_event, show_for_invoking_user_only, response): '\n Gets request coroutine after an output from a command coroutine. Might return `None` if there is nothing to send.\n \n This function is a coroutine generator, which should be ued inside of an async for loop.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n response : `Any`\n Any object yielded or returned by the command coroutine.\n \n Yields\n -------\n request_coro : `None` or `coroutine`\n ' response_state = interaction_event._response_state if (response is None): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) return if (isinstance(response, (str, EmbedBase)) or is_only_embed(response)): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, response, show_for_invoking_user_only=show_for_invoking_user_only)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_DEFERRED): (yield client.interaction_response_message_edit(interaction_event, response)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_RESPONDED): (yield client.interaction_followup_message_create(interaction_event, response, show_for_invoking_user_only=show_for_invoking_user_only)) return return if is_coroutine_generator(response): response = (await process_command_gen(client, interaction_event, show_for_invoking_user_only, response)) async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): (yield request_coro) return if isinstance(response, SlashResponse): for request_coro in response.get_request_coros(client, interaction_event, show_for_invoking_user_only): (yield request_coro) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) return return<|docstring|>Gets request coroutine after an output from a command coroutine. Might return `None` if there is nothing to send. This function is a coroutine generator, which should be ued inside of an async for loop. Parameters ---------- client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. response : `Any` Any object yielded or returned by the command coroutine. Yields ------- request_coro : `None` or `coroutine`<|endoftext|>
b7852d26b14371506267a71a59cf8291cf88a10231cb429d1ffca9677fd44d6c
async def process_command_gen(client, interaction_event, show_for_invoking_user_only, coro): '\n Processes a slash command coroutine generator.\n \n This function os a coroutine.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n coro : `CoroutineGenerator`\n A coroutine generator with will send command response.\n \n Returns\n -------\n response : `Any`\n Returned object by the coroutine generator.\n \n Raises\n ------\n BaseException\n Any exception raised by `coro`.\n ' response_message = None response_exception = None while True: if (response_exception is None): step = coro.asend(response_message) else: step = coro.athrow(response_exception) try: response = (await step) except StopAsyncIteration as err: if ((response_exception is not None) and (response_exception is not err)): raise args = err.args if args: response = args[0] else: response = None break except InteractionAbortedError as err: response = err.response break except BaseException as err: if ((response_exception is None) or (response_exception is not err)): raise if isinstance(err, ConnectionError): return if isinstance(err, DiscordException): if (err.code in (ERROR_CODES.unknown_channel, ERROR_CODES.invalid_access, ERROR_CODES.invalid_permissions, ERROR_CODES.cannot_message_user, ERROR_CODES.unknown_interaction)): return raise else: response_message = None response_exception = None async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): try: response_message = (await request_coro) except BaseException as err: response_message = None response_exception = err break return response
Processes a slash command coroutine generator. This function is a coroutine. Parameters ---------- client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. coro : `CoroutineGenerator` A coroutine generator which will send the command response. Returns ------- response : `Any` The object returned by the coroutine generator. Raises ------ BaseException Any exception raised by `coro`.
hata/ext/slash/responding.py
process_command_gen
asleep-cult/hata
0
python
async def process_command_gen(client, interaction_event, show_for_invoking_user_only, coro): '\n Processes a slash command coroutine generator.\n \n This function os a coroutine.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n coro : `CoroutineGenerator`\n A coroutine generator with will send command response.\n \n Returns\n -------\n response : `Any`\n Returned object by the coroutine generator.\n \n Raises\n ------\n BaseException\n Any exception raised by `coro`.\n ' response_message = None response_exception = None while True: if (response_exception is None): step = coro.asend(response_message) else: step = coro.athrow(response_exception) try: response = (await step) except StopAsyncIteration as err: if ((response_exception is not None) and (response_exception is not err)): raise args = err.args if args: response = args[0] else: response = None break except InteractionAbortedError as err: response = err.response break except BaseException as err: if ((response_exception is None) or (response_exception is not err)): raise if isinstance(err, ConnectionError): return if isinstance(err, DiscordException): if (err.code in (ERROR_CODES.unknown_channel, ERROR_CODES.invalid_access, ERROR_CODES.invalid_permissions, ERROR_CODES.cannot_message_user, ERROR_CODES.unknown_interaction)): return raise else: response_message = None response_exception = None async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): try: response_message = (await request_coro) except BaseException as err: response_message = None response_exception = err break return response
async def process_command_gen(client, interaction_event, show_for_invoking_user_only, coro): '\n Processes a slash command coroutine generator.\n \n This function os a coroutine.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n coro : `CoroutineGenerator`\n A coroutine generator with will send command response.\n \n Returns\n -------\n response : `Any`\n Returned object by the coroutine generator.\n \n Raises\n ------\n BaseException\n Any exception raised by `coro`.\n ' response_message = None response_exception = None while True: if (response_exception is None): step = coro.asend(response_message) else: step = coro.athrow(response_exception) try: response = (await step) except StopAsyncIteration as err: if ((response_exception is not None) and (response_exception is not err)): raise args = err.args if args: response = args[0] else: response = None break except InteractionAbortedError as err: response = err.response break except BaseException as err: if ((response_exception is None) or (response_exception is not err)): raise if isinstance(err, ConnectionError): return if isinstance(err, DiscordException): if (err.code in (ERROR_CODES.unknown_channel, ERROR_CODES.invalid_access, ERROR_CODES.invalid_permissions, ERROR_CODES.cannot_message_user, ERROR_CODES.unknown_interaction)): return raise else: response_message = None response_exception = None async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): try: response_message = (await request_coro) except BaseException as err: response_message = None response_exception = err break return response<|docstring|>Processes a slash command coroutine generator. This function os a coroutine. Parameters ---------- client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. coro : `CoroutineGenerator` A coroutine generator with will send command response. Returns ------- response : `Any` Returned object by the coroutine generator. Raises ------ BaseException Any exception raised by `coro`.<|endoftext|>
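Stripped of the Discord error handling, the driving loop above is the plain `asend`/`athrow` protocol: feed the previous request's result back into the generator, or throw the exception it caused. A self-contained asyncio sketch of that protocol with a toy generator standing in for a slash command:

```python
import asyncio

async def toy_command():
    # Toy stand-in for a slash command written as an async generator.
    sent_back = yield 'first response'
    print('driver sent back:', sent_back)
    yield 'second response'

async def drive(gen):
    result = None
    while True:
        try:
            yielded = await gen.asend(result)   # feed back the previous result
        except StopAsyncIteration:
            break
        result = f'message for {yielded!r}'     # pretend we sent it to Discord
    return result

print(asyncio.run(drive(toy_command())))
```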
d0af751f5df58ec44f1dffaad1a78d452d9f52fc0162e36ffc55af77a796ca26
async def process_command_coro(client, interaction_event, show_for_invoking_user_only, coro): '\n Processes a slash command coroutine.\n \n If the coroutine returns or yields a string or an embed like then sends it to the respective channel.\n \n This function is a coroutine.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n coro : `coroutine`\n A coroutine with will send command response.\n \n Raises\n ------\n BaseException\n Any exception raised by `coro`.\n ' if is_coroutine_generator(coro): response = (await process_command_gen(client, interaction_event, show_for_invoking_user_only, coro)) else: try: response = (await coro) except InteractionAbortedError as err: response = err.response async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): try: (await request_coro) except BaseException as err: if isinstance(err, ConnectionError): return if isinstance(err, DiscordException): if (err.code in (ERROR_CODES.unknown_channel, ERROR_CODES.invalid_access, ERROR_CODES.invalid_permissions, ERROR_CODES.cannot_message_user, ERROR_CODES.unknown_interaction)): return raise
Processes a slash command coroutine. If the coroutine returns or yields a string or an embed-like object, then it is sent to the respective channel. This function is a coroutine. Parameters ---------- client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. coro : `coroutine` A coroutine which will send the command response. Raises ------ BaseException Any exception raised by `coro`.
hata/ext/slash/responding.py
process_command_coro
asleep-cult/hata
0
python
async def process_command_coro(client, interaction_event, show_for_invoking_user_only, coro): '\n Processes a slash command coroutine.\n \n If the coroutine returns or yields a string or an embed like then sends it to the respective channel.\n \n This function is a coroutine.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n coro : `coroutine`\n A coroutine with will send command response.\n \n Raises\n ------\n BaseException\n Any exception raised by `coro`.\n ' if is_coroutine_generator(coro): response = (await process_command_gen(client, interaction_event, show_for_invoking_user_only, coro)) else: try: response = (await coro) except InteractionAbortedError as err: response = err.response async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): try: (await request_coro) except BaseException as err: if isinstance(err, ConnectionError): return if isinstance(err, DiscordException): if (err.code in (ERROR_CODES.unknown_channel, ERROR_CODES.invalid_access, ERROR_CODES.invalid_permissions, ERROR_CODES.cannot_message_user, ERROR_CODES.unknown_interaction)): return raise
async def process_command_coro(client, interaction_event, show_for_invoking_user_only, coro): '\n Processes a slash command coroutine.\n \n If the coroutine returns or yields a string or an embed like then sends it to the respective channel.\n \n This function is a coroutine.\n \n Parameters\n ----------\n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n coro : `coroutine`\n A coroutine with will send command response.\n \n Raises\n ------\n BaseException\n Any exception raised by `coro`.\n ' if is_coroutine_generator(coro): response = (await process_command_gen(client, interaction_event, show_for_invoking_user_only, coro)) else: try: response = (await coro) except InteractionAbortedError as err: response = err.response async for request_coro in get_request_coros(client, interaction_event, show_for_invoking_user_only, response): try: (await request_coro) except BaseException as err: if isinstance(err, ConnectionError): return if isinstance(err, DiscordException): if (err.code in (ERROR_CODES.unknown_channel, ERROR_CODES.invalid_access, ERROR_CODES.invalid_permissions, ERROR_CODES.cannot_message_user, ERROR_CODES.unknown_interaction)): return raise<|docstring|>Processes a slash command coroutine. If the coroutine returns or yields a string or an embed like then sends it to the respective channel. This function is a coroutine. Parameters ---------- client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. coro : `coroutine` A coroutine with will send command response. Raises ------ BaseException Any exception raised by `coro`.<|endoftext|>
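The first branch of `process_command_coro` is a dispatch on whether the command produced a plain coroutine or an async generator. The sketch below uses `inspect.isasyncgen` as a stand-in for hata's `is_coroutine_generator` check, with toy commands in place of real slash commands:

```python
import asyncio
import inspect

async def plain_command():
    return 'single response'

async def generator_command():
    yield 'first'
    yield 'second'

async def process(coro_or_gen):
    # inspect.isasyncgen stands in for hata's is_coroutine_generator check.
    if inspect.isasyncgen(coro_or_gen):
        return [item async for item in coro_or_gen]
    return await coro_or_gen

print(asyncio.run(process(plain_command())))       # 'single response'
print(asyncio.run(process(generator_command())))   # ['first', 'second']
```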
a121bc00c1b42f36f4a2bc3fd987c8774abd4f0a71f22efae7d474a2762ee28c
def abort(content=..., *, embed=..., file=..., allowed_mentions=..., tts=..., show_for_invoking_user_only=...): "\n Aborts the slash response with sending the passed parameters as a response.\n \n The abortion auto detects `show_for_invoking_user_only` if not given. Not follows the command's preference.\n If only a string `content` is given, `show_for_invoking_user_only` will become `True`, else `False`. The reason of\n becoming `False` at that case is, Discord ignores every other field except string content.\n \n Parameters\n ----------\n content : `str`, ``EmbedBase``, `Any`, Optional\n The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile\n if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string.\n \n If given as ``EmbedBase`` instance, then is sent as the message's embed.\n embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional\n The embedded content of the message.\n \n If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised.\n file : `Any`, Optional\n A file to send. Check ``Client._create_file_form`` for details.\n allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional\n Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details.\n tts : `bool`, Optional\n Whether the message is text-to-speech.\n show_for_invoking_user_only : `bool`, Optional\n Whether the sent message should only be shown to the invoking user.\n \n If given as `True`, only the message's content will be processed by Discord.\n \n Raises\n ------\n InteractionAbortedError\n The exception which aborts the interaction, then yields the response.\n " if (show_for_invoking_user_only is ...): if (embed is not ...): show_for_invoking_user_only = False elif (file is not ...): show_for_invoking_user_only = False elif (allowed_mentions is not ...): show_for_invoking_user_only = False elif (tts is not ...): show_for_invoking_user_only = False elif (content is ...): show_for_invoking_user_only = True elif is_only_embed(content): show_for_invoking_user_only = False else: show_for_invoking_user_only = True response = SlashResponse(content, embed=embed, file=file, allowed_mentions=allowed_mentions, tts=tts, show_for_invoking_user_only=show_for_invoking_user_only, force_new_message=(- 1)) raise InteractionAbortedError(response)
Aborts the slash response with sending the passed parameters as a response. The abortion auto detects `show_for_invoking_user_only` if not given. Not follows the command's preference. If only a string `content` is given, `show_for_invoking_user_only` will become `True`, else `False`. The reason of becoming `False` at that case is, Discord ignores every other field except string content. Parameters ---------- content : `str`, ``EmbedBase``, `Any`, Optional The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string. If given as ``EmbedBase`` instance, then is sent as the message's embed. embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional The embedded content of the message. If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised. file : `Any`, Optional A file to send. Check ``Client._create_file_form`` for details. allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details. tts : `bool`, Optional Whether the message is text-to-speech. show_for_invoking_user_only : `bool`, Optional Whether the sent message should only be shown to the invoking user. If given as `True`, only the message's content will be processed by Discord. Raises ------ InteractionAbortedError The exception which aborts the interaction, then yields the response.
hata/ext/slash/responding.py
abort
asleep-cult/hata
0
python
def abort(content=..., *, embed=..., file=..., allowed_mentions=..., tts=..., show_for_invoking_user_only=...): "\n Aborts the slash response with sending the passed parameters as a response.\n \n The abortion auto detects `show_for_invoking_user_only` if not given. Not follows the command's preference.\n If only a string `content` is given, `show_for_invoking_user_only` will become `True`, else `False`. The reason of\n becoming `False` at that case is, Discord ignores every other field except string content.\n \n Parameters\n ----------\n content : `str`, ``EmbedBase``, `Any`, Optional\n The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile\n if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string.\n \n If given as ``EmbedBase`` instance, then is sent as the message's embed.\n embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional\n The embedded content of the message.\n \n If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised.\n file : `Any`, Optional\n A file to send. Check ``Client._create_file_form`` for details.\n allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional\n Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details.\n tts : `bool`, Optional\n Whether the message is text-to-speech.\n show_for_invoking_user_only : `bool`, Optional\n Whether the sent message should only be shown to the invoking user.\n \n If given as `True`, only the message's content will be processed by Discord.\n \n Raises\n ------\n InteractionAbortedError\n The exception which aborts the interaction, then yields the response.\n " if (show_for_invoking_user_only is ...): if (embed is not ...): show_for_invoking_user_only = False elif (file is not ...): show_for_invoking_user_only = False elif (allowed_mentions is not ...): show_for_invoking_user_only = False elif (tts is not ...): show_for_invoking_user_only = False elif (content is ...): show_for_invoking_user_only = True elif is_only_embed(content): show_for_invoking_user_only = False else: show_for_invoking_user_only = True response = SlashResponse(content, embed=embed, file=file, allowed_mentions=allowed_mentions, tts=tts, show_for_invoking_user_only=show_for_invoking_user_only, force_new_message=(- 1)) raise InteractionAbortedError(response)
def abort(content=..., *, embed=..., file=..., allowed_mentions=..., tts=..., show_for_invoking_user_only=...): "\n Aborts the slash response with sending the passed parameters as a response.\n \n The abortion auto detects `show_for_invoking_user_only` if not given. Not follows the command's preference.\n If only a string `content` is given, `show_for_invoking_user_only` will become `True`, else `False`. The reason of\n becoming `False` at that case is, Discord ignores every other field except string content.\n \n Parameters\n ----------\n content : `str`, ``EmbedBase``, `Any`, Optional\n The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile\n if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string.\n \n If given as ``EmbedBase`` instance, then is sent as the message's embed.\n embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional\n The embedded content of the message.\n \n If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised.\n file : `Any`, Optional\n A file to send. Check ``Client._create_file_form`` for details.\n allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional\n Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details.\n tts : `bool`, Optional\n Whether the message is text-to-speech.\n show_for_invoking_user_only : `bool`, Optional\n Whether the sent message should only be shown to the invoking user.\n \n If given as `True`, only the message's content will be processed by Discord.\n \n Raises\n ------\n InteractionAbortedError\n The exception which aborts the interaction, then yields the response.\n " if (show_for_invoking_user_only is ...): if (embed is not ...): show_for_invoking_user_only = False elif (file is not ...): show_for_invoking_user_only = False elif (allowed_mentions is not ...): show_for_invoking_user_only = False elif (tts is not ...): show_for_invoking_user_only = False elif (content is ...): show_for_invoking_user_only = True elif is_only_embed(content): show_for_invoking_user_only = False else: show_for_invoking_user_only = True response = SlashResponse(content, embed=embed, file=file, allowed_mentions=allowed_mentions, tts=tts, show_for_invoking_user_only=show_for_invoking_user_only, force_new_message=(- 1)) raise InteractionAbortedError(response)<|docstring|>Aborts the slash response with sending the passed parameters as a response. The abortion auto detects `show_for_invoking_user_only` if not given. Not follows the command's preference. If only a string `content` is given, `show_for_invoking_user_only` will become `True`, else `False`. The reason of becoming `False` at that case is, Discord ignores every other field except string content. Parameters ---------- content : `str`, ``EmbedBase``, `Any`, Optional The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string. If given as ``EmbedBase`` instance, then is sent as the message's embed. embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional The embedded content of the message. If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised. file : `Any`, Optional A file to send. Check ``Client._create_file_form`` for details. allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details. tts : `bool`, Optional Whether the message is text-to-speech. show_for_invoking_user_only : `bool`, Optional Whether the sent message should only be shown to the invoking user. If given as `True`, only the message's content will be processed by Discord. Raises ------ InteractionAbortedError The exception which aborts the interaction, then yields the response.<|endoftext|>
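A minimal usage sketch for the `abort` helper in the record above: calling it with only a string raises `InteractionAbortedError` carrying a private (invoking-user-only) `SlashResponse`. The import path is inferred from the record's `path` field and the surrounding client/command wiring is omitted, so treat the exact names as assumptions rather than documented entry points.

from hata.ext.slash.responding import InteractionAbortedError, abort

try:
    # Only string content is passed, so `abort` auto-selects
    # show_for_invoking_user_only=True, as described in the docstring above.
    abort('You are missing the required role.')
except InteractionAbortedError as err:
    response = err.response   # the SlashResponse built by `abort`
    print(response)           # repr lists the stored response parameters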
b6cdb732ce13e8beaa293d483cc90b1d38e23d421335269d3a7b12ae0251e588
def __init__(self, content=..., *, embed=..., file=..., allowed_mentions=..., tts=..., show_for_invoking_user_only=..., force_new_message=False): "\n Creates a new ``SlashResponse`` instance with the given parameters.\n \n Parameters\n ----------\n content : `str`, ``EmbedBase``, `Any`, Optional\n The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile\n if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string.\n \n If given as ``EmbedBase`` instance, then is sent as the message's embed.\n \n embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional\n The embedded content of the message.\n \n If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised.\n file : `Any`, Optional\n A file to send. Check ``Client._create_file_form`` for details.\n allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional\n Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details.\n tts : `bool`, Optional\n Whether the message is text-to-speech.\n show_for_invoking_user_only : `bool`, Optional\n Whether the sent message should only be shown to the invoking user. Defaults to the value passed when adding\n the command.\n \n If given as `True` only the message's content will be processed by Discord.\n force_new_message : `int` or `bool`, Optional\n Whether a new message should be forced out from Discord allowing the client to retrieve a new ``Message``\n object as well. Defaults to `False`.\n \n If given as `-1` will only force new message if the event already deferred.\n " self._force_new_message = force_new_message self._parameters = parameters = {} if (content is not ...): parameters['content'] = content if (embed is not ...): parameters['embed'] = embed if (file is not ...): parameters['file'] = file if (allowed_mentions is not ...): parameters['allowed_mentions'] = allowed_mentions if (tts is not ...): parameters['tts'] = tts if (show_for_invoking_user_only is not ...): parameters['show_for_invoking_user_only'] = show_for_invoking_user_only
Creates a new ``SlashResponse`` instance with the given parameters. Parameters ---------- content : `str`, ``EmbedBase``, `Any`, Optional The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string. If given as ``EmbedBase`` instance, then is sent as the message's embed. embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional The embedded content of the message. If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised. file : `Any`, Optional A file to send. Check ``Client._create_file_form`` for details. allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details. tts : `bool`, Optional Whether the message is text-to-speech. show_for_invoking_user_only : `bool`, Optional Whether the sent message should only be shown to the invoking user. Defaults to the value passed when adding the command. If given as `True` only the message's content will be processed by Discord. force_new_message : `int` or `bool`, Optional Whether a new message should be forced out from Discord allowing the client to retrieve a new ``Message`` object as well. Defaults to `False`. If given as `-1` will only force new message if the event already deferred.
hata/ext/slash/responding.py
__init__
asleep-cult/hata
0
python
def __init__(self, content=..., *, embed=..., file=..., allowed_mentions=..., tts=..., show_for_invoking_user_only=..., force_new_message=False): "\n Creates a new ``SlashResponse`` instance with the given parameters.\n \n Parameters\n ----------\n content : `str`, ``EmbedBase``, `Any`, Optional\n The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile\n if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string.\n \n If given as ``EmbedBase`` instance, then is sent as the message's embed.\n \n embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional\n The embedded content of the message.\n \n If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised.\n file : `Any`, Optional\n A file to send. Check ``Client._create_file_form`` for details.\n allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional\n Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details.\n tts : `bool`, Optional\n Whether the message is text-to-speech.\n show_for_invoking_user_only : `bool`, Optional\n Whether the sent message should only be shown to the invoking user. Defaults to the value passed when adding\n the command.\n \n If given as `True` only the message's content will be processed by Discord.\n force_new_message : `int` or `bool`, Optional\n Whether a new message should be forced out from Discord allowing the client to retrieve a new ``Message``\n object as well. Defaults to `False`.\n \n If given as `-1` will only force new message if the event already deferred.\n " self._force_new_message = force_new_message self._parameters = parameters = {} if (content is not ...): parameters['content'] = content if (embed is not ...): parameters['embed'] = embed if (file is not ...): parameters['file'] = file if (allowed_mentions is not ...): parameters['allowed_mentions'] = allowed_mentions if (tts is not ...): parameters['tts'] = tts if (show_for_invoking_user_only is not ...): parameters['show_for_invoking_user_only'] = show_for_invoking_user_only
def __init__(self, content=..., *, embed=..., file=..., allowed_mentions=..., tts=..., show_for_invoking_user_only=..., force_new_message=False): "\n Creates a new ``SlashResponse`` instance with the given parameters.\n \n Parameters\n ----------\n content : `str`, ``EmbedBase``, `Any`, Optional\n The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile\n if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string.\n \n If given as ``EmbedBase`` instance, then is sent as the message's embed.\n \n embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional\n The embedded content of the message.\n \n If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised.\n file : `Any`, Optional\n A file to send. Check ``Client._create_file_form`` for details.\n allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional\n Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details.\n tts : `bool`, Optional\n Whether the message is text-to-speech.\n show_for_invoking_user_only : `bool`, Optional\n Whether the sent message should only be shown to the invoking user. Defaults to the value passed when adding\n the command.\n \n If given as `True` only the message's content will be processed by Discord.\n force_new_message : `int` or `bool`, Optional\n Whether a new message should be forced out from Discord allowing the client to retrieve a new ``Message``\n object as well. Defaults to `False`.\n \n If given as `-1` will only force new message if the event already deferred.\n " self._force_new_message = force_new_message self._parameters = parameters = {} if (content is not ...): parameters['content'] = content if (embed is not ...): parameters['embed'] = embed if (file is not ...): parameters['file'] = file if (allowed_mentions is not ...): parameters['allowed_mentions'] = allowed_mentions if (tts is not ...): parameters['tts'] = tts if (show_for_invoking_user_only is not ...): parameters['show_for_invoking_user_only'] = show_for_invoking_user_only<|docstring|>Creates a new ``SlashResponse`` instance with the given parameters. Parameters ---------- content : `str`, ``EmbedBase``, `Any`, Optional The message's content if given. If given as `str` or empty string, then no content will be sent, meanwhile if any other non `str` or ``EmbedBase`` instance is given, then will be casted to string. If given as ``EmbedBase`` instance, then is sent as the message's embed. embed : ``EmbedBase`` instance or `list` of ``EmbedBase`` instances, Optional The embedded content of the message. If `embed` and `content` parameters are both given as ``EmbedBase`` instance, then `TypeError` is raised. file : `Any`, Optional A file to send. Check ``Client._create_file_form`` for details. allowed_mentions : `None`, `str`, ``UserBase``, ``Role``, `list` of (`str`, ``UserBase``, ``Role`` ), Optional Which user or role can the message ping (or everyone). Check ``Client._parse_allowed_mentions`` for details. tts : `bool`, Optional Whether the message is text-to-speech. show_for_invoking_user_only : `bool`, Optional Whether the sent message should only be shown to the invoking user. Defaults to the value passed when adding the command. If given as `True` only the message's content will be processed by Discord. force_new_message : `int` or `bool`, Optional Whether a new message should be forced out from Discord allowing the client to retrieve a new ``Message`` object as well. Defaults to `False`. If given as `-1` will only force new message if the event already deferred.<|endoftext|>
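A short sketch of constructing `SlashResponse` directly, as described in the record above; only explicitly passed parameters end up in the internal `_parameters` mapping, everything left at the Ellipsis default is treated as "not given". The import path is an assumption based on the record's `path` field.

from hata.ext.slash.responding import SlashResponse

# Only `content`, `tts` and `show_for_invoking_user_only` are stored here;
# embed, file and allowed_mentions stay unset.
response = SlashResponse('Job queued.', tts=False, show_for_invoking_user_only=True)
print(response)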
818aca27430876f05a36fe73805c3a9f17dba19cf32d5f0d576bde09fd070516
def _get_response_parameters(self, allowed_parameters): '\n Gets response parameters to pass to a ``Client`` method.\n \n Parameters\n ----------\n allowed_parameters : `tuple` of `str`\n Allowed parameters to be passed to the respective client method.\n \n Returns\n -------\n response_parameters : `dict` of (`str`, `Any`) items\n Parameters to pass the the respective client method.\n ' parameters = self._parameters response_parameters = {} for key in allowed_parameters: try: value = parameters[key] except KeyError: continue response_parameters[key] = value return response_parameters
Gets response parameters to pass to a ``Client`` method. Parameters ---------- allowed_parameters : `tuple` of `str` Allowed parameters to be passed to the respective client method. Returns ------- response_parameters : `dict` of (`str`, `Any`) items Parameters to pass the the respective client method.
hata/ext/slash/responding.py
_get_response_parameters
asleep-cult/hata
0
python
def _get_response_parameters(self, allowed_parameters): '\n Gets response parameters to pass to a ``Client`` method.\n \n Parameters\n ----------\n allowed_parameters : `tuple` of `str`\n Allowed parameters to be passed to the respective client method.\n \n Returns\n -------\n response_parameters : `dict` of (`str`, `Any`) items\n Parameters to pass the the respective client method.\n ' parameters = self._parameters response_parameters = {} for key in allowed_parameters: try: value = parameters[key] except KeyError: continue response_parameters[key] = value return response_parameters
def _get_response_parameters(self, allowed_parameters): '\n Gets response parameters to pass to a ``Client`` method.\n \n Parameters\n ----------\n allowed_parameters : `tuple` of `str`\n Allowed parameters to be passed to the respective client method.\n \n Returns\n -------\n response_parameters : `dict` of (`str`, `Any`) items\n Parameters to pass the the respective client method.\n ' parameters = self._parameters response_parameters = {} for key in allowed_parameters: try: value = parameters[key] except KeyError: continue response_parameters[key] = value return response_parameters<|docstring|>Gets response parameters to pass to a ``Client`` method. Parameters ---------- allowed_parameters : `tuple` of `str` Allowed parameters to be passed to the respective client method. Returns ------- response_parameters : `dict` of (`str`, `Any`) items Parameters to pass the the respective client method.<|endoftext|>
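To make the filtering behaviour of `_get_response_parameters` concrete, the sketch below builds a response and asks for a restricted key set; it pokes at a private helper purely for illustration, with the import path inferred from the record.

from hata.ext.slash.responding import SlashResponse

response = SlashResponse('hi', tts=True, show_for_invoking_user_only=True)
# Only whitelisted keys survive; 'show_for_invoking_user_only' is not requested here.
print(response._get_response_parameters(('content', 'tts')))   # {'content': 'hi', 'tts': True}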
d6809d4ffc276cde9eff8bdf3c626fece8e60d7c2503954370f90329c667101f
def get_request_coros(self, client, interaction_event, show_for_invoking_user_only): '\n Gets request coroutine buildable from the ``SlashResponse``.\n \n This method is a generator, which should be used inside of a `for` loop.\n \n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n \n Yields\n -------\n request_coro : `None` or `coroutine`\n ' response_state = interaction_event._response_state force_new_message = self._force_new_message if force_new_message: if (force_new_message > 0): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return else: if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): if ('file' in self._parameters): show_for_invoking_user_only = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) else: response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, **response_parameters)) return response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return else: if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): if ('file' in self._parameters): show_for_invoking_user_only = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) else: response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, **response_parameters)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_DEFERRED): response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_RESPONDED): response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return
Gets request coroutine buildable from the ``SlashResponse``. This method is a generator, which should be used inside of a `for` loop. client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. Yields ------- request_coro : `None` or `coroutine`
hata/ext/slash/responding.py
get_request_coros
asleep-cult/hata
0
python
def get_request_coros(self, client, interaction_event, show_for_invoking_user_only): '\n Gets request coroutine buildable from the ``SlashResponse``.\n \n This method is a generator, which should be used inside of a `for` loop.\n \n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n \n Yields\n -------\n request_coro : `None` or `coroutine`\n ' response_state = interaction_event._response_state force_new_message = self._force_new_message if force_new_message: if (force_new_message > 0): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return else: if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): if ('file' in self._parameters): show_for_invoking_user_only = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) else: response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, **response_parameters)) return response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return else: if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): if ('file' in self._parameters): show_for_invoking_user_only = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) else: response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, **response_parameters)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_DEFERRED): response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_RESPONDED): response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return
def get_request_coros(self, client, interaction_event, show_for_invoking_user_only): '\n Gets request coroutine buildable from the ``SlashResponse``.\n \n This method is a generator, which should be used inside of a `for` loop.\n \n client : ``Client``\n The client who will send the responses if applicable.\n interaction_event : ``InteractionEvent``\n The respective event to respond on.\n show_for_invoking_user_only : `bool`\n Whether the response message should only be shown for the invoking user.\n \n Yields\n -------\n request_coro : `None` or `coroutine`\n ' response_state = interaction_event._response_state force_new_message = self._force_new_message if force_new_message: if (force_new_message > 0): if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): (yield client.interaction_response_message_create(interaction_event)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return else: if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): if ('file' in self._parameters): show_for_invoking_user_only = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) else: response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, **response_parameters)) return response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return else: if (response_state == INTERACTION_EVENT_RESPONSE_STATE_NONE): if ('file' in self._parameters): show_for_invoking_user_only = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, show_for_invoking_user_only=show_for_invoking_user_only)) response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) else: response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_response_message_create(interaction_event, **response_parameters)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_DEFERRED): response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file')) (yield client.interaction_response_message_edit(interaction_event, **response_parameters)) return if (response_state == INTERACTION_EVENT_RESPONSE_STATE_RESPONDED): response_parameters = self._get_response_parameters(('allowed_mentions', 'content', 'embed', 'file', 'tts')) response_parameters['show_for_invoking_user_only'] = self._parameters.get('show_for_invoking_user_only', show_for_invoking_user_only) (yield client.interaction_followup_message_create(interaction_event, **response_parameters)) return<|docstring|>Gets request coroutine buildable from the ``SlashResponse``. This method is a generator, which should be used inside of a `for` loop. client : ``Client`` The client who will send the responses if applicable. interaction_event : ``InteractionEvent`` The respective event to respond on. show_for_invoking_user_only : `bool` Whether the response message should only be shown for the invoking user. Yields ------- request_coro : `None` or `coroutine`<|endoftext|>
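The generator above dispatches on the interaction's response state (none, deferred, responded) and on `force_new_message`, yielding one Discord API coroutine at a time. A rough sketch of how a command runner might consume it is given below; only the `get_request_coros` call itself comes from the record, the surrounding coroutine is an assumption.

async def send_slash_response(client, interaction_event, response, show_for_invoking_user_only=False):
    # Drive the generator and await each yielded request coroutine in order.
    for request_coro in response.get_request_coros(client, interaction_event, show_for_invoking_user_only):
        if request_coro is None:
            continue
        await request_coro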
8f18684ad28fd31b242696a04f7b4b1b5e9b1059cfee0e0169fbc220cd76f35e
def __repr__(self): "Returns the slash response's representation." result = ['<', self.__class__.__name__, ' '] if self._force_new_message: result.append('(force new message) ') parameters = self._parameters if parameters: for (key, value) in parameters.items(): result.append(key) result.append('=') result.append(repr(value)) result.append(', ') result[(- 1)] = '>' else: result.append('>') return ''.join(result)
Returns the slash response's representation.
hata/ext/slash/responding.py
__repr__
asleep-cult/hata
0
python
def __repr__(self): result = ['<', self.__class__.__name__, ' '] if self._force_new_message: result.append('(force new message) ') parameters = self._parameters if parameters: for (key, value) in parameters.items(): result.append(key) result.append('=') result.append(repr(value)) result.append(', ') result[(- 1)] = '>' else: result.append('>') return ''.join(result)
def __repr__(self): result = ['<', self.__class__.__name__, ' '] if self._force_new_message: result.append('(force new message) ') parameters = self._parameters if parameters: for (key, value) in parameters.items(): result.append(key) result.append('=') result.append(repr(value)) result.append(', ') result[(- 1)] = '>' else: result.append('>') return ''.join(result)<|docstring|>Returns the slash response's representation.<|endoftext|>
51855473bed46c50be89308644d4f0bb093b925895ce2f58ab32e488b87a741c
def __init__(self, response): '\n Creates a new ``InteractionAbortedError`` instance with the given response.\n \n Parameters\n ----------\n response : ``SlashResponse``\n The response to send.\n ' self.response = response BaseException.__init__(self, response)
Creates a new ``InteractionAbortedError`` instance with the given response. Parameters ---------- response : ``SlashResponse`` The response to send.
hata/ext/slash/responding.py
__init__
asleep-cult/hata
0
python
def __init__(self, response): '\n Creates a new ``InteractionAbortedError`` instance with the given response.\n \n Parameters\n ----------\n response : ``SlashResponse``\n The response to send.\n ' self.response = response BaseException.__init__(self, response)
def __init__(self, response): '\n Creates a new ``InteractionAbortedError`` instance with the given response.\n \n Parameters\n ----------\n response : ``SlashResponse``\n The response to send.\n ' self.response = response BaseException.__init__(self, response)<|docstring|>Creates a new ``InteractionAbortedError`` instance with the given response. Parameters ---------- response : ``SlashResponse`` The response to send.<|endoftext|>
df21c598289e9404ea4abded861b3300f2c1f6fb2bc43579bcedd92a59816026
def __repr__(self): "Returns the exception's representation." return f'{self.__class__.__name__}({self.response!r})'
Returns the exception's representation.
hata/ext/slash/responding.py
__repr__
asleep-cult/hata
0
python
def __repr__(self): return f'{self.__class__.__name__}({self.response!r})'
def __repr__(self): return f'{self.__class__.__name__}({self.response!r})'<|docstring|>Returns the exception's representation.<|endoftext|>
ebd30a773a9ee97aef335333e13e05bf269a4b2c35870ffb6dd02c98e5011f44
def test_write_jpeg(): 'See if Pillow can write JPEG (tests linkage against mozjpeg)' im = Image.new('RGB', (10, 10)) buffer = BytesIO() im.save(buffer, format='JPEG') if (sys.version_info[0] == 2): buffer.seek(0) size = len(buffer.read()) else: size = buffer.getbuffer().nbytes if (size != 375): logger.error('JPEG optimization is not working as expected! size=%s', size)
See if Pillow can write JPEG (tests linkage against mozjpeg)
src/tests/test_jpeg.py
test_write_jpeg
edoburu/demo.django-fluent.org
24
python
def test_write_jpeg(): im = Image.new('RGB', (10, 10)) buffer = BytesIO() im.save(buffer, format='JPEG') if (sys.version_info[0] == 2): buffer.seek(0) size = len(buffer.read()) else: size = buffer.getbuffer().nbytes if (size != 375): logger.error('JPEG optimization is not working as expected! size=%s', size)
def test_write_jpeg(): im = Image.new('RGB', (10, 10)) buffer = BytesIO() im.save(buffer, format='JPEG') if (sys.version_info[0] == 2): buffer.seek(0) size = len(buffer.read()) else: size = buffer.getbuffer().nbytes if (size != 375): logger.error('JPEG optimization is not working as expected! size=%s', size)<|docstring|>See if Pillow can write JPEG (tests linkage against mozjpeg)<|endoftext|>
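The test above exercises Pillow's JPEG encoder and compares the output size against 375 bytes, a figure specific to a mozjpeg-linked build. A stand-alone variant that simply reports the encoded size looks like this (Python 3 only, so the Python 2 branch is dropped):

from io import BytesIO

from PIL import Image

im = Image.new('RGB', (10, 10))
buffer = BytesIO()
im.save(buffer, format='JPEG')
print('10x10 RGB JPEG size:', buffer.getbuffer().nbytes, 'bytes')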
70664de1827930684106fc23b9e6950944a1ff3266b656b51c8b348d4a5de8e2
def _astroid_interface_for_visitor(visitor_function): 'Turn codewatch visitors into astroid-compatible transform functions\n\n codewatch visitors can make use of 3 args, the node, stats, and the\n relative file path you were visited for\n\n astroid transforms must take only the node\n\n By annotating the node with stats and relative file path, we can make our\n codewatch visitors compatible with astroid transform functions.\n ' @wraps(visitor_function) def call_visitor(annotated_node, *args, **kwargs): return visitor_function(annotated_node, annotated_node._codewatch.stats, annotated_node._codewatch.rel_file_path, *args, **kwargs) return call_visitor
Turn codewatch visitors into astroid-compatible transform functions codewatch visitors can make use of 3 args, the node, stats, and the relative file path you were visited for astroid transforms must take only the node By annotating the node with stats and relative file path, we can make our codewatch visitors compatible with astroid transform functions.
codewatch/node_visitor.py
_astroid_interface_for_visitor
kazu9su/codewatch
39
python
def _astroid_interface_for_visitor(visitor_function): 'Turn codewatch visitors into astroid-compatible transform functions\n\n codewatch visitors can make use of 3 args, the node, stats, and the\n relative file path you were visited for\n\n astroid transforms must take only the node\n\n By annotating the node with stats and relative file path, we can make our\n codewatch visitors compatible with astroid transform functions.\n ' @wraps(visitor_function) def call_visitor(annotated_node, *args, **kwargs): return visitor_function(annotated_node, annotated_node._codewatch.stats, annotated_node._codewatch.rel_file_path, *args, **kwargs) return call_visitor
def _astroid_interface_for_visitor(visitor_function): 'Turn codewatch visitors into astroid-compatible transform functions\n\n codewatch visitors can make use of 3 args, the node, stats, and the\n relative file path you were visited for\n\n astroid transforms must take only the node\n\n By annotating the node with stats and relative file path, we can make our\n codewatch visitors compatible with astroid transform functions.\n ' @wraps(visitor_function) def call_visitor(annotated_node, *args, **kwargs): return visitor_function(annotated_node, annotated_node._codewatch.stats, annotated_node._codewatch.rel_file_path, *args, **kwargs) return call_visitor<|docstring|>Turn codewatch visitors into astroid-compatible transform functions codewatch visitors can make use of 3 args, the node, stats, and the relative file path you were visited for astroid transforms must take only the node By annotating the node with stats and relative file path, we can make our codewatch visitors compatible with astroid transform functions.<|endoftext|>
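A sketch of the adapter at work: a codewatch-style visitor taking `(node, stats, rel_file_path)` is wrapped so astroid can invoke it with the node alone, provided the node carries a `_codewatch` annotation. The fake node and the plain-dict stats below are illustrative stand-ins, not codewatch's real objects.

from types import SimpleNamespace

from codewatch.node_visitor import _astroid_interface_for_visitor

def count_nodes(node, stats, rel_file_path):
    # Tally one visited node per file path.
    stats[rel_file_path] = stats.get(rel_file_path, 0) + 1

astroid_transform = _astroid_interface_for_visitor(count_nodes)

fake_node = SimpleNamespace(_codewatch=SimpleNamespace(stats={}, rel_file_path='pkg/mod.py'))
astroid_transform(fake_node)
print(fake_node._codewatch.stats)   # {'pkg/mod.py': 1}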
35512da6b5b991b297cc54f739d2189e1c8d83e68a8e83aae139ccfb7143886c
def __init__(self, type=None, is_active=None, percentage=None, fixed_cost=None, field=None, projects=None, portfolio=None, valid_values=None, _configuration=None): 'Constraint - a model defined in Swagger' if (_configuration is None): _configuration = Configuration() self._configuration = _configuration self._type = None self._is_active = None self._percentage = None self._fixed_cost = None self._field = None self._projects = None self._portfolio = None self._valid_values = None self.discriminator = None if (type is not None): self.type = type if (is_active is not None): self.is_active = is_active if (percentage is not None): self.percentage = percentage if (fixed_cost is not None): self.fixed_cost = fixed_cost if (field is not None): self.field = field if (projects is not None): self.projects = projects if (portfolio is not None): self.portfolio = portfolio if (valid_values is not None): self.valid_values = valid_values
Constraint - a model defined in Swagger
python/dlxapi/models/constraint.py
__init__
dlens/dlxapi
0
python
def __init__(self, type=None, is_active=None, percentage=None, fixed_cost=None, field=None, projects=None, portfolio=None, valid_values=None, _configuration=None): if (_configuration is None): _configuration = Configuration() self._configuration = _configuration self._type = None self._is_active = None self._percentage = None self._fixed_cost = None self._field = None self._projects = None self._portfolio = None self._valid_values = None self.discriminator = None if (type is not None): self.type = type if (is_active is not None): self.is_active = is_active if (percentage is not None): self.percentage = percentage if (fixed_cost is not None): self.fixed_cost = fixed_cost if (field is not None): self.field = field if (projects is not None): self.projects = projects if (portfolio is not None): self.portfolio = portfolio if (valid_values is not None): self.valid_values = valid_values
def __init__(self, type=None, is_active=None, percentage=None, fixed_cost=None, field=None, projects=None, portfolio=None, valid_values=None, _configuration=None): if (_configuration is None): _configuration = Configuration() self._configuration = _configuration self._type = None self._is_active = None self._percentage = None self._fixed_cost = None self._field = None self._projects = None self._portfolio = None self._valid_values = None self.discriminator = None if (type is not None): self.type = type if (is_active is not None): self.is_active = is_active if (percentage is not None): self.percentage = percentage if (fixed_cost is not None): self.fixed_cost = fixed_cost if (field is not None): self.field = field if (projects is not None): self.projects = projects if (portfolio is not None): self.portfolio = portfolio if (valid_values is not None): self.valid_values = valid_values<|docstring|>Constraint - a model defined in Swagger<|endoftext|>
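A small sketch of using the generated `Constraint` model: construct it with a few plain fields and serialize it with the helpers defined in the records that follow. The field values are illustrative, the nested `type`/`field`/`projects`/`portfolio` models are left unset for brevity, and the import path is inferred from the record's `path` field.

from dlxapi.models.constraint import Constraint

constraint = Constraint(is_active=True, percentage=25.0, fixed_cost=1000.0)
print(constraint.to_dict())   # plain dict with all swagger-declared attributes
print(constraint == Constraint(is_active=True, percentage=25.0, fixed_cost=1000.0))   # True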
fc807628e93915f20cae05e4317c83b52fbf6ec812bcbcdea7004e2158311d62
@property def type(self): 'Gets the type of this Constraint. # noqa: E501\n\n\n :return: The type of this Constraint. # noqa: E501\n :rtype: ConstraintType\n ' return self._type
Gets the type of this Constraint. # noqa: E501 :return: The type of this Constraint. # noqa: E501 :rtype: ConstraintType
python/dlxapi/models/constraint.py
type
dlens/dlxapi
0
python
@property def type(self): 'Gets the type of this Constraint. # noqa: E501\n\n\n :return: The type of this Constraint. # noqa: E501\n :rtype: ConstraintType\n ' return self._type
@property def type(self): 'Gets the type of this Constraint. # noqa: E501\n\n\n :return: The type of this Constraint. # noqa: E501\n :rtype: ConstraintType\n ' return self._type<|docstring|>Gets the type of this Constraint. # noqa: E501 :return: The type of this Constraint. # noqa: E501 :rtype: ConstraintType<|endoftext|>
9133ab96c666415eaaf3f3ee071f844352968f295f58fdc0caa2b2e3cdfc7715
@type.setter def type(self, type): 'Sets the type of this Constraint.\n\n\n :param type: The type of this Constraint. # noqa: E501\n :type: ConstraintType\n ' self._type = type
Sets the type of this Constraint. :param type: The type of this Constraint. # noqa: E501 :type: ConstraintType
python/dlxapi/models/constraint.py
type
dlens/dlxapi
0
python
@type.setter def type(self, type): 'Sets the type of this Constraint.\n\n\n :param type: The type of this Constraint. # noqa: E501\n :type: ConstraintType\n ' self._type = type
@type.setter def type(self, type): 'Sets the type of this Constraint.\n\n\n :param type: The type of this Constraint. # noqa: E501\n :type: ConstraintType\n ' self._type = type<|docstring|>Sets the type of this Constraint. :param type: The type of this Constraint. # noqa: E501 :type: ConstraintType<|endoftext|>
fa2489278b76094bd36939f95741e13528ceb54c839bc8e21dc0b9306e7d43e1
@property def is_active(self): 'Gets the is_active of this Constraint. # noqa: E501\n\n\n :return: The is_active of this Constraint. # noqa: E501\n :rtype: bool\n ' return self._is_active
Gets the is_active of this Constraint. # noqa: E501 :return: The is_active of this Constraint. # noqa: E501 :rtype: bool
python/dlxapi/models/constraint.py
is_active
dlens/dlxapi
0
python
@property def is_active(self): 'Gets the is_active of this Constraint. # noqa: E501\n\n\n :return: The is_active of this Constraint. # noqa: E501\n :rtype: bool\n ' return self._is_active
@property def is_active(self): 'Gets the is_active of this Constraint. # noqa: E501\n\n\n :return: The is_active of this Constraint. # noqa: E501\n :rtype: bool\n ' return self._is_active<|docstring|>Gets the is_active of this Constraint. # noqa: E501 :return: The is_active of this Constraint. # noqa: E501 :rtype: bool<|endoftext|>
06e5879e277b90abd0189af7574ed864b5ead9c140e7fd1cd27a9493b551eb60
@is_active.setter def is_active(self, is_active): 'Sets the is_active of this Constraint.\n\n\n :param is_active: The is_active of this Constraint. # noqa: E501\n :type: bool\n ' self._is_active = is_active
Sets the is_active of this Constraint. :param is_active: The is_active of this Constraint. # noqa: E501 :type: bool
python/dlxapi/models/constraint.py
is_active
dlens/dlxapi
0
python
@is_active.setter def is_active(self, is_active): 'Sets the is_active of this Constraint.\n\n\n :param is_active: The is_active of this Constraint. # noqa: E501\n :type: bool\n ' self._is_active = is_active
@is_active.setter def is_active(self, is_active): 'Sets the is_active of this Constraint.\n\n\n :param is_active: The is_active of this Constraint. # noqa: E501\n :type: bool\n ' self._is_active = is_active<|docstring|>Sets the is_active of this Constraint. :param is_active: The is_active of this Constraint. # noqa: E501 :type: bool<|endoftext|>
84e71e6d4423f2083e04084512574609ec165c5265f30f2c5180473ff802c7b4
@property def percentage(self): 'Gets the percentage of this Constraint. # noqa: E501\n\n\n :return: The percentage of this Constraint. # noqa: E501\n :rtype: float\n ' return self._percentage
Gets the percentage of this Constraint. # noqa: E501 :return: The percentage of this Constraint. # noqa: E501 :rtype: float
python/dlxapi/models/constraint.py
percentage
dlens/dlxapi
0
python
@property def percentage(self): 'Gets the percentage of this Constraint. # noqa: E501\n\n\n :return: The percentage of this Constraint. # noqa: E501\n :rtype: float\n ' return self._percentage
@property def percentage(self): 'Gets the percentage of this Constraint. # noqa: E501\n\n\n :return: The percentage of this Constraint. # noqa: E501\n :rtype: float\n ' return self._percentage<|docstring|>Gets the percentage of this Constraint. # noqa: E501 :return: The percentage of this Constraint. # noqa: E501 :rtype: float<|endoftext|>
6be4da2b3ccdc09c3e68e49af5956ee04ecc3b79a7a9b71a40a67ca91835c5b1
@percentage.setter def percentage(self, percentage): 'Sets the percentage of this Constraint.\n\n\n :param percentage: The percentage of this Constraint. # noqa: E501\n :type: float\n ' self._percentage = percentage
Sets the percentage of this Constraint. :param percentage: The percentage of this Constraint. # noqa: E501 :type: float
python/dlxapi/models/constraint.py
percentage
dlens/dlxapi
0
python
@percentage.setter def percentage(self, percentage): 'Sets the percentage of this Constraint.\n\n\n :param percentage: The percentage of this Constraint. # noqa: E501\n :type: float\n ' self._percentage = percentage
@percentage.setter def percentage(self, percentage): 'Sets the percentage of this Constraint.\n\n\n :param percentage: The percentage of this Constraint. # noqa: E501\n :type: float\n ' self._percentage = percentage<|docstring|>Sets the percentage of this Constraint. :param percentage: The percentage of this Constraint. # noqa: E501 :type: float<|endoftext|>
43ebdf82c14f12601a76d8018c4aa78ddc5e84ce17ec1d353fc415a628287d12
@property def fixed_cost(self): 'Gets the fixed_cost of this Constraint. # noqa: E501\n\n\n :return: The fixed_cost of this Constraint. # noqa: E501\n :rtype: float\n ' return self._fixed_cost
Gets the fixed_cost of this Constraint. # noqa: E501 :return: The fixed_cost of this Constraint. # noqa: E501 :rtype: float
python/dlxapi/models/constraint.py
fixed_cost
dlens/dlxapi
0
python
@property def fixed_cost(self): 'Gets the fixed_cost of this Constraint. # noqa: E501\n\n\n :return: The fixed_cost of this Constraint. # noqa: E501\n :rtype: float\n ' return self._fixed_cost
@property def fixed_cost(self): 'Gets the fixed_cost of this Constraint. # noqa: E501\n\n\n :return: The fixed_cost of this Constraint. # noqa: E501\n :rtype: float\n ' return self._fixed_cost<|docstring|>Gets the fixed_cost of this Constraint. # noqa: E501 :return: The fixed_cost of this Constraint. # noqa: E501 :rtype: float<|endoftext|>
74fb38c7a55112950132de3e40ddd27580d3f39caaaff052ad8c540491f546d5
@fixed_cost.setter def fixed_cost(self, fixed_cost): 'Sets the fixed_cost of this Constraint.\n\n\n :param fixed_cost: The fixed_cost of this Constraint. # noqa: E501\n :type: float\n ' self._fixed_cost = fixed_cost
Sets the fixed_cost of this Constraint. :param fixed_cost: The fixed_cost of this Constraint. # noqa: E501 :type: float
python/dlxapi/models/constraint.py
fixed_cost
dlens/dlxapi
0
python
@fixed_cost.setter def fixed_cost(self, fixed_cost): 'Sets the fixed_cost of this Constraint.\n\n\n :param fixed_cost: The fixed_cost of this Constraint. # noqa: E501\n :type: float\n ' self._fixed_cost = fixed_cost
@fixed_cost.setter def fixed_cost(self, fixed_cost): 'Sets the fixed_cost of this Constraint.\n\n\n :param fixed_cost: The fixed_cost of this Constraint. # noqa: E501\n :type: float\n ' self._fixed_cost = fixed_cost<|docstring|>Sets the fixed_cost of this Constraint. :param fixed_cost: The fixed_cost of this Constraint. # noqa: E501 :type: float<|endoftext|>
ffe86f296d59c908803c4f6c6f1b10464ac407c7a6f37f38f654905e8385ac0d
@property def field(self): 'Gets the field of this Constraint. # noqa: E501\n\n\n :return: The field of this Constraint. # noqa: E501\n :rtype: Field\n ' return self._field
Gets the field of this Constraint. # noqa: E501 :return: The field of this Constraint. # noqa: E501 :rtype: Field
python/dlxapi/models/constraint.py
field
dlens/dlxapi
0
python
@property def field(self): 'Gets the field of this Constraint. # noqa: E501\n\n\n :return: The field of this Constraint. # noqa: E501\n :rtype: Field\n ' return self._field
@property def field(self): 'Gets the field of this Constraint. # noqa: E501\n\n\n :return: The field of this Constraint. # noqa: E501\n :rtype: Field\n ' return self._field<|docstring|>Gets the field of this Constraint. # noqa: E501 :return: The field of this Constraint. # noqa: E501 :rtype: Field<|endoftext|>
452f22a911818c19f61ae2b852d8a50f18fc4ac9c07cf22825bf32fb77354cee
@field.setter def field(self, field): 'Sets the field of this Constraint.\n\n\n :param field: The field of this Constraint. # noqa: E501\n :type: Field\n ' self._field = field
Sets the field of this Constraint. :param field: The field of this Constraint. # noqa: E501 :type: Field
python/dlxapi/models/constraint.py
field
dlens/dlxapi
0
python
@field.setter def field(self, field): 'Sets the field of this Constraint.\n\n\n :param field: The field of this Constraint. # noqa: E501\n :type: Field\n ' self._field = field
@field.setter def field(self, field): 'Sets the field of this Constraint.\n\n\n :param field: The field of this Constraint. # noqa: E501\n :type: Field\n ' self._field = field<|docstring|>Sets the field of this Constraint. :param field: The field of this Constraint. # noqa: E501 :type: Field<|endoftext|>
6efe63fafc41c16a29d1858364af93ce26dd9bc029b6c6c876d565d4b1de7435
@property def projects(self): 'Gets the projects of this Constraint. # noqa: E501\n\n\n :return: The projects of this Constraint. # noqa: E501\n :rtype: Projects\n ' return self._projects
Gets the projects of this Constraint. # noqa: E501 :return: The projects of this Constraint. # noqa: E501 :rtype: Projects
python/dlxapi/models/constraint.py
projects
dlens/dlxapi
0
python
@property def projects(self): 'Gets the projects of this Constraint. # noqa: E501\n\n\n :return: The projects of this Constraint. # noqa: E501\n :rtype: Projects\n ' return self._projects
@property def projects(self): 'Gets the projects of this Constraint. # noqa: E501\n\n\n :return: The projects of this Constraint. # noqa: E501\n :rtype: Projects\n ' return self._projects<|docstring|>Gets the projects of this Constraint. # noqa: E501 :return: The projects of this Constraint. # noqa: E501 :rtype: Projects<|endoftext|>
a110b31b915c6ed70eb6de1056824c8266262a40c4a98944317abb48ccac6fda
@projects.setter def projects(self, projects): 'Sets the projects of this Constraint.\n\n\n :param projects: The projects of this Constraint. # noqa: E501\n :type: Projects\n ' self._projects = projects
Sets the projects of this Constraint. :param projects: The projects of this Constraint. # noqa: E501 :type: Projects
python/dlxapi/models/constraint.py
projects
dlens/dlxapi
0
python
@projects.setter def projects(self, projects): 'Sets the projects of this Constraint.\n\n\n :param projects: The projects of this Constraint. # noqa: E501\n :type: Projects\n ' self._projects = projects
@projects.setter def projects(self, projects): 'Sets the projects of this Constraint.\n\n\n :param projects: The projects of this Constraint. # noqa: E501\n :type: Projects\n ' self._projects = projects<|docstring|>Sets the projects of this Constraint. :param projects: The projects of this Constraint. # noqa: E501 :type: Projects<|endoftext|>
13b2128957c2283404a0a5f7ba60d91782c9e7fdf1c1cb816b0fb0c10efc9551
@property def portfolio(self): 'Gets the portfolio of this Constraint. # noqa: E501\n\n\n :return: The portfolio of this Constraint. # noqa: E501\n :rtype: Portfolio\n ' return self._portfolio
Gets the portfolio of this Constraint. # noqa: E501 :return: The portfolio of this Constraint. # noqa: E501 :rtype: Portfolio
python/dlxapi/models/constraint.py
portfolio
dlens/dlxapi
0
python
@property def portfolio(self): 'Gets the portfolio of this Constraint. # noqa: E501\n\n\n :return: The portfolio of this Constraint. # noqa: E501\n :rtype: Portfolio\n ' return self._portfolio
@property def portfolio(self): 'Gets the portfolio of this Constraint. # noqa: E501\n\n\n :return: The portfolio of this Constraint. # noqa: E501\n :rtype: Portfolio\n ' return self._portfolio<|docstring|>Gets the portfolio of this Constraint. # noqa: E501 :return: The portfolio of this Constraint. # noqa: E501 :rtype: Portfolio<|endoftext|>
0c0691230bb3ec87226e45039c7f66e4d0463b0764140081d96dbc1dcccb927f
@portfolio.setter def portfolio(self, portfolio): 'Sets the portfolio of this Constraint.\n\n\n :param portfolio: The portfolio of this Constraint. # noqa: E501\n :type: Portfolio\n ' self._portfolio = portfolio
Sets the portfolio of this Constraint. :param portfolio: The portfolio of this Constraint. # noqa: E501 :type: Portfolio
python/dlxapi/models/constraint.py
portfolio
dlens/dlxapi
0
python
@portfolio.setter def portfolio(self, portfolio): 'Sets the portfolio of this Constraint.\n\n\n :param portfolio: The portfolio of this Constraint. # noqa: E501\n :type: Portfolio\n ' self._portfolio = portfolio
@portfolio.setter def portfolio(self, portfolio): 'Sets the portfolio of this Constraint.\n\n\n :param portfolio: The portfolio of this Constraint. # noqa: E501\n :type: Portfolio\n ' self._portfolio = portfolio<|docstring|>Sets the portfolio of this Constraint. :param portfolio: The portfolio of this Constraint. # noqa: E501 :type: Portfolio<|endoftext|>
1ba93649db21123f3ae18e33f1da8b610196240903d8faca241fdf277e946d72
@property def valid_values(self): 'Gets the valid_values of this Constraint. # noqa: E501\n\n\n :return: The valid_values of this Constraint. # noqa: E501\n :rtype: list[ValidConstraintValue]\n ' return self._valid_values
Gets the valid_values of this Constraint. # noqa: E501 :return: The valid_values of this Constraint. # noqa: E501 :rtype: list[ValidConstraintValue]
python/dlxapi/models/constraint.py
valid_values
dlens/dlxapi
0
python
@property def valid_values(self): 'Gets the valid_values of this Constraint. # noqa: E501\n\n\n :return: The valid_values of this Constraint. # noqa: E501\n :rtype: list[ValidConstraintValue]\n ' return self._valid_values
@property def valid_values(self): 'Gets the valid_values of this Constraint. # noqa: E501\n\n\n :return: The valid_values of this Constraint. # noqa: E501\n :rtype: list[ValidConstraintValue]\n ' return self._valid_values<|docstring|>Gets the valid_values of this Constraint. # noqa: E501 :return: The valid_values of this Constraint. # noqa: E501 :rtype: list[ValidConstraintValue]<|endoftext|>
21b62786d7e3fb8f1f3ce841d3f729bd64f50556d05bc77e5945bebc0a2028fc
@valid_values.setter def valid_values(self, valid_values): 'Sets the valid_values of this Constraint.\n\n\n :param valid_values: The valid_values of this Constraint. # noqa: E501\n :type: list[ValidConstraintValue]\n ' self._valid_values = valid_values
Sets the valid_values of this Constraint. :param valid_values: The valid_values of this Constraint. # noqa: E501 :type: list[ValidConstraintValue]
python/dlxapi/models/constraint.py
valid_values
dlens/dlxapi
0
python
@valid_values.setter def valid_values(self, valid_values): 'Sets the valid_values of this Constraint.\n\n\n :param valid_values: The valid_values of this Constraint. # noqa: E501\n :type: list[ValidConstraintValue]\n ' self._valid_values = valid_values
@valid_values.setter def valid_values(self, valid_values): 'Sets the valid_values of this Constraint.\n\n\n :param valid_values: The valid_values of this Constraint. # noqa: E501\n :type: list[ValidConstraintValue]\n ' self._valid_values = valid_values<|docstring|>Sets the valid_values of this Constraint. :param valid_values: The valid_values of this Constraint. # noqa: E501 :type: list[ValidConstraintValue]<|endoftext|>
c01b5d5247cf6cce880ba7f715edc961da5cf04c0d485b9a39ba333b391d1da7
def to_dict(self): 'Returns the model properties as a dict' result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(Constraint, dict): for (key, value) in self.items(): result[key] = value return result
Returns the model properties as a dict
python/dlxapi/models/constraint.py
to_dict
dlens/dlxapi
0
python
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(Constraint, dict): for (key, value) in self.items(): result[key] = value return result
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(Constraint, dict): for (key, value) in self.items(): result[key] = value return result<|docstring|>Returns the model properties as a dict<|endoftext|>
cbb19eaa2fc8a113d9e32f924ef280a7e97563f8915f94f65dab438997af2e99
def to_str(self): 'Returns the string representation of the model' return pprint.pformat(self.to_dict())
Returns the string representation of the model
python/dlxapi/models/constraint.py
to_str
dlens/dlxapi
0
python
def to_str(self): return pprint.pformat(self.to_dict())
def to_str(self): return pprint.pformat(self.to_dict())<|docstring|>Returns the string representation of the model<|endoftext|>
772243a2c2b3261a9b954d07aaf295e3c1242a579a495e2d6a5679c677861703
def __repr__(self): 'For `print` and `pprint`' return self.to_str()
For `print` and `pprint`
python/dlxapi/models/constraint.py
__repr__
dlens/dlxapi
0
python
def __repr__(self): return self.to_str()
def __repr__(self): return self.to_str()<|docstring|>For `print` and `pprint`<|endoftext|>
c76e1b60ca72c2286bd4f6c997206257a140b9c7e2561a29b704fdc25747898f
def __eq__(self, other): 'Returns true if both objects are equal' if (not isinstance(other, Constraint)): return False return (self.to_dict() == other.to_dict())
Returns true if both objects are equal
python/dlxapi/models/constraint.py
__eq__
dlens/dlxapi
0
python
def __eq__(self, other): if (not isinstance(other, Constraint)): return False return (self.to_dict() == other.to_dict())
def __eq__(self, other): if (not isinstance(other, Constraint)): return False return (self.to_dict() == other.to_dict())<|docstring|>Returns true if both objects are equal<|endoftext|>
779b09800607cc29a60dc6a61f6a4267875a6bf47b755c2afb89b4abe12b628e
def __ne__(self, other): 'Returns true if both objects are not equal' if (not isinstance(other, Constraint)): return True return (self.to_dict() != other.to_dict())
Returns true if both objects are not equal
python/dlxapi/models/constraint.py
__ne__
dlens/dlxapi
0
python
def __ne__(self, other): if (not isinstance(other, Constraint)): return True return (self.to_dict() != other.to_dict())
def __ne__(self, other): if (not isinstance(other, Constraint)): return True return (self.to_dict() != other.to_dict())<|docstring|>Returns true if both objects are not equal<|endoftext|>
11d719f9fca2ac8a775c85ef6cde28ca44820d7db0919b49b3bac0f9e62fdded
def repeat(func, args=(), kwargs={}, n_repeat=10000, *, name=None, n_warmup=10, max_duration=_math.inf, devices=None): " Timing utility for measuring time spent by both CPU and GPU.\n\n This function is a very convenient helper for setting up a timing test. The\n GPU time is properly recorded by synchronizing internal streams. As a\n result, to time a multi-GPU function all participating devices must be\n passed as the ``devices`` argument so that this helper knows which devices\n to record. A simple example is given as follows:\n\n .. code-block:: py\n\n import cupy as cp\n from cupyx.time import repeat\n\n def f(a, b):\n return 3 * cp.sin(-a) * b\n\n a = 0.5 - cp.random.random((100,))\n b = cp.random.random((100,))\n print(repeat(f, (a, b), n_repeat=1000))\n\n\n Args:\n func (callable): a callable object to be timed.\n args (tuple): positional argumens to be passed to the callable.\n kwargs (dict): keyword arguments to be passed to the callable.\n n_repeat (int): number of times the callable is called. Increasing\n this value would improve the collected statistics at the cost\n of longer test time.\n name (str): the function name to be reported. If not given, the\n callable's ``__name__`` attribute is used.\n n_warmup (int): number of times the callable is called. The warm-up\n runs are not timed.\n max_duration (float): the maximum time (in seconds) that the entire\n test can use. If the taken time is longer than this limit, the test\n is stopped and the statistics collected up to the breakpoint is\n reported.\n devices (tuple): a tuple of device IDs (int) that will be timed during\n the timing test. If not given, the current device is used.\n\n Returns:\n :class:`_PerfCaseResult`: an object collecting all test results.\n\n .. warning::\n This API is currently experimental and subject to change in future\n releases.\n\n " if (name is None): name = func.__name__ if (devices is None): devices = (_cupy.cuda.get_device_id(),) if (not callable(func)): raise ValueError('`func` should be a callable object.') if (not isinstance(args, tuple)): raise ValueError('`args` should be of tuple type.') if (not isinstance(kwargs, dict)): raise ValueError('`kwargs` should be of dict type.') if (not isinstance(n_repeat, int)): raise ValueError('`n_repeat` should be an integer.') if (not isinstance(name, str)): raise ValueError('`name` should be a string.') if (not isinstance(n_warmup, int)): raise ValueError('`n_warmup` should be an integer.') if (not _numpy.isreal(max_duration)): raise ValueError('`max_duration` should be given in seconds') if (not isinstance(devices, tuple)): raise ValueError('`devices` should be of tuple type') return _repeat(func, args, kwargs, n_repeat, name, n_warmup, max_duration, devices)
Timing utility for measuring time spent by both CPU and GPU. This function is a very convenient helper for setting up a timing test. The GPU time is properly recorded by synchronizing internal streams. As a result, to time a multi-GPU function all participating devices must be passed as the ``devices`` argument so that this helper knows which devices to record. A simple example is given as follows: .. code-block:: py import cupy as cp from cupyx.time import repeat def f(a, b): return 3 * cp.sin(-a) * b a = 0.5 - cp.random.random((100,)) b = cp.random.random((100,)) print(repeat(f, (a, b), n_repeat=1000)) Args: func (callable): a callable object to be timed. args (tuple): positional argumens to be passed to the callable. kwargs (dict): keyword arguments to be passed to the callable. n_repeat (int): number of times the callable is called. Increasing this value would improve the collected statistics at the cost of longer test time. name (str): the function name to be reported. If not given, the callable's ``__name__`` attribute is used. n_warmup (int): number of times the callable is called. The warm-up runs are not timed. max_duration (float): the maximum time (in seconds) that the entire test can use. If the taken time is longer than this limit, the test is stopped and the statistics collected up to the breakpoint is reported. devices (tuple): a tuple of device IDs (int) that will be timed during the timing test. If not given, the current device is used. Returns: :class:`_PerfCaseResult`: an object collecting all test results. .. warning:: This API is currently experimental and subject to change in future releases.
demo/_time_gpu.py
repeat
grlee77/uskimage-demo
0
python
def repeat(func, args=(), kwargs={}, n_repeat=10000, *, name=None, n_warmup=10, max_duration=_math.inf, devices=None): " Timing utility for measuring time spent by both CPU and GPU.\n\n This function is a very convenient helper for setting up a timing test. The\n GPU time is properly recorded by synchronizing internal streams. As a\n result, to time a multi-GPU function all participating devices must be\n passed as the ``devices`` argument so that this helper knows which devices\n to record. A simple example is given as follows:\n\n .. code-block:: py\n\n import cupy as cp\n from cupyx.time import repeat\n\n def f(a, b):\n return 3 * cp.sin(-a) * b\n\n a = 0.5 - cp.random.random((100,))\n b = cp.random.random((100,))\n print(repeat(f, (a, b), n_repeat=1000))\n\n\n Args:\n func (callable): a callable object to be timed.\n args (tuple): positional argumens to be passed to the callable.\n kwargs (dict): keyword arguments to be passed to the callable.\n n_repeat (int): number of times the callable is called. Increasing\n this value would improve the collected statistics at the cost\n of longer test time.\n name (str): the function name to be reported. If not given, the\n callable's ``__name__`` attribute is used.\n n_warmup (int): number of times the callable is called. The warm-up\n runs are not timed.\n max_duration (float): the maximum time (in seconds) that the entire\n test can use. If the taken time is longer than this limit, the test\n is stopped and the statistics collected up to the breakpoint is\n reported.\n devices (tuple): a tuple of device IDs (int) that will be timed during\n the timing test. If not given, the current device is used.\n\n Returns:\n :class:`_PerfCaseResult`: an object collecting all test results.\n\n .. warning::\n This API is currently experimental and subject to change in future\n releases.\n\n " if (name is None): name = func.__name__ if (devices is None): devices = (_cupy.cuda.get_device_id(),) if (not callable(func)): raise ValueError('`func` should be a callable object.') if (not isinstance(args, tuple)): raise ValueError('`args` should be of tuple type.') if (not isinstance(kwargs, dict)): raise ValueError('`kwargs` should be of dict type.') if (not isinstance(n_repeat, int)): raise ValueError('`n_repeat` should be an integer.') if (not isinstance(name, str)): raise ValueError('`name` should be a string.') if (not isinstance(n_warmup, int)): raise ValueError('`n_warmup` should be an integer.') if (not _numpy.isreal(max_duration)): raise ValueError('`max_duration` should be given in seconds') if (not isinstance(devices, tuple)): raise ValueError('`devices` should be of tuple type') return _repeat(func, args, kwargs, n_repeat, name, n_warmup, max_duration, devices)
def repeat(func, args=(), kwargs={}, n_repeat=10000, *, name=None, n_warmup=10, max_duration=_math.inf, devices=None): " Timing utility for measuring time spent by both CPU and GPU.\n\n This function is a very convenient helper for setting up a timing test. The\n GPU time is properly recorded by synchronizing internal streams. As a\n result, to time a multi-GPU function all participating devices must be\n passed as the ``devices`` argument so that this helper knows which devices\n to record. A simple example is given as follows:\n\n .. code-block:: py\n\n import cupy as cp\n from cupyx.time import repeat\n\n def f(a, b):\n return 3 * cp.sin(-a) * b\n\n a = 0.5 - cp.random.random((100,))\n b = cp.random.random((100,))\n print(repeat(f, (a, b), n_repeat=1000))\n\n\n Args:\n func (callable): a callable object to be timed.\n args (tuple): positional argumens to be passed to the callable.\n kwargs (dict): keyword arguments to be passed to the callable.\n n_repeat (int): number of times the callable is called. Increasing\n this value would improve the collected statistics at the cost\n of longer test time.\n name (str): the function name to be reported. If not given, the\n callable's ``__name__`` attribute is used.\n n_warmup (int): number of times the callable is called. The warm-up\n runs are not timed.\n max_duration (float): the maximum time (in seconds) that the entire\n test can use. If the taken time is longer than this limit, the test\n is stopped and the statistics collected up to the breakpoint is\n reported.\n devices (tuple): a tuple of device IDs (int) that will be timed during\n the timing test. If not given, the current device is used.\n\n Returns:\n :class:`_PerfCaseResult`: an object collecting all test results.\n\n .. warning::\n This API is currently experimental and subject to change in future\n releases.\n\n " if (name is None): name = func.__name__ if (devices is None): devices = (_cupy.cuda.get_device_id(),) if (not callable(func)): raise ValueError('`func` should be a callable object.') if (not isinstance(args, tuple)): raise ValueError('`args` should be of tuple type.') if (not isinstance(kwargs, dict)): raise ValueError('`kwargs` should be of dict type.') if (not isinstance(n_repeat, int)): raise ValueError('`n_repeat` should be an integer.') if (not isinstance(name, str)): raise ValueError('`name` should be a string.') if (not isinstance(n_warmup, int)): raise ValueError('`n_warmup` should be an integer.') if (not _numpy.isreal(max_duration)): raise ValueError('`max_duration` should be given in seconds') if (not isinstance(devices, tuple)): raise ValueError('`devices` should be of tuple type') return _repeat(func, args, kwargs, n_repeat, name, n_warmup, max_duration, devices)<|docstring|>Timing utility for measuring time spent by both CPU and GPU. This function is a very convenient helper for setting up a timing test. The GPU time is properly recorded by synchronizing internal streams. As a result, to time a multi-GPU function all participating devices must be passed as the ``devices`` argument so that this helper knows which devices to record. A simple example is given as follows: .. code-block:: py import cupy as cp from cupyx.time import repeat def f(a, b): return 3 * cp.sin(-a) * b a = 0.5 - cp.random.random((100,)) b = cp.random.random((100,)) print(repeat(f, (a, b), n_repeat=1000)) Args: func (callable): a callable object to be timed. args (tuple): positional argumens to be passed to the callable. kwargs (dict): keyword arguments to be passed to the callable. 
n_repeat (int): number of times the callable is called. Increasing this value would improve the collected statistics at the cost of longer test time. name (str): the function name to be reported. If not given, the callable's ``__name__`` attribute is used. n_warmup (int): number of times the callable is called. The warm-up runs are not timed. max_duration (float): the maximum time (in seconds) that the entire test can use. If the taken time is longer than this limit, the test is stopped and the statistics collected up to the breakpoint is reported. devices (tuple): a tuple of device IDs (int) that will be timed during the timing test. If not given, the current device is used. Returns: :class:`_PerfCaseResult`: an object collecting all test results. .. warning:: This API is currently experimental and subject to change in future releases.<|endoftext|>
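The docstring's own example covers the single-device default; the sketch below is a hedged illustration of the remaining knobs (max_duration, n_warmup, devices). The import path and the two visible GPUs assumed for devices=(0, 1) are assumptions, not part of the record.

import cupy as cp
from demo._time_gpu import repeat   # assumed import path, mirroring the record's file

def f(a, b):
    return 3 * cp.sin(-a) * b

a = 0.5 - cp.random.random((100,))
b = cp.random.random((100,))

# Stop after at most 2 seconds of wall time and record GPU 0 and GPU 1.
perf = repeat(f, (a, b), n_repeat=10000, n_warmup=100,
              max_duration=2.0, devices=(0, 1))
print(perf)                   # summary; exact format depends on _PerfCaseResult
print(perf.cpu_times.shape)   # one CPU sample per repeat actually executed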
3a514804a8ae59a3c3566f458afb1107d8df247ee1e2069bd7bc1ab4e8a6796c
def repeat_dask(func, args=(), kwargs={}, n_repeat=10000, *, name=None, n_warmup=10, max_duration=_math.inf, devices=None): " Timing utility for measuring time spent by both CPU and GPU.\n\n This function is a very convenient helper for setting up a timing test. The\n GPU time is properly recorded by synchronizing internal streams. As a\n result, to time a multi-GPU function all participating devices must be\n passed as the ``devices`` argument so that this helper knows which devices\n to record. A simple example is given as follows:\n\n .. code-block:: py\n\n import cupy as cp\n from cupyx.time import repeat\n\n def f(a, b):\n return 3 * cp.sin(-a) * b\n\n a = 0.5 - cp.random.random((100,))\n b = cp.random.random((100,))\n print(repeat(f, (a, b), n_repeat=1000))\n\n\n Args:\n func (callable): a callable object to be timed.\n args (tuple): positional argumens to be passed to the callable.\n kwargs (dict): keyword arguments to be passed to the callable.\n n_repeat (int): number of times the callable is called. Increasing\n this value would improve the collected statistics at the cost\n of longer test time.\n name (str): the function name to be reported. If not given, the\n callable's ``__name__`` attribute is used.\n n_warmup (int): number of times the callable is called. The warm-up\n runs are not timed.\n max_duration (float): the maximum time (in seconds) that the entire\n test can use. If the taken time is longer than this limit, the test\n is stopped and the statistics collected up to the breakpoint is\n reported.\n devices (tuple): a tuple of device IDs (int) that will be timed during\n the timing test. If not given, the current device is used.\n\n Returns:\n :class:`_PerfCaseResult`: an object collecting all test results.\n\n .. warning::\n This API is currently experimental and subject to change in future\n releases.\n\n " if (name is None): name = func.__name__ if (devices is None): devices = (_cupy.cuda.get_device_id(),) if (not callable(func)): raise ValueError('`func` should be a callable object.') if (not isinstance(args, tuple)): raise ValueError('`args` should be of tuple type.') if (not isinstance(kwargs, dict)): raise ValueError('`kwargs` should be of dict type.') if (not isinstance(n_repeat, int)): raise ValueError('`n_repeat` should be an integer.') if (not isinstance(name, str)): raise ValueError('`name` should be a string.') if (not isinstance(n_warmup, int)): raise ValueError('`n_warmup` should be an integer.') if (not _numpy.isreal(max_duration)): raise ValueError('`max_duration` should be given in seconds') if (not isinstance(devices, tuple)): raise ValueError('`devices` should be of tuple type') return _repeat_dask(func, args, kwargs, n_repeat, name, n_warmup, max_duration, devices)
Timing utility for measuring time spent by both CPU and GPU. This function is a very convenient helper for setting up a timing test. The GPU time is properly recorded by synchronizing internal streams. As a result, to time a multi-GPU function all participating devices must be passed as the ``devices`` argument so that this helper knows which devices to record. A simple example is given as follows: .. code-block:: py import cupy as cp from cupyx.time import repeat def f(a, b): return 3 * cp.sin(-a) * b a = 0.5 - cp.random.random((100,)) b = cp.random.random((100,)) print(repeat(f, (a, b), n_repeat=1000)) Args: func (callable): a callable object to be timed. args (tuple): positional argumens to be passed to the callable. kwargs (dict): keyword arguments to be passed to the callable. n_repeat (int): number of times the callable is called. Increasing this value would improve the collected statistics at the cost of longer test time. name (str): the function name to be reported. If not given, the callable's ``__name__`` attribute is used. n_warmup (int): number of times the callable is called. The warm-up runs are not timed. max_duration (float): the maximum time (in seconds) that the entire test can use. If the taken time is longer than this limit, the test is stopped and the statistics collected up to the breakpoint is reported. devices (tuple): a tuple of device IDs (int) that will be timed during the timing test. If not given, the current device is used. Returns: :class:`_PerfCaseResult`: an object collecting all test results. .. warning:: This API is currently experimental and subject to change in future releases.
demo/_time_gpu.py
repeat_dask
grlee77/uskimage-demo
0
python
def repeat_dask(func, args=(), kwargs={}, n_repeat=10000, *, name=None, n_warmup=10, max_duration=_math.inf, devices=None): " Timing utility for measuring time spent by both CPU and GPU.\n\n This function is a very convenient helper for setting up a timing test. The\n GPU time is properly recorded by synchronizing internal streams. As a\n result, to time a multi-GPU function all participating devices must be\n passed as the ``devices`` argument so that this helper knows which devices\n to record. A simple example is given as follows:\n\n .. code-block:: py\n\n import cupy as cp\n from cupyx.time import repeat\n\n def f(a, b):\n return 3 * cp.sin(-a) * b\n\n a = 0.5 - cp.random.random((100,))\n b = cp.random.random((100,))\n print(repeat(f, (a, b), n_repeat=1000))\n\n\n Args:\n func (callable): a callable object to be timed.\n args (tuple): positional argumens to be passed to the callable.\n kwargs (dict): keyword arguments to be passed to the callable.\n n_repeat (int): number of times the callable is called. Increasing\n this value would improve the collected statistics at the cost\n of longer test time.\n name (str): the function name to be reported. If not given, the\n callable's ``__name__`` attribute is used.\n n_warmup (int): number of times the callable is called. The warm-up\n runs are not timed.\n max_duration (float): the maximum time (in seconds) that the entire\n test can use. If the taken time is longer than this limit, the test\n is stopped and the statistics collected up to the breakpoint is\n reported.\n devices (tuple): a tuple of device IDs (int) that will be timed during\n the timing test. If not given, the current device is used.\n\n Returns:\n :class:`_PerfCaseResult`: an object collecting all test results.\n\n .. warning::\n This API is currently experimental and subject to change in future\n releases.\n\n " if (name is None): name = func.__name__ if (devices is None): devices = (_cupy.cuda.get_device_id(),) if (not callable(func)): raise ValueError('`func` should be a callable object.') if (not isinstance(args, tuple)): raise ValueError('`args` should be of tuple type.') if (not isinstance(kwargs, dict)): raise ValueError('`kwargs` should be of dict type.') if (not isinstance(n_repeat, int)): raise ValueError('`n_repeat` should be an integer.') if (not isinstance(name, str)): raise ValueError('`name` should be a string.') if (not isinstance(n_warmup, int)): raise ValueError('`n_warmup` should be an integer.') if (not _numpy.isreal(max_duration)): raise ValueError('`max_duration` should be given in seconds') if (not isinstance(devices, tuple)): raise ValueError('`devices` should be of tuple type') return _repeat_dask(func, args, kwargs, n_repeat, name, n_warmup, max_duration, devices)
def repeat_dask(func, args=(), kwargs={}, n_repeat=10000, *, name=None, n_warmup=10, max_duration=_math.inf, devices=None): " Timing utility for measuring time spent by both CPU and GPU.\n\n This function is a very convenient helper for setting up a timing test. The\n GPU time is properly recorded by synchronizing internal streams. As a\n result, to time a multi-GPU function all participating devices must be\n passed as the ``devices`` argument so that this helper knows which devices\n to record. A simple example is given as follows:\n\n .. code-block:: py\n\n import cupy as cp\n from cupyx.time import repeat\n\n def f(a, b):\n return 3 * cp.sin(-a) * b\n\n a = 0.5 - cp.random.random((100,))\n b = cp.random.random((100,))\n print(repeat(f, (a, b), n_repeat=1000))\n\n\n Args:\n func (callable): a callable object to be timed.\n args (tuple): positional argumens to be passed to the callable.\n kwargs (dict): keyword arguments to be passed to the callable.\n n_repeat (int): number of times the callable is called. Increasing\n this value would improve the collected statistics at the cost\n of longer test time.\n name (str): the function name to be reported. If not given, the\n callable's ``__name__`` attribute is used.\n n_warmup (int): number of times the callable is called. The warm-up\n runs are not timed.\n max_duration (float): the maximum time (in seconds) that the entire\n test can use. If the taken time is longer than this limit, the test\n is stopped and the statistics collected up to the breakpoint is\n reported.\n devices (tuple): a tuple of device IDs (int) that will be timed during\n the timing test. If not given, the current device is used.\n\n Returns:\n :class:`_PerfCaseResult`: an object collecting all test results.\n\n .. warning::\n This API is currently experimental and subject to change in future\n releases.\n\n " if (name is None): name = func.__name__ if (devices is None): devices = (_cupy.cuda.get_device_id(),) if (not callable(func)): raise ValueError('`func` should be a callable object.') if (not isinstance(args, tuple)): raise ValueError('`args` should be of tuple type.') if (not isinstance(kwargs, dict)): raise ValueError('`kwargs` should be of dict type.') if (not isinstance(n_repeat, int)): raise ValueError('`n_repeat` should be an integer.') if (not isinstance(name, str)): raise ValueError('`name` should be a string.') if (not isinstance(n_warmup, int)): raise ValueError('`n_warmup` should be an integer.') if (not _numpy.isreal(max_duration)): raise ValueError('`max_duration` should be given in seconds') if (not isinstance(devices, tuple)): raise ValueError('`devices` should be of tuple type') return _repeat_dask(func, args, kwargs, n_repeat, name, n_warmup, max_duration, devices)<|docstring|>Timing utility for measuring time spent by both CPU and GPU. This function is a very convenient helper for setting up a timing test. The GPU time is properly recorded by synchronizing internal streams. As a result, to time a multi-GPU function all participating devices must be passed as the ``devices`` argument so that this helper knows which devices to record. A simple example is given as follows: .. code-block:: py import cupy as cp from cupyx.time import repeat def f(a, b): return 3 * cp.sin(-a) * b a = 0.5 - cp.random.random((100,)) b = cp.random.random((100,)) print(repeat(f, (a, b), n_repeat=1000)) Args: func (callable): a callable object to be timed. args (tuple): positional argumens to be passed to the callable. 
kwargs (dict): keyword arguments to be passed to the callable. n_repeat (int): number of times the callable is called. Increasing this value would improve the collected statistics at the cost of longer test time. name (str): the function name to be reported. If not given, the callable's ``__name__`` attribute is used. n_warmup (int): number of times the callable is called. The warm-up runs are not timed. max_duration (float): the maximum time (in seconds) that the entire test can use. If the taken time is longer than this limit, the test is stopped and the statistics collected up to the breakpoint is reported. devices (tuple): a tuple of device IDs (int) that will be timed during the timing test. If not given, the current device is used. Returns: :class:`_PerfCaseResult`: an object collecting all test results. .. warning:: This API is currently experimental and subject to change in future releases.<|endoftext|>
d12cebf3f46a863a2376eeb58f6bbee0119c84c82d6ace347911985249553cc0
@property def cpu_times(self): ' Returns an array of CPU times of size ``n_repeat``. ' return self._ts[0]
Returns an array of CPU times of size ``n_repeat``.
demo/_time_gpu.py
cpu_times
grlee77/uskimage-demo
0
python
@property def cpu_times(self): ' ' return self._ts[0]
@property def cpu_times(self): ' ' return self._ts[0]<|docstring|>Returns an array of CPU times of size ``n_repeat``.<|endoftext|>
635d2531ed6622d7a6ec5b6531c7d5a1be431687dac47522fdb21c1a0d6ca47e
@property def gpu_times(self): ' Returns an array of GPU times of size ``n_repeat``. ' return self._ts[1:]
Returns an array of GPU times of size ``n_repeat``.
demo/_time_gpu.py
gpu_times
grlee77/uskimage-demo
0
python
@property def gpu_times(self): ' ' return self._ts[1:]
@property def gpu_times(self): ' ' return self._ts[1:]<|docstring|>Returns an array of GPU times of size ``n_repeat``.<|endoftext|>
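The two properties above imply the layout of the internal array: row 0 of _ts holds CPU times and each remaining row holds one recorded device's GPU times. The helper below is a hedged sketch built on that inferred (1 + n_devices, n_repeat) shape; it is not part of the record.

def summarize(perf):
    # Assumes perf._ts has shape (1 + n_devices, n_repeat), times in seconds.
    cpu = perf.cpu_times                 # shape (n_repeat,)
    gpu = perf.gpu_times                 # shape (n_devices, n_repeat)
    print('cpu  : %9.3f +/- %7.3f us' % (cpu.mean() * 1e6, cpu.std() * 1e6))
    for dev, times in enumerate(gpu):
        print('gpu%d : %9.3f +/- %7.3f us' % (dev, times.mean() * 1e6, times.std() * 1e6))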
7fe0425551e51f12beee55e0897829ef58cab9f85df2128a737b7cd2ffd52f78
def infix_to_postfix(infix_expr): '\n 中缀表达式 -> 后缀表达式\n :param infix_expr: 这里的中缀表达式是一个由空格分隔的标记字符串\n ' prec = {} prec['*'] = 3 prec['/'] = 3 prec['+'] = 2 prec['-'] = 2 prec['('] = 1 tokens = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' op_stack = Stack() postfix_list = [] infix_list = infix_expr.split() for token in infix_list: if (token in tokens): postfix_list.append(token) elif (token == '('): op_stack.push(token) elif (token == ')'): top_token = op_stack.pop() while (top_token != '('): postfix_list.append(top_token) top_token = op_stack.pop() else: while ((not op_stack.is_empty()) and (prec[op_stack.peek()] >= prec[token])): postfix_list.append(op_stack.pop()) op_stack.push(token) while (not op_stack.is_empty()): postfix_list.append(op_stack.pop()) return ''.join(postfix_list)
中缀表达式 -> 后缀表达式 :param infix_expr: 这里的中缀表达式是一个由空格分隔的标记字符串
chapter_3/py_3_9_postfix_expressions.py
infix_to_postfix
kfrime/algo-in-python
0
python
def infix_to_postfix(infix_expr): '\n 中缀表达式 -> 后缀表达式\n :param infix_expr: 这里的中缀表达式是一个由空格分隔的标记字符串\n ' prec = {} prec['*'] = 3 prec['/'] = 3 prec['+'] = 2 prec['-'] = 2 prec['('] = 1 tokens = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' op_stack = Stack() postfix_list = [] infix_list = infix_expr.split() for token in infix_list: if (token in tokens): postfix_list.append(token) elif (token == '('): op_stack.push(token) elif (token == ')'): top_token = op_stack.pop() while (top_token != '('): postfix_list.append(top_token) top_token = op_stack.pop() else: while ((not op_stack.is_empty()) and (prec[op_stack.peek()] >= prec[token])): postfix_list.append(op_stack.pop()) op_stack.push(token) while (not op_stack.is_empty()): postfix_list.append(op_stack.pop()) return ''.join(postfix_list)
def infix_to_postfix(infix_expr): '\n 中缀表达式 -> 后缀表达式\n :param infix_expr: 这里的中缀表达式是一个由空格分隔的标记字符串\n ' prec = {} prec['*'] = 3 prec['/'] = 3 prec['+'] = 2 prec['-'] = 2 prec['('] = 1 tokens = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' op_stack = Stack() postfix_list = [] infix_list = infix_expr.split() for token in infix_list: if (token in tokens): postfix_list.append(token) elif (token == '('): op_stack.push(token) elif (token == ')'): top_token = op_stack.pop() while (top_token != '('): postfix_list.append(top_token) top_token = op_stack.pop() else: while ((not op_stack.is_empty()) and (prec[op_stack.peek()] >= prec[token])): postfix_list.append(op_stack.pop()) op_stack.push(token) while (not op_stack.is_empty()): postfix_list.append(op_stack.pop()) return ''.join(postfix_list)<|docstring|>中缀表达式 -> 后缀表达式 :param infix_expr: 这里的中缀表达式是一个由空格分隔的标记字符串<|endoftext|>
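A short usage sketch for infix_to_postfix above. The Stack class referenced by the function is not part of this record, so the list-backed stand-in below is an assumption modelled on the usual push/pop/peek/is_empty interface.

class Stack:
    # Assumed stand-in for the chapter's list-backed stack.
    def __init__(self):
        self._items = []
    def is_empty(self):
        return not self._items
    def push(self, item):
        self._items.append(item)
    def pop(self):
        return self._items.pop()
    def peek(self):
        return self._items[-1]

print(infix_to_postfix('A * B + C * D'))    # -> 'AB*CD*+'
print(infix_to_postfix('( A + B ) * C'))    # -> 'AB+C*'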
af33f857f39b994cda6cfc1460e26eecd283dbe86ac04c831a98a4b11f7f1659
def do_math(op, op1, op2): '\n 执行数学运算\n :param op: 操作符\n :param op1: 操作数1\n :param op2: 操作数2\n ' if (op == '+'): return (op1 + op2) elif (op == '-'): return (op1 - op2) elif (op == '*'): return (op1 * op2) elif (op == '/'): return (op1 / op2)
执行数学运算 :param op: 操作符 :param op1: 操作数1 :param op2: 操作数2
chapter_3/py_3_9_postfix_expressions.py
do_math
kfrime/algo-in-python
0
python
def do_math(op, op1, op2): '\n 执行数学运算\n :param op: 操作符\n :param op1: 操作数1\n :param op2: 操作数2\n ' if (op == '+'): return (op1 + op2) elif (op == '-'): return (op1 - op2) elif (op == '*'): return (op1 * op2) elif (op == '/'): return (op1 / op2)
def do_math(op, op1, op2): '\n 执行数学运算\n :param op: 操作符\n :param op1: 操作数1\n :param op2: 操作数2\n ' if (op == '+'): return (op1 + op2) elif (op == '-'): return (op1 - op2) elif (op == '*'): return (op1 * op2) elif (op == '/'): return (op1 / op2)<|docstring|>执行数学运算 :param op: 操作符 :param op1: 操作数1 :param op2: 操作数2<|endoftext|>
e9f1478b095c8a80b40462cd4fd62dffd091ff5fb9e28ca1fe5ba4e2adf4ab4d
def postfix_eval(postfix_expr): '\n 后缀表达式求值\n\n 如果 token 是操作数,将其从字符串转换为整数,并将值压到operandStack。\n\n 如果 token 是运算符*,/,+或-,它将需要两个操作数。弹出operandStack 两次。\n 第一个弹出的是第二个操作数,第二个弹出的是第一个操作数。执行算术运算后,\n 将结果压到操作数栈中。\n\n :param postfix_expr: 这里的后缀表达式是一个由空格分隔的标记(token)字符串\n ' operand_stack = Stack() token_list = postfix_expr.split() for token in token_list: if (token in '0123456789'): operand_stack.push(int(token)) else: op2 = operand_stack.pop() op1 = operand_stack.pop() result = do_math(token, op1, op2) operand_stack.push(result) return operand_stack.pop()
后缀表达式求值 如果 token 是操作数,将其从字符串转换为整数,并将值压到operandStack。 如果 token 是运算符*,/,+或-,它将需要两个操作数。弹出operandStack 两次。 第一个弹出的是第二个操作数,第二个弹出的是第一个操作数。执行算术运算后, 将结果压到操作数栈中。 :param postfix_expr: 这里的后缀表达式是一个由空格分隔的标记(token)字符串
chapter_3/py_3_9_postfix_expressions.py
postfix_eval
kfrime/algo-in-python
0
python
def postfix_eval(postfix_expr): '\n 后缀表达式求值\n\n 如果 token 是操作数,将其从字符串转换为整数,并将值压到operandStack。\n\n 如果 token 是运算符*,/,+或-,它将需要两个操作数。弹出operandStack 两次。\n 第一个弹出的是第二个操作数,第二个弹出的是第一个操作数。执行算术运算后,\n 将结果压到操作数栈中。\n\n :param postfix_expr: 这里的后缀表达式是一个由空格分隔的标记(token)字符串\n ' operand_stack = Stack() token_list = postfix_expr.split() for token in token_list: if (token in '0123456789'): operand_stack.push(int(token)) else: op2 = operand_stack.pop() op1 = operand_stack.pop() result = do_math(token, op1, op2) operand_stack.push(result) return operand_stack.pop()
def postfix_eval(postfix_expr): '\n 后缀表达式求值\n\n 如果 token 是操作数,将其从字符串转换为整数,并将值压到operandStack。\n\n 如果 token 是运算符*,/,+或-,它将需要两个操作数。弹出operandStack 两次。\n 第一个弹出的是第二个操作数,第二个弹出的是第一个操作数。执行算术运算后,\n 将结果压到操作数栈中。\n\n :param postfix_expr: 这里的后缀表达式是一个由空格分隔的标记(token)字符串\n ' operand_stack = Stack() token_list = postfix_expr.split() for token in token_list: if (token in '0123456789'): operand_stack.push(int(token)) else: op2 = operand_stack.pop() op1 = operand_stack.pop() result = do_math(token, op1, op2) operand_stack.push(result) return operand_stack.pop()<|docstring|>后缀表达式求值 如果 token 是操作数,将其从字符串转换为整数,并将值压到operandStack。 如果 token 是运算符*,/,+或-,它将需要两个操作数。弹出operandStack 两次。 第一个弹出的是第二个操作数,第二个弹出的是第一个操作数。执行算术运算后, 将结果压到操作数栈中。 :param postfix_expr: 这里的后缀表达式是一个由空格分隔的标记(token)字符串<|endoftext|>
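A usage sketch for postfix_eval above, reusing do_math and the Stack stand-in. Note that because infix_to_postfix joins its output without spaces, its result would need to be re-spaced (e.g. ' '.join(...)) before postfix_eval, which splits on whitespace, can consume it.

print(postfix_eval('7 8 + 3 2 + /'))   # (7 + 8) / (3 + 2) -> 3.0
print(postfix_eval('4 5 6 * +'))       # 4 + 5 * 6 -> 34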
2d02ef1e499ab61e92b078dcead5c0d1fdb4eb576d734ea89375ecd1bf4b3847
def get_regnet(channels_init, channels_slope, channels_mult, depth, groups, use_se=False, model_name=None, pretrained=False, root=os.path.join('~', '.torch', 'models'), **kwargs): "\n Create RegNet model with specific parameters.\n\n Parameters:\n ----------\n channels_init : float\n Initial value for channels/widths.\n channels_slope : float\n Slope value for channels/widths.\n width_mult : float\n Width multiplier value.\n groups : int\n Number of groups.\n depth : int\n Depth value.\n use_se : bool, default False\n Whether to use SE-module.\n model_name : str or None, default None\n Model name for loading pretrained model.\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " divisor = 8 assert ((channels_slope >= 0) and (channels_init > 0) and (channels_mult > 1) and ((channels_init % divisor) == 0)) channels_cont = ((np.arange(depth) * channels_slope) + channels_init) channels_exps = np.round((np.log((channels_cont / channels_init)) / np.log(channels_mult))) channels = (channels_init * np.power(channels_mult, channels_exps)) channels = (np.round((channels / divisor)) * divisor).astype(np.int) (channels_per_stage, layers) = np.unique(channels, return_counts=True) groups_per_stage = [min(groups, c) for c in channels_per_stage] channels_per_stage = [int((round((c / g)) * g)) for (c, g) in zip(channels_per_stage, groups_per_stage)] channels = [([ci] * li) for (ci, li) in zip(channels_per_stage, layers)] init_block_channels = 32 net = RegNet(channels=channels, init_block_channels=init_block_channels, groups=groups_per_stage, use_se=use_se, **kwargs) if pretrained: if ((model_name is None) or (not model_name)): raise ValueError('Parameter `model_name` should be properly initialized for loading pretrained model.') from .model_store import download_model download_model(net=net, model_name=model_name, local_model_store_dir_path=root) return net
Create RegNet model with specific parameters. Parameters: ---------- channels_init : float Initial value for channels/widths. channels_slope : float Slope value for channels/widths. width_mult : float Width multiplier value. groups : int Number of groups. depth : int Depth value. use_se : bool, default False Whether to use SE-module. model_name : str or None, default None Model name for loading pretrained model. pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
get_regnet
JacobARose/imgclsmob
2,649
python
def get_regnet(channels_init, channels_slope, channels_mult, depth, groups, use_se=False, model_name=None, pretrained=False, root=os.path.join('~', '.torch', 'models'), **kwargs): "\n Create RegNet model with specific parameters.\n\n Parameters:\n ----------\n channels_init : float\n Initial value for channels/widths.\n channels_slope : float\n Slope value for channels/widths.\n width_mult : float\n Width multiplier value.\n groups : int\n Number of groups.\n depth : int\n Depth value.\n use_se : bool, default False\n Whether to use SE-module.\n model_name : str or None, default None\n Model name for loading pretrained model.\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " divisor = 8 assert ((channels_slope >= 0) and (channels_init > 0) and (channels_mult > 1) and ((channels_init % divisor) == 0)) channels_cont = ((np.arange(depth) * channels_slope) + channels_init) channels_exps = np.round((np.log((channels_cont / channels_init)) / np.log(channels_mult))) channels = (channels_init * np.power(channels_mult, channels_exps)) channels = (np.round((channels / divisor)) * divisor).astype(np.int) (channels_per_stage, layers) = np.unique(channels, return_counts=True) groups_per_stage = [min(groups, c) for c in channels_per_stage] channels_per_stage = [int((round((c / g)) * g)) for (c, g) in zip(channels_per_stage, groups_per_stage)] channels = [([ci] * li) for (ci, li) in zip(channels_per_stage, layers)] init_block_channels = 32 net = RegNet(channels=channels, init_block_channels=init_block_channels, groups=groups_per_stage, use_se=use_se, **kwargs) if pretrained: if ((model_name is None) or (not model_name)): raise ValueError('Parameter `model_name` should be properly initialized for loading pretrained model.') from .model_store import download_model download_model(net=net, model_name=model_name, local_model_store_dir_path=root) return net
def get_regnet(channels_init, channels_slope, channels_mult, depth, groups, use_se=False, model_name=None, pretrained=False, root=os.path.join('~', '.torch', 'models'), **kwargs): "\n Create RegNet model with specific parameters.\n\n Parameters:\n ----------\n channels_init : float\n Initial value for channels/widths.\n channels_slope : float\n Slope value for channels/widths.\n width_mult : float\n Width multiplier value.\n groups : int\n Number of groups.\n depth : int\n Depth value.\n use_se : bool, default False\n Whether to use SE-module.\n model_name : str or None, default None\n Model name for loading pretrained model.\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " divisor = 8 assert ((channels_slope >= 0) and (channels_init > 0) and (channels_mult > 1) and ((channels_init % divisor) == 0)) channels_cont = ((np.arange(depth) * channels_slope) + channels_init) channels_exps = np.round((np.log((channels_cont / channels_init)) / np.log(channels_mult))) channels = (channels_init * np.power(channels_mult, channels_exps)) channels = (np.round((channels / divisor)) * divisor).astype(np.int) (channels_per_stage, layers) = np.unique(channels, return_counts=True) groups_per_stage = [min(groups, c) for c in channels_per_stage] channels_per_stage = [int((round((c / g)) * g)) for (c, g) in zip(channels_per_stage, groups_per_stage)] channels = [([ci] * li) for (ci, li) in zip(channels_per_stage, layers)] init_block_channels = 32 net = RegNet(channels=channels, init_block_channels=init_block_channels, groups=groups_per_stage, use_se=use_se, **kwargs) if pretrained: if ((model_name is None) or (not model_name)): raise ValueError('Parameter `model_name` should be properly initialized for loading pretrained model.') from .model_store import download_model download_model(net=net, model_name=model_name, local_model_store_dir_path=root) return net<|docstring|>Create RegNet model with specific parameters. Parameters: ---------- channels_init : float Initial value for channels/widths. channels_slope : float Slope value for channels/widths. width_mult : float Width multiplier value. groups : int Number of groups. depth : int Depth value. use_se : bool, default False Whether to use SE-module. model_name : str or None, default None Model name for loading pretrained model. pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
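The width-generation logic inside get_regnet is easier to follow in isolation. The standalone sketch below repeats the same quantisation steps outside the network class; the parameter values in the call are taken from the regnetx002 wrapper further down, and the expected output is an inference from the arithmetic rather than a quoted result.

import numpy as np

def regnet_stages(channels_init, channels_slope, channels_mult, depth, groups, divisor=8):
    # Continuous widths w_j = channels_init + j * channels_slope, snapped to the
    # nearest channels_init * channels_mult**k, rounded to a multiple of `divisor`,
    # then made divisible by the per-stage group width.
    cont = np.arange(depth) * channels_slope + channels_init
    exps = np.round(np.log(cont / channels_init) / np.log(channels_mult))
    widths = channels_init * np.power(channels_mult, exps)
    widths = (np.round(widths / divisor) * divisor).astype(int)
    stage_widths, stage_depths = np.unique(widths, return_counts=True)
    stage_groups = [min(groups, int(w)) for w in stage_widths]
    stage_widths = [int(round(w / g) * g) for w, g in zip(stage_widths, stage_groups)]
    return stage_widths, [int(d) for d in stage_depths], stage_groups

# RegNetX-200MF parameters (see regnetx002 below):
print(regnet_stages(24, 36.44, 2.49, 13, 8))
# -> ([24, 56, 152, 368], [1, 1, 4, 7], [8, 8, 8, 8])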
7207c26b47d35375182b60f1c4e798376c12fe0380095df05373b3857d6eb546
def regnetx002(**kwargs): "\n RegNetX-200MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=24, channels_slope=36.44, channels_mult=2.49, depth=13, groups=8, model_name='regnetx002', **kwargs)
RegNetX-200MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx002
JacobARose/imgclsmob
2,649
python
def regnetx002(**kwargs): "\n RegNetX-200MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=24, channels_slope=36.44, channels_mult=2.49, depth=13, groups=8, model_name='regnetx002', **kwargs)
def regnetx002(**kwargs): "\n RegNetX-200MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=24, channels_slope=36.44, channels_mult=2.49, depth=13, groups=8, model_name='regnetx002', **kwargs)<|docstring|>RegNetX-200MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
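A hedged instantiation sketch for the regnetx* wrappers in this file. The 3x224x224 input and the 1000-class output rely on the model zoo's usual ImageNet defaults, which are assumptions here rather than facts stated in the record.

import torch

net = regnetx002()                  # pretrained=False by default
net.eval()
x = torch.randn(1, 3, 224, 224)
with torch.no_grad():
    y = net(x)
print(y.shape)                      # torch.Size([1, 1000]) under the assumptions above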
6b631ac0da3798e38dd79e44d9bf53d3d2fda3f22e6fc041d44562bdd3c26cf3
def regnetx004(**kwargs): "\n RegNetX-400MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=24, channels_slope=24.48, channels_mult=2.54, depth=22, groups=16, model_name='regnetx004', **kwargs)
RegNetX-400MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx004
JacobARose/imgclsmob
2,649
python
def regnetx004(**kwargs): "\n RegNetX-400MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=24, channels_slope=24.48, channels_mult=2.54, depth=22, groups=16, model_name='regnetx004', **kwargs)
def regnetx004(**kwargs): "\n RegNetX-400MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=24, channels_slope=24.48, channels_mult=2.54, depth=22, groups=16, model_name='regnetx004', **kwargs)<|docstring|>RegNetX-400MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
3ca93c2816f268d505433fcc4e406736cb37c1075b0c014dfb0488629cf9e1cf
def regnetx006(**kwargs): "\n RegNetX-600MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=48, channels_slope=36.97, channels_mult=2.24, depth=16, groups=24, model_name='regnetx006', **kwargs)
RegNetX-600MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx006
JacobARose/imgclsmob
2,649
python
def regnetx006(**kwargs): "\n RegNetX-600MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=48, channels_slope=36.97, channels_mult=2.24, depth=16, groups=24, model_name='regnetx006', **kwargs)
def regnetx006(**kwargs): "\n RegNetX-600MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=48, channels_slope=36.97, channels_mult=2.24, depth=16, groups=24, model_name='regnetx006', **kwargs)<|docstring|>RegNetX-600MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
8623831404d6f757658d5f5e8ed650b081d7aaea1615ebbd13eba15956808f2f
def regnetx008(**kwargs): "\n RegNetX-800MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=56, channels_slope=35.73, channels_mult=2.28, depth=16, groups=16, model_name='regnetx008', **kwargs)
RegNetX-800MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx008
JacobARose/imgclsmob
2,649
python
def regnetx008(**kwargs): "\n RegNetX-800MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=56, channels_slope=35.73, channels_mult=2.28, depth=16, groups=16, model_name='regnetx008', **kwargs)
def regnetx008(**kwargs): "\n RegNetX-800MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=56, channels_slope=35.73, channels_mult=2.28, depth=16, groups=16, model_name='regnetx008', **kwargs)<|docstring|>RegNetX-800MF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
688d2de6dddfda89a4be29da91bd9168f4d743a3708f3ace803af63e91563a4b
def regnetx016(**kwargs): "\n RegNetX-1.6GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=80, channels_slope=34.01, channels_mult=2.25, depth=18, groups=24, model_name='regnetx016', **kwargs)
RegNetX-1.6GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx016
JacobARose/imgclsmob
2,649
python
def regnetx016(**kwargs): "\n RegNetX-1.6GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=80, channels_slope=34.01, channels_mult=2.25, depth=18, groups=24, model_name='regnetx016', **kwargs)
def regnetx016(**kwargs): "\n RegNetX-1.6GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=80, channels_slope=34.01, channels_mult=2.25, depth=18, groups=24, model_name='regnetx016', **kwargs)<|docstring|>RegNetX-1.6GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
3f59319a5e1eeee45b18e4f72e295fb8d71e17b98e17551f5f34cfcbfad4bbf9
def regnetx032(**kwargs): "\n RegNetX-3.2GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=88, channels_slope=26.31, channels_mult=2.25, depth=25, groups=48, model_name='regnetx032', **kwargs)
RegNetX-3.2GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx032
JacobARose/imgclsmob
2,649
python
def regnetx032(**kwargs): "\n RegNetX-3.2GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=88, channels_slope=26.31, channels_mult=2.25, depth=25, groups=48, model_name='regnetx032', **kwargs)
def regnetx032(**kwargs): "\n RegNetX-3.2GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=88, channels_slope=26.31, channels_mult=2.25, depth=25, groups=48, model_name='regnetx032', **kwargs)<|docstring|>RegNetX-3.2GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
26c4105e9b585b7103ebbea8b2c49df29417a9f91f477749a84a71a79c9ffebe
def regnetx040(**kwargs): "\n RegNetX-4.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=96, channels_slope=38.65, channels_mult=2.43, depth=23, groups=40, model_name='regnetx040', **kwargs)
RegNetX-4.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx040
JacobARose/imgclsmob
2,649
python
def regnetx040(**kwargs): "\n RegNetX-4.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=96, channels_slope=38.65, channels_mult=2.43, depth=23, groups=40, model_name='regnetx040', **kwargs)
def regnetx040(**kwargs): "\n RegNetX-4.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=96, channels_slope=38.65, channels_mult=2.43, depth=23, groups=40, model_name='regnetx040', **kwargs)<|docstring|>RegNetX-4.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
5bcd82bd068f0e2913a6a6cf42a44fac009bb8273c89b033a93221aecac576c1
def regnetx064(**kwargs): "\n RegNetX-6.4GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=184, channels_slope=60.83, channels_mult=2.07, depth=17, groups=56, model_name='regnetx064', **kwargs)
RegNetX-6.4GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx064
JacobARose/imgclsmob
2,649
python
def regnetx064(**kwargs): "\n RegNetX-6.4GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=184, channels_slope=60.83, channels_mult=2.07, depth=17, groups=56, model_name='regnetx064', **kwargs)
def regnetx064(**kwargs): "\n RegNetX-6.4GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=184, channels_slope=60.83, channels_mult=2.07, depth=17, groups=56, model_name='regnetx064', **kwargs)<|docstring|>RegNetX-6.4GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
42e229899fb67e2d8567717bc8c538ee6e760c9891c9ef217d5fc57de46ec807
def regnetx080(**kwargs): "\n RegNetX-8.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=80, channels_slope=49.56, channels_mult=2.88, depth=23, groups=120, model_name='regnetx080', **kwargs)
RegNetX-8.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx080
JacobARose/imgclsmob
2,649
python
def regnetx080(**kwargs): "\n RegNetX-8.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=80, channels_slope=49.56, channels_mult=2.88, depth=23, groups=120, model_name='regnetx080', **kwargs)
def regnetx080(**kwargs): "\n RegNetX-8.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=80, channels_slope=49.56, channels_mult=2.88, depth=23, groups=120, model_name='regnetx080', **kwargs)<|docstring|>RegNetX-8.0GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
3b7e26d0c0acf7fb26fd638abff430674530e8fcc35dfb70cdc5883ff50ab38e
def regnetx120(**kwargs): "\n RegNetX-12GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=168, channels_slope=73.36, channels_mult=2.37, depth=19, groups=112, model_name='regnetx120', **kwargs)
RegNetX-12GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx120
JacobARose/imgclsmob
2,649
python
def regnetx120(**kwargs): "\n RegNetX-12GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=168, channels_slope=73.36, channels_mult=2.37, depth=19, groups=112, model_name='regnetx120', **kwargs)
def regnetx120(**kwargs): "\n RegNetX-12GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=168, channels_slope=73.36, channels_mult=2.37, depth=19, groups=112, model_name='regnetx120', **kwargs)<|docstring|>RegNetX-12GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
1709961543514d99ec238ec35d5579ae72e684989fe9ef55e8bd8c89894b919d
def regnetx160(**kwargs): "\n RegNetX-16GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=216, channels_slope=55.59, channels_mult=2.1, depth=22, groups=128, model_name='regnetx160', **kwargs)
RegNetX-16GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx160
JacobARose/imgclsmob
2,649
python
def regnetx160(**kwargs): "\n RegNetX-16GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=216, channels_slope=55.59, channels_mult=2.1, depth=22, groups=128, model_name='regnetx160', **kwargs)
def regnetx160(**kwargs): "\n RegNetX-16GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=216, channels_slope=55.59, channels_mult=2.1, depth=22, groups=128, model_name='regnetx160', **kwargs)<|docstring|>RegNetX-16GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>
2f43aaa30f0215fea1e9dd59e4c4e39558f4c5cd821acac812adc1e84b70519a
def regnetx320(**kwargs): "\n RegNetX-32GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=320, channels_slope=69.86, channels_mult=2.0, depth=23, groups=168, model_name='regnetx320', **kwargs)
RegNetX-32GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.
pytorch/pytorchcv/models/regnet.py
regnetx320
JacobARose/imgclsmob
2,649
python
def regnetx320(**kwargs): "\n RegNetX-32GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=320, channels_slope=69.86, channels_mult=2.0, depth=23, groups=168, model_name='regnetx320', **kwargs)
def regnetx320(**kwargs): "\n RegNetX-32GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678.\n\n Parameters:\n ----------\n pretrained : bool, default False\n Whether to load the pretrained weights for model.\n root : str, default '~/.torch/models'\n Location for keeping the model parameters.\n " return get_regnet(channels_init=320, channels_slope=69.86, channels_mult=2.0, depth=23, groups=168, model_name='regnetx320', **kwargs)<|docstring|>RegNetX-32GF model from 'Designing Network Design Spaces,' https://arxiv.org/abs/2003.13678. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.torch/models' Location for keeping the model parameters.<|endoftext|>