Columns: code (string, length 4 to 4.48k), docstring (string, length 1 to 6.45k), _id (string, length 24)
class Command(BaseCommand): <NEW_LINE> <INDENT> help = ( "Install the Hubspot Ecommerce Bridge if it is not already installed and configure the settings based on " "the given file. Make sure a HUBSPOT_API_KEY is set in settings and HUBSPOT_ECOMMERCE_SETTINGS are " "configured in ecommerce/management/commands/configure_hubspot_bridge.py" ) <NEW_LINE> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument( "--uninstall", action="store_true", help="Uninstall the Ecommerce Bridge" ) <NEW_LINE> parser.add_argument( "--status", action="store_true", help="Get the current status of the Ecommerce Bridge installation", ) <NEW_LINE> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> print( "Checking Hubspot Ecommerce Bridge installation for given Hubspot API Key..." ) <NEW_LINE> installation_status = json.loads(get_hubspot_installation_status().text) <NEW_LINE> print(installation_status) <NEW_LINE> if options["status"]: <NEW_LINE> <INDENT> print(f"Install completed: {installation_status['installCompleted']}") <NEW_LINE> print( f"Ecommerce Settings enabled: {installation_status['ecommSettingsEnabled']}" ) <NEW_LINE> <DEDENT> elif options["uninstall"]: <NEW_LINE> <INDENT> if installation_status["installCompleted"]: <NEW_LINE> <INDENT> print("Uninstalling Ecommerce Bridge...") <NEW_LINE> uninstall_hubspot_ecommerce_bridge() <NEW_LINE> print("Uninstalling cutsom groups and properties...") <NEW_LINE> uninstall_custom_properties() <NEW_LINE> print("Uninstall successful") <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Ecommerce Bridge is not installed") <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("Configuring settings...") <NEW_LINE> configure_hubspot_settings() <NEW_LINE> print("Configuring custom groups and properties...") <NEW_LINE> install_custom_properties() <NEW_LINE> print("Settings and custom properties configured")
Command to configure the Hubspot ecommerce bridge, which handles syncing Hubspot Products, Deals, Line Items, and Contacts with the MITxPro Products, Orders, and Users.
62599058379a373c97d9a5bd
class _ArgTemplateBuilder(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._arg_accumulator = [] <NEW_LINE> self._argspec = [] <NEW_LINE> self._finalized = False <NEW_LINE> <DEDENT> def _consume_args(self): <NEW_LINE> <INDENT> if self._arg_accumulator: <NEW_LINE> <INDENT> self._argspec.append( gast.Tuple(elts=self._arg_accumulator, ctx=gast.Load())) <NEW_LINE> self._arg_accumulator = [] <NEW_LINE> <DEDENT> <DEDENT> def add_arg(self, a): <NEW_LINE> <INDENT> self._arg_accumulator.append(a) <NEW_LINE> <DEDENT> def add_stararg(self, a): <NEW_LINE> <INDENT> self._consume_args() <NEW_LINE> self._argspec.append( gast.Call( gast.Name( 'tuple', ctx=gast.Load(), annotation=None, type_comment=None), args=[a], keywords=())) <NEW_LINE> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> self._consume_args() <NEW_LINE> self._finalized = True <NEW_LINE> <DEDENT> def to_ast(self): <NEW_LINE> <INDENT> assert self._finalized <NEW_LINE> if self._argspec: <NEW_LINE> <INDENT> result = self._argspec[0] <NEW_LINE> for i in range(1, len(self._argspec)): <NEW_LINE> <INDENT> result = gast.BinOp(result, gast.Add(), self._argspec[i]) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> return gast.Tuple([], gast.Load())
Constructs a tuple representing the positional arguments in a call. Example (yes, it's legal Python 3): f(*args1, b, *args2, c, d) -> args1 + (b,) + args2 + (c, d)
6259905829b78933be26ab91
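A minimal pure-Python illustration of the tuple-building semantics described in the _ArgTemplateBuilder docstring above, bypassing the gast AST machinery entirely; the values bound to args1, args2, b, c, and d are hypothetical.

# Hypothetical values standing in for the call f(*args1, b, *args2, c, d)
args1 = (1, 2)
args2 = (3,)
b, c, d = 10, 20, 30

# The AST produced by to_ast() evaluates to this concatenation:
packed = tuple(args1) + (b,) + tuple(args2) + (c, d)
assert packed == (1, 2, 10, 3, 20, 30)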
class UnknownError(TypeError): <NEW_LINE> <INDENT> pass
Raised whenever :class:`Unknown` is used in a boolean operation. EXAMPLES:: sage: not Unknown Traceback (most recent call last): ... UnknownError: Unknown does not evaluate in boolean context
625990582c8b7c6e89bd4d87
class OPCD_D_spr_XO_1(MachineInstruction): <NEW_LINE> <INDENT> signature = (D, spr) <NEW_LINE> def _render(params, operands): <NEW_LINE> <INDENT> return OPCD.render(params['OPCD']) | D.render(operands['D']) | spr.render(operands['spr']) | XO_1.render(params['XO']) <NEW_LINE> <DEDENT> render = staticmethod(_render)
Instructions: (1) mfspr
62599058d6c5a102081e36b9
class TestAsset(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.name = 'test' <NEW_LINE> self.var_name = 'test_var' <NEW_LINE> self.component = Mock(length=1) <NEW_LINE> self.solution = Mock() <NEW_LINE> self.motion_func = Mock() <NEW_LINE> <DEDENT> def test_motion(self): <NEW_LINE> <INDENT> func = lambda x, y: x + y <NEW_LINE> asset = Asset(self.name, 1, self.component, self.solution, func) <NEW_LINE> self.assertEqual(asset.motion, 2)
Unit test for class Asset.
625990589c8ee82313040c57
class ServiceAssociationLink(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_resource_type': {'key': 'properties.linkedResourceType', 'type': 'str'}, 'link': {'key': 'properties.link', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'allow_delete': {'key': 'properties.allowDelete', 'type': 'bool'}, 'locations': {'key': 'properties.locations', 'type': '[str]'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, linked_resource_type: Optional[str] = None, link: Optional[str] = None, allow_delete: Optional[bool] = None, locations: Optional[List[str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ServiceAssociationLink, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.etag = None <NEW_LINE> self.type = None <NEW_LINE> self.linked_resource_type = linked_resource_type <NEW_LINE> self.link = link <NEW_LINE> self.provisioning_state = None <NEW_LINE> self.allow_delete = allow_delete <NEW_LINE> self.locations = locations
ServiceAssociationLink resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param name: Name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Resource type. :vartype type: str :param linked_resource_type: Resource type of the linked resource. :type linked_resource_type: str :param link: Link to the external resource. :type link: str :ivar provisioning_state: The provisioning state of the service association link resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2020_07_01.models.ProvisioningState :param allow_delete: If true, the resource can be deleted. :type allow_delete: bool :param locations: A list of locations. :type locations: list[str]
6259905807d97122c4218244
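A minimal construction sketch for the ServiceAssociationLink model above. All values are hypothetical, the constructor is keyword-only, and the read-only fields (etag, type, provisioning_state) stay None until populated by the server.

# Hypothetical values; read-only fields are left for the server to fill in
link = ServiceAssociationLink(
    name="appServiceLink",
    linked_resource_type="Microsoft.Web/serverfarms",
    link="/subscriptions/<sub-id>/resourceGroups/rg/providers/Microsoft.Web/serverfarms/plan",
    allow_delete=False,
    locations=["eastus"],
)
assert link.etag is None and link.provisioning_state is None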
class Form(view.View): <NEW_LINE> <INDENT> fields = Fieldset() <NEW_LINE> buttons = None <NEW_LINE> label = None <NEW_LINE> description = '' <NEW_LINE> prefix = 'form.' <NEW_LINE> actions = None <NEW_LINE> widgets = None <NEW_LINE> content = None <NEW_LINE> mode = FORM_INPUT <NEW_LINE> method = 'post' <NEW_LINE> enctype = 'multipart/form-data' <NEW_LINE> accept = None <NEW_LINE> acceptCharset = None <NEW_LINE> csrf = False <NEW_LINE> csrfname = 'csrf-token' <NEW_LINE> params = MultiDict({}) <NEW_LINE> def __init__(self, context, request): <NEW_LINE> <INDENT> super(Form, self).__init__(context, request) <NEW_LINE> if self.buttons is None: <NEW_LINE> <INDENT> self.buttons = Buttons() <NEW_LINE> <DEDENT> <DEDENT> @reify <NEW_LINE> def action(self): <NEW_LINE> <INDENT> return self.request.url <NEW_LINE> <DEDENT> @reify <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.prefix.strip('.') <NEW_LINE> <DEDENT> @reify <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self.name.replace('.', '-') <NEW_LINE> <DEDENT> def form_content(self): <NEW_LINE> <INDENT> return self.content <NEW_LINE> <DEDENT> def form_params(self): <NEW_LINE> <INDENT> if self.method == 'post': <NEW_LINE> <INDENT> return self.request.POST <NEW_LINE> <DEDENT> elif self.method == 'get': <NEW_LINE> <INDENT> return self.request.GET <NEW_LINE> <DEDENT> elif self.method == 'params': <NEW_LINE> <INDENT> return self.params <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.params <NEW_LINE> <DEDENT> <DEDENT> def update_widgets(self): <NEW_LINE> <INDENT> self.widgets = FormWidgets(self.fields, self, self.request) <NEW_LINE> self.widgets.mode = self.mode <NEW_LINE> self.widgets.update() <NEW_LINE> <DEDENT> def update_actions(self): <NEW_LINE> <INDENT> self.actions = Actions(self, self.request) <NEW_LINE> self.actions.update() <NEW_LINE> <DEDENT> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> if CSRF is not None: <NEW_LINE> <INDENT> return CSRF.generate(self.tokenData) <NEW_LINE> <DEDENT> <DEDENT> @reify <NEW_LINE> def tokenData(self): <NEW_LINE> <INDENT> return '%s.%s:%s' % (self.__module__, self.__class__.__name__, security.authenticated_userid(self.request)) <NEW_LINE> <DEDENT> def validate(self, data, errors): <NEW_LINE> <INDENT> self.validate_csrf_token() <NEW_LINE> <DEDENT> def validate_csrf_token(self): <NEW_LINE> <INDENT> if self.csrf: <NEW_LINE> <INDENT> token = self.form_params().get(self.csrfname, None) <NEW_LINE> if token is not None: <NEW_LINE> <INDENT> if CSRF is not None: <NEW_LINE> <INDENT> if CSRF.get(token) == self.tokenData: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> raise HTTPForbidden("Form authenticator is not found.") <NEW_LINE> <DEDENT> <DEDENT> def extract(self): <NEW_LINE> <INDENT> return self.widgets.extract() <NEW_LINE> <DEDENT> def update(self, **data): <NEW_LINE> <INDENT> if not self.content and data: <NEW_LINE> <INDENT> self.content = data <NEW_LINE> <DEDENT> self.update_widgets() <NEW_LINE> self.update_actions() <NEW_LINE> return self.actions.execute() <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> if self.template is None: <NEW_LINE> <INDENT> return self.snippet(FORM_VIEW, self) <NEW_LINE> <DEDENT> kwargs = {'view': self, 'context': self.context, 'request': self.request} <NEW_LINE> return self.template(**kwargs)
A form
625990587d847024c075d975
class TradeMonitor(BasicMonitor): <NEW_LINE> <INDENT> def __init__(self, mainEngine, eventEngine, parent=None): <NEW_LINE> <INDENT> super(TradeMonitor, self).__init__(mainEngine, eventEngine, parent) <NEW_LINE> d = OrderedDict() <NEW_LINE> d['gatewayName'] = {'chinese':vtText.GATEWAY, 'cellType':BasicCell} <NEW_LINE> d['tradeID'] = {'chinese':vtText.TRADE_ID, 'cellType':NumCell} <NEW_LINE> d['orderID'] = {'chinese':vtText.ORDER_ID, 'cellType':NumCell} <NEW_LINE> d['symbol'] = {'chinese':vtText.CONTRACT_SYMBOL, 'cellType':BasicCell} <NEW_LINE> d['direction'] = {'chinese':vtText.DIRECTION, 'cellType':DirectionCell} <NEW_LINE> d['offset'] = {'chinese':vtText.OFFSET, 'cellType':BasicCell} <NEW_LINE> d['price'] = {'chinese':vtText.PRICE, 'cellType':NumCell} <NEW_LINE> d['volume'] = {'chinese':vtText.VOLUME, 'cellType':NumCell} <NEW_LINE> d['tradeTime'] = {'chinese':vtText.TRADE_TIME, 'cellType':BasicCell} <NEW_LINE> self.setHeaderDict(d) <NEW_LINE> self.setEventType(EVENT_TRADE) <NEW_LINE> self.setFont(BASIC_FONT) <NEW_LINE> self.setSorting(True) <NEW_LINE> self.setResizeMode(QtWidgets.QHeaderView.Stretch) <NEW_LINE> self.initTable() <NEW_LINE> self.registerEvent()
Trade monitor.
62599058d99f1b3c44d06c3a
class ApplicationGatewaySslCertificate(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'type': {'readonly': True}, 'public_cert_data': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'data': {'key': 'properties.data', 'type': 'str'}, 'password': {'key': 'properties.password', 'type': 'str'}, 'public_cert_data': {'key': 'properties.publicCertData', 'type': 'str'}, 'key_vault_secret_id': {'key': 'properties.keyVaultSecretId', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, data: Optional[str] = None, password: Optional[str] = None, key_vault_secret_id: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ApplicationGatewaySslCertificate, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.etag = None <NEW_LINE> self.type = None <NEW_LINE> self.data = data <NEW_LINE> self.password = password <NEW_LINE> self.public_cert_data = None <NEW_LINE> self.key_vault_secret_id = key_vault_secret_id <NEW_LINE> self.provisioning_state = None
SSL certificates of an application gateway. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param name: Name of the SSL certificate that is unique within an Application Gateway. :type name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :param data: Base-64 encoded pfx certificate. Only applicable in PUT Request. :type data: str :param password: Password for the pfx file specified in data. Only applicable in PUT request. :type password: str :ivar public_cert_data: Base-64 encoded Public cert data corresponding to pfx specified in data. Only applicable in GET request. :vartype public_cert_data: str :param key_vault_secret_id: Secret Id of (base-64 encoded unencrypted pfx) 'Secret' or 'Certificate' object stored in KeyVault. :type key_vault_secret_id: str :ivar provisioning_state: The provisioning state of the SSL certificate resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2020_05_01.models.ProvisioningState
6259905882261d6c52730997
class ReverseProxied: <NEW_LINE> <INDENT> def __init__(self, app): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _extract_prefix(environ: dict) -> str: <NEW_LINE> <INDENT> path = environ.get("HTTP_X_SCRIPT_NAME", "") <NEW_LINE> if not path: <NEW_LINE> <INDENT> path = environ.get("HTTP_X_FORWARDED_PREFIX", "") <NEW_LINE> <DEDENT> if path and not path.startswith("/"): <NEW_LINE> <INDENT> path = "/" + path <NEW_LINE> <DEDENT> return path <NEW_LINE> <DEDENT> def __call__(self, environ, start_response): <NEW_LINE> <INDENT> script_name = self._extract_prefix(environ) <NEW_LINE> if script_name: <NEW_LINE> <INDENT> environ["SCRIPT_NAME"] = script_name <NEW_LINE> path_info = environ["PATH_INFO"] <NEW_LINE> if path_info.startswith(script_name): <NEW_LINE> <INDENT> environ["PATH_INFO"] = path_info[len(script_name) :] <NEW_LINE> <DEDENT> <DEDENT> scheme = environ.get("HTTP_X_SCHEME", "") <NEW_LINE> if scheme: <NEW_LINE> <INDENT> environ["wsgi.url_scheme"] = scheme <NEW_LINE> <DEDENT> return self.app(environ, start_response)
Create a Proxy pattern https://microservices.io/patterns/apigateway.html. You can run the microservice A in your local machine in http://localhost:5000/my-endpoint/ If you deploy your microservice, in some cases this microservice run behind a cluster, a gateway... and this gateway redirect traffic to the microservice with a specific path like yourdomian.com/my-ms-a/my-endpoint/. This class understand this path if the gateway send a specific header
6259905899cbb53fe6832478
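A minimal sketch of how WSGI middleware like the ReverseProxied class above is commonly wired into a Flask app; the app and route are hypothetical. The middleware only rewrites SCRIPT_NAME, PATH_INFO, and wsgi.url_scheme from the gateway's headers.

from flask import Flask

app = Flask(__name__)

# Honour X-Script-Name / X-Forwarded-Prefix and X-Scheme headers set by the gateway
app.wsgi_app = ReverseProxied(app.wsgi_app)

@app.route("/my-endpoint/")
def my_endpoint():
    return "ok"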
class PuchaseReportDetails(object): <NEW_LINE> <INDENT> def __init__(self, vendorName, vendorAddress, vendorGstin, vendorStateCode, billNo, billDate, dueDate, payBy, total, tax, amountPaid, remarks, status, cancel): <NEW_LINE> <INDENT> self.vendorName = _constants.valueWrapper(vendorName, False) <NEW_LINE> self.vendorAddress = _constants.valueWrapper(vendorAddress, False) <NEW_LINE> self.vendorGstin = _constants.valueWrapper(vendorGstin, False) <NEW_LINE> self.vendorStateCode = _constants.valueWrapper(vendorStateCode, False) <NEW_LINE> self.billNo = _constants.valueWrapper(billNo, False) <NEW_LINE> self.billDate = _constants.valueWrapper(billDate, False) <NEW_LINE> self.dueDate = _constants.valueWrapper(dueDate, False) <NEW_LINE> self.total = _constants.valueWrapper(total, False) <NEW_LINE> self.paidBy = _constants.valueWrapper(payBy, False) <NEW_LINE> self.tax = _constants.valueWrapper(tax, False) <NEW_LINE> self.amount = _constants.valueWrapper(float(total) - float(tax), False) <NEW_LINE> self.amountPaid = _constants.valueWrapper(amountPaid, False) <NEW_LINE> self.balance = _constants.valueWrapper(float(total) - float(amountPaid), False) <NEW_LINE> self.remarks = _constants.valueWrapper(remarks, False) <NEW_LINE> self.status = _constants.valueWrapper(status, False) <NEW_LINE> self.cancelReason = _constants.valueWrapper(cancel, False)
Wrapper class for adding purchase information
6259905845492302aabfda71
class PlayersViewSet(FiltersMixin, viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = PlayerSerializer <NEW_LINE> pagination_class = ResultSetPagination <NEW_LINE> filter_backends = (filters.OrderingFilter,) <NEW_LINE> ordering_fields = ('id', 'name', 'update_ts') <NEW_LINE> ordering = ('id',) <NEW_LINE> filter_mappings = { 'id': 'id', 'name': 'name__icontains', 'team_id': 'teams', 'install_ts': 'install_ts', 'update_ts': 'update_ts', 'update_ts__gte': 'update_ts__gte', 'update_ts__lte': 'update_ts__lte', } <NEW_LINE> filter_validation_schema = players_query_schema <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> query_params = self.request.query_params <NEW_LINE> url_params = self.kwargs <NEW_LINE> queryset_filters = self.get_db_filters(url_params, query_params) <NEW_LINE> db_filters = queryset_filters['db_filters'] <NEW_LINE> db_excludes = queryset_filters['db_excludes'] <NEW_LINE> queryset = Player.objects.prefetch_related( 'teams' ).all() <NEW_LINE> return queryset.filter(**db_filters).exclude(**db_excludes)
This viewset automatically provides `list`, `create`, `retrieve`, `update` and `destroy` actions.
625990584428ac0f6e659ad5
class NsemPsaVariableSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = NsemPsaVariable <NEW_LINE> fields = '__all__'
Named Storm Event Model PSA Variable Serializer
62599058cc0a2c111447c586
class Passage2Number(): <NEW_LINE> <INDENT> pass
To be used as a mapping function in COMPARATIVE.
6259905807d97122c4218245
class EmailHandlerTests(SimpleTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> prep_db() <NEW_LINE> self.factory = RequestFactory() <NEW_LINE> self.user = CRITsUser.objects(username=TUSER_NAME).first() <NEW_LINE> self.user.sources.append(TSRC) <NEW_LINE> self.user.save() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> clean_db() <NEW_LINE> <DEDENT> def testEmailRawAdd(self): <NEW_LINE> <INDENT> result = handlers.handle_pasted_eml(EML_DATA, TSRC, None, self.user, "Test") <NEW_LINE> self.assertEqual(result['status'], True) <NEW_LINE> self.assertEqual(result['data']['x_mailer'],"YahooMailWebService/0.8.121.416") <NEW_LINE> newdata = "" <NEW_LINE> for line in EML_DATA.split('\n'): <NEW_LINE> <INDENT> newdata += line.lstrip() + "\n" <NEW_LINE> <DEDENT> result = handlers.handle_pasted_eml(newdata, TSRC, None, self.user, "Test") <NEW_LINE> self.assertEqual(result['status'], True) <NEW_LINE> self.assertEqual(result['data']['x_mailer'],"YahooMailWebService/0.8.121.416") <NEW_LINE> <DEDENT> def testEmailAdd(self): <NEW_LINE> <INDENT> result = handlers.handle_eml(EML_DATA, TSRC, None, self.user, "Test") <NEW_LINE> self.assertEqual(result['status'], True) <NEW_LINE> self.assertEqual(result['data']['x_mailer'],"YahooMailWebService/0.8.121.416")
Email test class.
62599058009cb60464d02acf
class Characteristic(BaseMFDfromSlip): <NEW_LINE> <INDENT> def setUp(self, mfd_conf): <NEW_LINE> <INDENT> self.mfd_model = 'Characteristic' <NEW_LINE> self.mfd_weight = mfd_conf['Model_Weight'] <NEW_LINE> self.bin_width = mfd_conf['MFD_spacing'] <NEW_LINE> self.mmin = None <NEW_LINE> self.mmax = None <NEW_LINE> self.mmax_sigma = None <NEW_LINE> self.lower_bound = mfd_conf['Lower_Bound'] <NEW_LINE> self.upper_bound = mfd_conf['Upper_Bound'] <NEW_LINE> self.sigma = mfd_conf['Sigma'] <NEW_LINE> self.occurrence_rate = None <NEW_LINE> <DEDENT> def get_mmax(self, mfd_conf, msr, rake, area): <NEW_LINE> <INDENT> if mfd_conf['Maximum_Magnitude']: <NEW_LINE> <INDENT> self.mmax = mfd_conf['Maximum_Magnitude'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mmax = msr.get_median_mag(area, rake) <NEW_LINE> <DEDENT> self.mmax_sigma = (mfd_conf.get('Maximum_Magnitude_Uncertainty', None) or msr.get_std_dev_mag(None, rake)) <NEW_LINE> <DEDENT> def get_mfd(self, slip, area, shear_modulus=30.0): <NEW_LINE> <INDENT> moment_rate = (shear_modulus * 1.E9) * (area * 1.E6) * (slip / 1000.) <NEW_LINE> moment_mag = _scale_moment(self.mmax, in_nm=True) <NEW_LINE> characteristic_rate = moment_rate / moment_mag <NEW_LINE> if self.sigma and (fabs(self.sigma) > 1E-5): <NEW_LINE> <INDENT> self.mmin = self.mmax + (self.lower_bound * self.sigma) <NEW_LINE> mag_upper = self.mmax + (self.upper_bound * self.sigma) <NEW_LINE> mag_range = np.arange(self.mmin, mag_upper + self.bin_width, self.bin_width) <NEW_LINE> self.occurrence_rate = characteristic_rate * ( truncnorm.cdf(mag_range + (self.bin_width / 2.), self.lower_bound, self.upper_bound, loc=self.mmax, scale=self.sigma) - truncnorm.cdf(mag_range - (self.bin_width / 2.), self.lower_bound, self.upper_bound, loc=self.mmax, scale=self.sigma)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mmin = self.mmax <NEW_LINE> self.occurrence_rate = np.array([characteristic_rate], dtype=float) <NEW_LINE> <DEDENT> return self.mmin, self.bin_width, self.occurrence_rate
Class to implement the characteristic earthquake model assuming a truncated Gaussian distribution :param str mfd_model: Type of magnitude frequency distribution :param float mfd_weight: Weight of the mfd distribution (for subsequent logic tree processing) :param float bin_width: Width of the magnitude bin (rates are given for the centre point) :param float mmin: Minimum magnitude :param float mmax: Maximum magnitude :param float mmax_sigma: Uncertainty on maximum magnitude :param float lower_bound: Lower bound of Gaussian distribution (as number of standard deviations) :param float upper_bound: Upper bound of Gaussian distribution (as number of standard deviations) :param float sigma: Standard deviation (in magnitude units) of the Gaussian distribution :param numpy.ndarray occurrence_rate: Activity rates for magnitude in the range mmin to mmax in steps of bin_width
625990587b25080760ed87ac
class PrivateEndpointListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[PrivateEndpoint]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["PrivateEndpoint"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(PrivateEndpointListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = None
Response for the ListPrivateEndpoints API service call. Variables are only populated by the server, and will be ignored when sending a request. :param value: A list of private endpoint resources in a resource group. :type value: list[~azure.mgmt.network.v2020_03_01.models.PrivateEndpoint] :ivar next_link: The URL to get the next set of results. :vartype next_link: str
62599058a8ecb033258727b2
class BloomFilter(): <NEW_LINE> <INDENT> def __init__(self, items_count): <NEW_LINE> <INDENT> self.size = self.get_size(items_count, ERR_RATE) <NEW_LINE> self.hash_count = self.get_hash_count(self.size,items_count) <NEW_LINE> self.bit_array = bitarray(self.size) <NEW_LINE> self.bit_array.setall(0) <NEW_LINE> <DEDENT> def add(self, item): <NEW_LINE> <INDENT> for i in range(self.hash_count): <NEW_LINE> <INDENT> digest = mmh3.hash(item,i) % self.size <NEW_LINE> self.bit_array[digest] = True <NEW_LINE> <DEDENT> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> for i in range(self.hash_count): <NEW_LINE> <INDENT> digest = mmh3.hash(item,i)%self.size <NEW_LINE> if self.bit_array[digest]==False: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_size(self,n,p): <NEW_LINE> <INDENT> m = -(n * math.log(p))/(math.log(2)**2) <NEW_LINE> return int(m) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_hash_count(self, m, n): <NEW_LINE> <INDENT> k = (m/n) * math.log(2) <NEW_LINE> return int(k)
Class for Bloom filter, using murmur3 hash function
62599058dd821e528d6da44d
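A minimal usage sketch for the BloomFilter class above, assuming it lives in a module where ERR_RATE is defined (0.01 here is a hypothetical choice) and the mmh3 and bitarray packages are installed. The sizing follows the standard formulas m = -n ln(p) / (ln 2)^2 and k = (m/n) ln 2 implemented in get_size and get_hash_count.

import math
import mmh3                      # murmur3 hash used by add/__contains__
from bitarray import bitarray

ERR_RATE = 0.01                  # assumed module-level false-positive rate

bf = BloomFilter(items_count=1000)
bf.add("alice@example.com")
print("alice@example.com" in bf)   # True
print("bob@example.com" in bf)     # False with high probability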
class Conv2d_same_leaky(_ConvNd): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, relu=True): <NEW_LINE> <INDENT> kernel_size = _pair(kernel_size) <NEW_LINE> stride = _pair(stride) <NEW_LINE> padding = _pair(padding) <NEW_LINE> dilation = _pair(dilation) <NEW_LINE> self.relu = relu <NEW_LINE> super(Conv2d_same_leaky, self).__init__( in_channels, out_channels, kernel_size, stride, padding, dilation, False, _pair(0), groups, bias) <NEW_LINE> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> out = conv2d_same_padding(input, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups) <NEW_LINE> if self.relu: <NEW_LINE> <INDENT> out = F.leaky_relu(out, 0.2) <NEW_LINE> <DEDENT> return out
from CSDN
6259905876e4537e8c3f0b26
class Plant(Biology): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Biology, self).__init__() <NEW_LINE> <DEDENT> def collect_sunshine(self): <NEW_LINE> <INDENT> con = "%s %s" % (self.__class__.__name__,sys._getframe().f_code.co_name) <NEW_LINE> pylog.info(con) <NEW_LINE> return con
A Plant class.
62599058a17c0f6771d5d66e
class OrderBook(Model): <NEW_LINE> <INDENT> def __init__(self, instrument: InstrumentName = sentinel, time: DateTime = sentinel, unix_time: DateTime = sentinel, price: PriceValue = sentinel, bucket_width: PriceValue = sentinel, buckets: ArrayOrderBookBucket = sentinel): <NEW_LINE> <INDENT> Model.__init__(**locals())
The representation of an instrument's order book at a point in time Attributes: instrument: :class:`~async_v20.InstrumentName` The order book's instrument time: :class:`~async_v20.DateTime` The time when the order book snapshot was created. unix_time: :class:`~async_v20.DateTime` The time when the order book snapshot was created in unix format. price: :class:`~async_v20.PriceValue` The price (midpoint) for the order book's instrument at the time of the order book snapshot bucket_width: :class:`~async_v20.PriceValue` The price width for each bucket. Each bucket covers the price range from the bucket's price to the bucket's price + bucketWidth. buckets: ( :class:`~async_v20.OrderBookBucket`, ...) The partitioned order book, divided into buckets using a default bucket width. These buckets are only provided for price ranges which actually contain order or position data.
62599058ac7a0e7691f73a7b
class Result(Store): <NEW_LINE> <INDENT> code = Field(target='resultCode') <NEW_LINE> basket = EmbeddedStoreField(target='basket', store_class='Basket')
Helper class to abstract the `BasketModificationResult` reaktor object.
62599058b57a9660fecd3015
class DataprocProjectsRegionsJobsPatchRequest(_messages.Message): <NEW_LINE> <INDENT> job = _messages.MessageField('Job', 1) <NEW_LINE> jobId = _messages.StringField(2, required=True) <NEW_LINE> projectId = _messages.StringField(3, required=True) <NEW_LINE> region = _messages.StringField(4, required=True) <NEW_LINE> updateMask = _messages.StringField(5)
A DataprocProjectsRegionsJobsPatchRequest object. Fields: job: A Job resource to be passed as the request body. jobId: Required The job ID. projectId: Required The ID of the Google Cloud Platform project that the job belongs to. region: Required The Cloud Dataproc region in which to handle the request. updateMask: Required Specifies the path, relative to <code>Job</code>, of the field to update. For example, to update the labels of a Job the <code>update_mask</code> parameter would be specified as <code>labels</code>, and the PATCH request body would specify the new value. <strong>Note:</strong> Currently, <code>labels</code> is the only field that can be updated.
62599058b7558d58954649f8
class BaseDatatableView(JSONResponseMixin, TemplateView): <NEW_LINE> <INDENT> model = None <NEW_LINE> columns = [] <NEW_LINE> order_columns = [] <NEW_LINE> max_display_length = 100 <NEW_LINE> def initialize(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_order_columns(self): <NEW_LINE> <INDENT> return self.order_columns <NEW_LINE> <DEDENT> def get_columns(self): <NEW_LINE> <INDENT> return self.columns <NEW_LINE> <DEDENT> def render_column(self, row, column): <NEW_LINE> <INDENT> if hasattr(row, 'get_%s_display' % column): <NEW_LINE> <INDENT> text = getattr(row, 'get_%s_display' % column)() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> text = getattr(row, column) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> obj = row <NEW_LINE> for part in column.split('.'): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> obj = getattr(obj, part) <NEW_LINE> <DEDENT> text = obj <NEW_LINE> <DEDENT> <DEDENT> if hasattr(row, 'get_absolute_url'): <NEW_LINE> <INDENT> return '<a href="%s">%s</a>' % (row.get_absolute_url(), text) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return text <NEW_LINE> <DEDENT> <DEDENT> def ordering(self, qs): <NEW_LINE> <INDENT> order_data = self.json_data.get('order', []) <NEW_LINE> order = [] <NEW_LINE> order_columns = self.get_order_columns() <NEW_LINE> for item in order_data: <NEW_LINE> <INDENT> column = item['column'] <NEW_LINE> column_dir = item['dir'] <NEW_LINE> sdir = '-' if column_dir == 'desc' else '' <NEW_LINE> sortcol = order_columns[column] <NEW_LINE> ann_kargs = { sortcol + '_foo': Count(sortcol) } <NEW_LINE> order.append('%s%s' % (sdir, sortcol)) <NEW_LINE> <DEDENT> if order: <NEW_LINE> <INDENT> return qs.order_by(*order) <NEW_LINE> <DEDENT> return qs <NEW_LINE> <DEDENT> def paging(self, qs): <NEW_LINE> <INDENT> limit = min(int(self.json_data.get('length', 10)), self.max_display_length) <NEW_LINE> if limit == -1: <NEW_LINE> <INDENT> return qs <NEW_LINE> <DEDENT> start = int(self.json_data.get('start', 0)) <NEW_LINE> offset = start + limit <NEW_LINE> return qs[start:offset] <NEW_LINE> <DEDENT> def get_initial_queryset(self): <NEW_LINE> <INDENT> if not self.model: <NEW_LINE> <INDENT> raise NotImplementedError("Need to provide a model or implement get_initial_queryset!") <NEW_LINE> <DEDENT> return self.model.objects.all() <NEW_LINE> <DEDENT> def filter_queryset(self, qs): <NEW_LINE> <INDENT> return qs <NEW_LINE> <DEDENT> def prepare_results(self, qs): <NEW_LINE> <INDENT> data = [] <NEW_LINE> for item in qs: <NEW_LINE> <INDENT> data.append([self.render_column(item, column) for column in self.get_columns()]) <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def get_context_data(self, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.initialize(*args, **kwargs) <NEW_LINE> qs = self.get_initial_queryset() <NEW_LINE> records_total = qs.count() <NEW_LINE> qs = self.filter_queryset(qs) <NEW_LINE> records_filtered = qs.count() <NEW_LINE> qs = self.ordering(qs) <NEW_LINE> qs = self.paging(qs) <NEW_LINE> data = self.prepare_results(qs) <NEW_LINE> ret = {'draw': int(self.json_data.get('draw', 0)), 'recordsTotal': records_total, 'recordsFiltered': records_filtered, 'data': data } <NEW_LINE> <DEDENT> except (ValidationError, DataError): <NEW_LINE> <INDENT> ret = {'draw': int(self.json_data.get('draw', 0)), 'recordsTotal': records_total, 'recordsFiltered': 0, 'data': [] } <NEW_LINE> <DEDENT> return ret
JSON data for datatables
6259905801c39578d7f14204
class TrainableHanoi(Hanoi, base.TrainableDenseToDenseEnv): <NEW_LINE> <INDENT> def __init__(self, n_disks=None, modeled_env=None, predict_delta=True, done_threshold=0.5, reward_threshold=0.5): <NEW_LINE> <INDENT> super().__init__(n_disks=modeled_env.n_disks, reward_for_solved=modeled_env._reward_for_solved, reward_for_invalid_action= modeled_env._reward_for_invalid_action) <NEW_LINE> self.observation_space.dtype = np.float32 <NEW_LINE> self.done_threshold = done_threshold <NEW_LINE> self.reward_threshold = reward_threshold <NEW_LINE> self._predict_delta = predict_delta <NEW_LINE> self._perfect_env = modeled_env <NEW_LINE> <DEDENT> def transform_predicted_observations( self, observations, predicted_observation ): <NEW_LINE> <INDENT> if self._predict_delta: <NEW_LINE> <INDENT> predicted_observation = observations + predicted_observation <NEW_LINE> <DEDENT> clipped_predicted_observation = np.clip( predicted_observation, self.observation_space.low, self.observation_space.high ) <NEW_LINE> return np.around(clipped_predicted_observation)
Hanoi tower environment based on Neural Network.
625990584a966d76dd5f048c
class fixFormatter(argparse.HelpFormatter): <NEW_LINE> <INDENT> def _split_lines(self, text, width): <NEW_LINE> <INDENT> if text.startswith("M|"): <NEW_LINE> <INDENT> return text[2:].splitlines() <NEW_LINE> <DEDENT> return argparse.HelpFormatter._split_lines(self, text, width)
Class to allow multi-line help statements in argparse.
6259905832920d7e50bc75e1
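A minimal usage sketch for the fixFormatter class above: help text prefixed with "M|" keeps its own line breaks, while other help strings are wrapped normally. The program and option names are hypothetical.

import argparse

parser = argparse.ArgumentParser(prog="tool", formatter_class=fixFormatter)
parser.add_argument(
    "--mode",
    help="M|first line of help\nsecond line, shown exactly as written",
)
parser.print_help()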
class Cache: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._redis = redis.Redis() <NEW_LINE> self._redis.flushdb() <NEW_LINE> <DEDENT> @call_history <NEW_LINE> @count_calls <NEW_LINE> def store(self, data: UnionOfTypes) -> str: <NEW_LINE> <INDENT> key = str(uuid4()) <NEW_LINE> self._redis.mset({key: data}) <NEW_LINE> return key <NEW_LINE> <DEDENT> def get(self, key: str, fn: Optional[Callable] = None) -> UnionOfTypes: <NEW_LINE> <INDENT> if fn: <NEW_LINE> <INDENT> return fn(self._redis.get(key)) <NEW_LINE> <DEDENT> data = self._redis.get(key) <NEW_LINE> return data <NEW_LINE> <DEDENT> def get_str(self, string: bytes) -> str: <NEW_LINE> <INDENT> return string.decode("utf-8") <NEW_LINE> <DEDENT> def get_int(self, number: int) -> int: <NEW_LINE> <INDENT> result = 0 * 256 + int(number) <NEW_LINE> return result
Represents a class called Cache with a protected instance attribute, _redis, that holds the Redis client.
6259905829b78933be26ab92
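A minimal usage sketch for the Cache class above, assuming a local Redis server is running and that the call_history/count_calls decorators and the UnionOfTypes alias are defined in the same module.

cache = Cache()

key = cache.store("hello")                 # returns a uuid4 string key
print(cache.get(key))                      # b'hello' (raw bytes from Redis)
print(cache.get(key, fn=cache.get_str))    # 'hello'

num_key = cache.store(42)
print(cache.get(num_key, fn=int))          # 42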
class ComputeTargetInstancesListRequest(messages.Message): <NEW_LINE> <INDENT> filter = messages.StringField(1) <NEW_LINE> maxResults = messages.IntegerField(2, variant=messages.Variant.UINT32, default=500) <NEW_LINE> pageToken = messages.StringField(3) <NEW_LINE> project = messages.StringField(4, required=True) <NEW_LINE> zone = messages.StringField(5, required=True)
A ComputeTargetInstancesListRequest object. Fields: filter: Filter expression for filtering listed resources. maxResults: Maximum count of results to be returned. pageToken: Tag returned by a previous list request when that list was truncated to maxResults. Used to continue a previous list request. project: Name of the project scoping this request. zone: Name of the zone scoping this request.
62599058462c4b4f79dbcfa0
class MediaProcessTaskResult(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Type = None <NEW_LINE> self.TranscodeTask = None <NEW_LINE> self.AnimatedGraphicTask = None <NEW_LINE> self.SnapshotByTimeOffsetTask = None <NEW_LINE> self.SampleSnapshotTask = None <NEW_LINE> self.ImageSpriteTask = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Type = params.get("Type") <NEW_LINE> if params.get("TranscodeTask") is not None: <NEW_LINE> <INDENT> self.TranscodeTask = MediaProcessTaskTranscodeResult() <NEW_LINE> self.TranscodeTask._deserialize(params.get("TranscodeTask")) <NEW_LINE> <DEDENT> if params.get("AnimatedGraphicTask") is not None: <NEW_LINE> <INDENT> self.AnimatedGraphicTask = MediaProcessTaskAnimatedGraphicResult() <NEW_LINE> self.AnimatedGraphicTask._deserialize(params.get("AnimatedGraphicTask")) <NEW_LINE> <DEDENT> if params.get("SnapshotByTimeOffsetTask") is not None: <NEW_LINE> <INDENT> self.SnapshotByTimeOffsetTask = MediaProcessTaskSnapshotByTimeOffsetResult() <NEW_LINE> self.SnapshotByTimeOffsetTask._deserialize(params.get("SnapshotByTimeOffsetTask")) <NEW_LINE> <DEDENT> if params.get("SampleSnapshotTask") is not None: <NEW_LINE> <INDENT> self.SampleSnapshotTask = MediaProcessTaskSampleSnapshotResult() <NEW_LINE> self.SampleSnapshotTask._deserialize(params.get("SampleSnapshotTask")) <NEW_LINE> <DEDENT> if params.get("ImageSpriteTask") is not None: <NEW_LINE> <INDENT> self.ImageSpriteTask = MediaProcessTaskImageSpriteResult() <NEW_LINE> self.ImageSpriteTask._deserialize(params.get("ImageSpriteTask"))
Task query result type.
625990581f037a2d8b9e5339
class Exporter(Process): <NEW_LINE> <INDENT> def __init__(self, file, input_queue): <NEW_LINE> <INDENT> self.file = open(file, 'w') <NEW_LINE> self.input_q = input_queue <NEW_LINE> super().__init__() <NEW_LINE> <DEDENT> def export(self, item): <NEW_LINE> <INDENT> line = ','.join(item) + '\n' <NEW_LINE> self.file.write(line) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.file.close() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = self.input_q.get(timeout=1) <NEW_LINE> <DEDENT> except queue.Empty: <NEW_LINE> <INDENT> self.close() <NEW_LINE> return <NEW_LINE> <DEDENT> self.export(item)
Exporter class implementation.
625990582c8b7c6e89bd4d89
class SecureHTTPServer(HTTPServer, object): <NEW_LINE> <INDENT> def __init__(self, address, handler, cert_file): <NEW_LINE> <INDENT> super(SecureHTTPServer, self).__init__(address, handler) <NEW_LINE> self.socket = ssl.wrap_socket(self.socket, certfile=cert_file)
An HTTP server object that supports HTTPS.
625990580a50d4780f70688c
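A minimal usage sketch for the SecureHTTPServer class above, assuming a PEM file with certificate and key exists at the hypothetical path cert.pem. Note that ssl.wrap_socket was removed in Python 3.12, so this class relies on an older ssl module.

from http.server import SimpleHTTPRequestHandler

# Serve the current directory over HTTPS on port 8443 (cert.pem is hypothetical)
httpd = SecureHTTPServer(("0.0.0.0", 8443), SimpleHTTPRequestHandler, "cert.pem")
httpd.serve_forever()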
class Parser(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def from_spec(spec): <NEW_LINE> <INDENT> if isinstance(spec, Parser): <NEW_LINE> <INDENT> return spec <NEW_LINE> <DEDENT> elif isinstance(spec, dict): <NEW_LINE> <INDENT> return SpecParser(spec) <NEW_LINE> <DEDENT> elif callable(spec): <NEW_LINE> <INDENT> return CallableParser(spec) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AssertionError("invalid specification: %r" % spec) <NEW_LINE> <DEDENT> <DEDENT> def parse(self, key_path, raw_config): <NEW_LINE> <INDENT> raise NotImplementedError
Base for config parsers.
625990587cff6e4e811b6fde
class outgoing(object): <NEW_LINE> <INDENT> def __init__(self, repo, commonheads=None, missingheads=None, missingroots=None): <NEW_LINE> <INDENT> assert None in (commonheads, missingroots) <NEW_LINE> cl = repo.changelog <NEW_LINE> if missingheads is None: <NEW_LINE> <INDENT> missingheads = cl.heads() <NEW_LINE> <DEDENT> if missingroots: <NEW_LINE> <INDENT> discbases = [] <NEW_LINE> for n in missingroots: <NEW_LINE> <INDENT> discbases.extend([p for p in cl.parents(n) if p != nullid]) <NEW_LINE> <DEDENT> csets, roots, heads = cl.nodesbetween(missingroots, missingheads) <NEW_LINE> included = set(csets) <NEW_LINE> missingheads = heads <NEW_LINE> commonheads = [n for n in discbases if n not in included] <NEW_LINE> <DEDENT> elif not commonheads: <NEW_LINE> <INDENT> commonheads = [nullid] <NEW_LINE> <DEDENT> self.commonheads = commonheads <NEW_LINE> self.missingheads = missingheads <NEW_LINE> self._revlog = cl <NEW_LINE> self._common = None <NEW_LINE> self._missing = None <NEW_LINE> self.excluded = [] <NEW_LINE> <DEDENT> def _computecommonmissing(self): <NEW_LINE> <INDENT> sets = self._revlog.findcommonmissing(self.commonheads, self.missingheads) <NEW_LINE> self._common, self._missing = sets <NEW_LINE> <DEDENT> @util.propertycache <NEW_LINE> def common(self): <NEW_LINE> <INDENT> if self._common is None: <NEW_LINE> <INDENT> self._computecommonmissing() <NEW_LINE> <DEDENT> return self._common <NEW_LINE> <DEDENT> @util.propertycache <NEW_LINE> def missing(self): <NEW_LINE> <INDENT> if self._missing is None: <NEW_LINE> <INDENT> self._computecommonmissing() <NEW_LINE> <DEDENT> return self._missing
Represents the set of nodes present in a local repo but not in a (possibly) remote one. Members: missing is a list of all nodes present in local but not in remote. common is a list of all nodes shared between the two repos. excluded is the list of missing changeset that shouldn't be sent remotely. missingheads is the list of heads of missing. commonheads is the list of heads of common. The sets are computed on demand from the heads, unless provided upfront by discovery.
62599058e64d504609df9e9d
class TaskParamWidget(BasicDialog): <NEW_LINE> <INDENT> index = 0 <NEW_LINE> name = '' <NEW_LINE> paramDict = {} <NEW_LINE> valueEdit = {} <NEW_LINE> def __init__(self, paramDict, parent=None): <NEW_LINE> <INDENT> super(TaskParamWidget, self).__init__(parent) <NEW_LINE> self.valueEdit = {} <NEW_LINE> self.paramDict = paramDict <NEW_LINE> self.initUi() <NEW_LINE> <DEDENT> def initUi(self): <NEW_LINE> <INDENT> QWidget.__init__(self) <NEW_LINE> self.setWindowTitle(u'设置参数') <NEW_LINE> self.resize(300,400) <NEW_LINE> gridlayout = QGridLayout() <NEW_LINE> i = 0 <NEW_LINE> lName = QLabel(u'参数') <NEW_LINE> lValue = QLabel(u'数值') <NEW_LINE> gridlayout.addWidget(lName, i, 0 ) <NEW_LINE> gridlayout.addWidget(lValue, i, 1 ) <NEW_LINE> for name in self.paramDict: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> label = QLabel(name) <NEW_LINE> self.valueEdit[name] = QLineEdit() <NEW_LINE> self.valueEdit[name].setText(str(self.paramDict[name])) <NEW_LINE> self.valueEdit[name].setFocusPolicy(QtCore.Qt.NoFocus) <NEW_LINE> gridlayout.addWidget(label, i, 0 ) <NEW_LINE> gridlayout.addWidget(self.valueEdit[name], i, 1) <NEW_LINE> <DEDENT> vbox = QVBoxLayout() <NEW_LINE> vbox.addLayout(gridlayout) <NEW_LINE> self.addButton(vbox) <NEW_LINE> self.setLayout(vbox)
Strategy configuration dialog.
62599058097d151d1a2c2607
class Offset(BaseType): <NEW_LINE> <INDENT> size = 4 <NEW_LINE> fmt = 'I' <NEW_LINE> def tostring(self, val): <NEW_LINE> <INDENT> return '%10d (%08X)' % (val, val)
An offset in an FRES file.
6259905807f4c71912bb09d6
class SMSMessage: <NEW_LINE> <INDENT> __PHONE_NUMBER_PATTERN = "^\+?\d+$" <NEW_LINE> def __init__(self, phone_number, data): <NEW_LINE> <INDENT> if phone_number is None: <NEW_LINE> <INDENT> raise ValueError("Phone number cannot be None") <NEW_LINE> <DEDENT> if data is None: <NEW_LINE> <INDENT> raise ValueError("Data cannot be None") <NEW_LINE> <DEDENT> if not re.compile(SMSMessage.__PHONE_NUMBER_PATTERN).match(phone_number): <NEW_LINE> <INDENT> raise ValueError("Invalid phone number") <NEW_LINE> <DEDENT> self.__phone_number = phone_number <NEW_LINE> self.__data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def phone_number(self): <NEW_LINE> <INDENT> return self.__phone_number <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self.__data <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return {"Phone number: ": self.__phone_number, "Data: ": self.__data}
This class represents an SMS message containing the phone number that sent the message and the content (data) of the message. This class is used within the library to read SMS sent to Cellular devices.
625990587b25080760ed87ad
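A minimal usage sketch for the SMSMessage class above with hypothetical values; phone numbers that fail the regular-expression check in the constructor raise ValueError.

msg = SMSMessage("+14155550100", "Hello from the device")
print(msg.phone_number)   # '+14155550100'
print(msg.data)           # 'Hello from the device'
print(msg.to_dict())      # {'Phone number: ': '+14155550100', 'Data: ': 'Hello from the device'}

try:
    SMSMessage("not-a-number", "oops")
except ValueError as exc:
    print(exc)            # Invalid phone number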
class TestIntegerColumn(unittest.TestCase): <NEW_LINE> <INDENT> def test_create(self): <NEW_LINE> <INDENT> self.assertRaises(TypeError, IntegerColumn, -1) <NEW_LINE> self.assertRaises(TypeError, IntegerColumn, maxvalue=-1) <NEW_LINE> <DEDENT> def test_check(self): <NEW_LINE> <INDENT> col = IntegerColumn(maxvalue=5) <NEW_LINE> self.assertFalse(col._check(-1)) <NEW_LINE> self.assertFalse(col._check(6)) <NEW_LINE> self.assertTrue(col._check(3)) <NEW_LINE> <DEDENT> def test_table(self): <NEW_LINE> <INDENT> class TestTable: <NEW_LINE> <INDENT> intfld = IntegerColumn() <NEW_LINE> <DEDENT> tt = TestTable() <NEW_LINE> with self.assertRaises(DatabaseValueError): <NEW_LINE> <INDENT> tt.intfld = -1 <NEW_LINE> <DEDENT> with self.assertRaises(DatabaseValueError): <NEW_LINE> <INDENT> tt.intfld = 900000000 <NEW_LINE> <DEDENT> with self.assertRaises(DatabaseValueError): <NEW_LINE> <INDENT> tt.intfld = 1.25 <NEW_LINE> <DEDENT> tt.intfld = False <NEW_LINE> self.assertEqual(tt.intfld, 0)
Test that the IntegerColumn class operates correctly.
6259905807f4c71912bb09d7
class StepSchedule(Schedule): <NEW_LINE> <INDENT> def __init__(self, step_config, change): <NEW_LINE> <INDENT> assert isinstance(step_config, list) and isinstance(change, list), "The arguments change and step_config must be lists." <NEW_LINE> assert len(step_config) == len(change), "The arguments change and step_config must have the same length." <NEW_LINE> self.step_config = step_config <NEW_LINE> self.change = change <NEW_LINE> self.steps = 0 <NEW_LINE> <DEDENT> def get_learning_rate(self, learning_rate, epoch): <NEW_LINE> <INDENT> if epoch in self.step_config: <NEW_LINE> <INDENT> self.steps = self.change[self.step_config.index(epoch)] <NEW_LINE> <DEDENT> if self.steps == 0: <NEW_LINE> <INDENT> return learning_rate <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.steps
Steps the learning rate over training time. To set a step schedule, pass as arguments ``step_config`` and ``change``. The schedule will set the learning rate at ``step[i]`` to ``change[i]``. For example, the call: .. code-block:: python schedule = Schedule(step_config=[2, 6], change=[0.6, 0.4]) will set the learning rate to 0.6 at step 2, and to 0.4 at step 6.
6259905816aa5153ce401a80
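A minimal sketch of how the schedule from the StepSchedule docstring behaves across epochs; the base learning rate of 0.1 is hypothetical.

schedule = StepSchedule(step_config=[2, 6], change=[0.6, 0.4])

base_lr = 0.1
for epoch in range(8):
    lr = schedule.get_learning_rate(base_lr, epoch)
    print(epoch, lr)
# Epochs 0-1 -> 0.1 (base rate), epochs 2-5 -> 0.6, epochs 6-7 -> 0.4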
class IndentedHelpFormatter (HelpFormatter): <NEW_LINE> <INDENT> def __init__(self, indent_increment=2, max_help_position=24, width=None, short_first=1): <NEW_LINE> <INDENT> HelpFormatter.__init__( self, indent_increment, max_help_position, width, short_first) <NEW_LINE> <DEDENT> def format_usage(self, usage): <NEW_LINE> <INDENT> return _("Usage: %s\n") % usage <NEW_LINE> <DEDENT> def format_heading(self, heading): <NEW_LINE> <INDENT> return "%*s%s:\n" % (self.current_indent, "", heading)
Format help with indented section bodies.
625990588e7ae83300eea629
class FakeSlicer(object): <NEW_LINE> <INDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return FakeMetadata(), np.random.standard_normal((3, 3)) <NEW_LINE> <DEDENT> def __call__(self, *args): <NEW_LINE> <INDENT> return FakeMetadata(), np.random.standard_normal((3, 3))
Fake slicer for mipp.
6259905824f1403a9268639d
class ProtocolError(WMR300Error): <NEW_LINE> <INDENT> pass
Communication protocol error.
62599058004d5f362081fabb
class mod_layer(object): <NEW_LINE> <INDENT> def __init__(self, l): <NEW_LINE> <INDENT> assert valid_layer(l), 'invalid layer %s' % l <NEW_LINE> self.layer = l <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '(layer %s)' % self.layer
Single footprint layer.
62599058435de62698e9d3a0
class HTMLField(models.TextField): <NEW_LINE> <INDENT> def formfield(self, **kwargs): <NEW_LINE> <INDENT> defaults = {'widget': mercury_widgets.TextareaMercury} <NEW_LINE> defaults.update(kwargs) <NEW_LINE> if defaults['widget'] == admin_widgets.AdminTextareaWidget: <NEW_LINE> <INDENT> defaults['widget'] = mercury_widgets.AdminTextareaMercury <NEW_LINE> <DEDENT> return super(HTMLField, self).formfield(**defaults)
A large string field for HTML content. It uses the mercury widget in forms.
625990586e29344779b01be8
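A minimal sketch of using the HTMLField above in a Django model; the Page model is hypothetical. In plain forms the field renders with TextareaMercury, and in the admin it swaps AdminTextareaWidget for AdminTextareaMercury.

from django.db import models

class Page(models.Model):
    title = models.CharField(max_length=200)
    body = HTMLField(blank=True)   # edited with the Mercury widget in forms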
class ptuple(object): <NEW_LINE> <INDENT> h=femSpace.mesh.hMin <NEW_LINE> def __init__(self,p): <NEW_LINE> <INDENT> self.p=p <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(tuple(self.p)) <NEW_LINE> <DEDENT> def __eq__(self,other): <NEW_LINE> <INDENT> return enorm(self.p - other.p) <= self.h
Define a dictionary key that treats points as equal if they are "close".
625990581b99ca4002290006
class ComputedStaves(BaseStaffDetector): <NEW_LINE> <INDENT> def __init__(self, staves, staffline_distance, staffline_thickness, staves_interpolated_y): <NEW_LINE> <INDENT> super(ComputedStaves, self).__init__() <NEW_LINE> self.staves = np.asarray(staves) <NEW_LINE> self.staffline_distance = np.asarray(staffline_distance) <NEW_LINE> self.staffline_thickness = np.asarray(staffline_thickness) <NEW_LINE> self.staves_interpolated_y_arr = np.asarray(staves_interpolated_y) <NEW_LINE> <DEDENT> @property <NEW_LINE> def staves_interpolated_y(self): <NEW_LINE> <INDENT> return self.staves_interpolated_y_arr <NEW_LINE> <DEDENT> def compute(self, session=None, feed_dict=None): <NEW_LINE> <INDENT> return self
Computed staves holder. The result of `BaseStaffDetector.compute()`. Holds NumPy arrays with the result of staff detection.
625990588a43f66fc4bf372a
class TestVelCtrlVsPosCtrl(DualAxisTest): <NEW_LINE> <INDENT> def run_test(self, axis0_ctx: AxisTestContext, axis1_ctx: AxisTestContext, logger): <NEW_LINE> <INDENT> load_ctx = axis0_ctx <NEW_LINE> driver_ctx = axis1_ctx <NEW_LINE> logger.debug("activating load on {}...".format(load_ctx.name)) <NEW_LINE> load_ctx.handle.controller.config.vel_integrator_gain = 0 <NEW_LINE> load_ctx.handle.controller.vel_integrator_torque = 0 <NEW_LINE> set_limits(load_ctx, logger, vel_limit=100000, current_limit=50) <NEW_LINE> load_ctx.handle.controller.set_vel_setpoint(0, 0) <NEW_LINE> request_state(load_ctx, AXIS_STATE_CLOSED_LOOP_CONTROL) <NEW_LINE> logger.debug("using {} as driver against load, vel=100000...".format(driver_ctx.name)) <NEW_LINE> set_limits(driver_ctx, logger, vel_limit=100000, current_limit=50) <NEW_LINE> init_pos = driver_ctx.handle.encoder.pos_estimate <NEW_LINE> driver_ctx.handle.controller.set_pos_setpoint(init_pos + 100000, 0, 0) <NEW_LINE> request_state(driver_ctx, AXIS_STATE_CLOSED_LOOP_CONTROL) <NEW_LINE> for _ in range(int(4000/5)): <NEW_LINE> <INDENT> logger.debug(str(driver_ctx.handle.motor.current_control.Iq_setpoint)) <NEW_LINE> time.sleep(0.005) <NEW_LINE> <DEDENT> test_assert_no_error(load_ctx) <NEW_LINE> test_assert_no_error(driver_ctx) <NEW_LINE> logger.debug("using {} as driver against load, vel=20000...".format(driver_ctx.name)) <NEW_LINE> set_limits(driver_ctx, logger, vel_limit=20000, current_limit=50) <NEW_LINE> init_pos = driver_ctx.handle.encoder.pos_estimate <NEW_LINE> driver_ctx.handle.controller.set_pos_setpoint(init_pos + 100000, 0, 0) <NEW_LINE> request_state(driver_ctx, AXIS_STATE_CLOSED_LOOP_CONTROL) <NEW_LINE> time.sleep(7) <NEW_LINE> odrive.utils.print_drv_regs("load motor ({})".format(load_ctx.name), load_ctx.handle.motor) <NEW_LINE> odrive.utils.print_drv_regs("driver motor ({})".format(driver_ctx.name), driver_ctx.handle.motor) <NEW_LINE> test_assert_no_error(load_ctx) <NEW_LINE> test_assert_no_error(driver_ctx)
Uses one ODrive as a load operating in velocity control mode. The other ODrive tries to "fight" against the load in position mode.
62599058e5267d203ee6ce8c
class RobotsCrawler(grab.spider.Spider): <NEW_LINE> <INDENT> def __init__(self, db, domainfile, *args, **kwargs): <NEW_LINE> <INDENT> self.domainfile = domainfile <NEW_LINE> self.db = db <NEW_LINE> super(RobotsCrawler, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def valid_response_code(self, code, task): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def task_generator(self): <NEW_LINE> <INDENT> for line in open(self.domainfile): <NEW_LINE> <INDENT> tokens = line.strip().split(',') <NEW_LINE> if len(tokens) != 2: <NEW_LINE> <INDENT> print("malformed line '%s'" % line) <NEW_LINE> break <NEW_LINE> <DEDENT> rank = int(tokens[0].strip()) - 1 <NEW_LINE> domain = tokens[1].strip() <NEW_LINE> if self.db.has_robots(domain): <NEW_LINE> <INDENT> print("Skipping domain '%s'", domain) <NEW_LINE> continue <NEW_LINE> <DEDENT> url = "http://%s/robots.txt" % domain <NEW_LINE> yield grab.spider.Task('download', url=url, domain=domain, rank=rank, root=True, raw=True) <NEW_LINE> <DEDENT> <DEDENT> def task_download(self, g, task): <NEW_LINE> <INDENT> if g.doc.error_code: <NEW_LINE> <INDENT> print('Request failed for "%s". Code %s Reason: %s' % (task.domain, g.doc.error_code, g.doc.error_msg)) <NEW_LINE> if task.root: <NEW_LINE> <INDENT> url = "http://www.%s/robots.txt" % task.domain <NEW_LINE> yield grab.spider.Task('download', url=url, domain=task.domain, rank=task.rank, root=False, raw=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.db.insert(task.domain, task.url, task.rank, -g.doc.error_code, g.doc.error_msg, None) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> resp = g.doc <NEW_LINE> self.db.insert(task.domain, task.url, task.rank, resp.code, resp.unicode_body(), resp.headers) <NEW_LINE> if g.doc.code == 404 and task.root: <NEW_LINE> <INDENT> url = "http://www.%s/robots.txt" % task.domain <NEW_LINE> yield grab.spider.Task('download', url=url, domain=task.domain, rank=task.rank, root=False, raw=True)
Simple grab spider that iterates through a list of URLs and stores the results in an SQLite database.
6259905891af0d3eaad3b3c5
class IbmPerfTool(object): <NEW_LINE> <INDENT> def __init__(self, ibmperf_dir=DEFAULT_DIR): <NEW_LINE> <INDENT> self._ibmperf_dir = os.path.abspath(ibmperf_dir) <NEW_LINE> try: <NEW_LINE> <INDENT> _LOGGER.info("Checking if driver installed.") <NEW_LINE> self._Run(_DDQ, []) <NEW_LINE> _LOGGER.info("Driver already installed.") <NEW_LINE> <DEDENT> except Error: <NEW_LINE> <INDENT> _LOGGER.info("Installing IBM Performance Inspector driver.") <NEW_LINE> self._Run(_TINSTALL, []) <NEW_LINE> <DEDENT> <DEDENT> def _Popen(self, cmd_line): <NEW_LINE> <INDENT> return subprocess.Popen(cmd_line, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self._ibmperf_dir) <NEW_LINE> <DEDENT> def _Run(self, toolname, args, expected_returncode=0): <NEW_LINE> <INDENT> tool_path = os.path.join(self._ibmperf_dir, toolname) <NEW_LINE> cmd_line = [tool_path] + args <NEW_LINE> _LOGGER.debug("Running command '%s'.", " ".join(cmd_line)) <NEW_LINE> cmd = self._Popen(cmd_line) <NEW_LINE> stdout, stderr = cmd.communicate() <NEW_LINE> returncode = cmd.returncode <NEW_LINE> if returncode != expected_returncode: <NEW_LINE> <INDENT> raise ExecutionFailed("'%s' returned code '%d'.\n STDOUT: %s\n" " STDERR: %s\n" % (toolname, returncode, stdout, stderr)) <NEW_LINE> <DEDENT> return stdout.splitlines()
Base class wrapper for IBM Performance Inspector tools. Provides utility functions for accessing the toolkit, and automatically checks if it is installed, trying to install it if necessary.
625990587cff6e4e811b6fe0
class Login(Page): <NEW_LINE> <INDENT> url = "xxxxx" <NEW_LINE> login_username_loc = (By.ID, "idToken1") <NEW_LINE> login_password_loc = (By.ID, "idToken2") <NEW_LINE> login_button_loc = (By.ID, "loginButton_0") <NEW_LINE> def login_username(self, username): <NEW_LINE> <INDENT> self.find_element(*self.login_username_loc).send_keys(username) <NEW_LINE> <DEDENT> def login_password(self, password): <NEW_LINE> <INDENT> self.find_element(*self.login_password_loc).send_keys(password) <NEW_LINE> <DEDENT> def login_button(self): <NEW_LINE> <INDENT> self.find_element(*self.login_button_loc).click() <NEW_LINE> <DEDENT> def user_login(self, username="bad_username", password="bad_password"): <NEW_LINE> <INDENT> self.open() <NEW_LINE> self.login_username(username) <NEW_LINE> self.login_password(password) <NEW_LINE> self.login_button() <NEW_LINE> sleep(1) <NEW_LINE> <DEDENT> login_success_user_loc = (By.ID, "currentUserId") <NEW_LINE> def user_login_success(self): <NEW_LINE> <INDENT> return self.find_element(*self.login_success_user_loc)
User login.
625990587d847024c075d979
class UserCreationForm(forms.ModelForm): <NEW_LINE> <INDENT> password1 = forms.CharField(label='Password', widget=forms.PasswordInput, required=False) <NEW_LINE> password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput, required=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('email',) <NEW_LINE> <DEDENT> def clean_password(self): <NEW_LINE> <INDENT> password1 = self.cleaned_data.get("password1") <NEW_LINE> password2 = self.cleaned_data.get("password2") <NEW_LINE> if password1 and password2 and password1 != password2: <NEW_LINE> <INDENT> raise forms.ValidationError("Passwords don't match") <NEW_LINE> <DEDENT> return password2 <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> user = super(UserCreationForm, self).save(commit=False) <NEW_LINE> user.set_password(self.cleaned_data["password1"]) <NEW_LINE> if commit: <NEW_LINE> <INDENT> user.save() <NEW_LINE> <DEDENT> return user
Form for creating new users. Includes all the required fields, plus password confirmation.
6259905816aa5153ce401a81
class YancPlugin(Plugin): <NEW_LINE> <INDENT> name = "yanc" <NEW_LINE> def options(self, parser, env): <NEW_LINE> <INDENT> super(YancPlugin, self).options(parser, env) <NEW_LINE> parser.add_option( "--yanc-color", action="store", dest="yanc_color", default=env.get("NOSE_YANC_COLOR"), help="YANC color override - one of on,off [NOSE_YANC_COLOR]", ) <NEW_LINE> <DEDENT> def configure(self, options, conf): <NEW_LINE> <INDENT> super(YancPlugin, self).configure(options, conf) <NEW_LINE> if options.yanc_color is None and not conf.worker and hasattr(conf.stream, "isatty") and conf.stream.isatty(): <NEW_LINE> <INDENT> options.yanc_color = "on" <NEW_LINE> <DEDENT> self.color = options.yanc_color != "off" <NEW_LINE> <DEDENT> def setOutputStream(self, stream): <NEW_LINE> <INDENT> return self.color and ColorStream(stream) or stream <NEW_LINE> <DEDENT> def prepareTestResult(self, result): <NEW_LINE> <INDENT> if not isinstance(result.stream, ColorStream): <NEW_LINE> <INDENT> result.stream = self.setOutputStream(result.stream)
Yet another nose colorer
6259905863d6d428bbee3d56
class RepFormatter(Formatter): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(RepFormatter, self).__init__(*args, **kwargs) <NEW_LINE> self.repeats = {} <NEW_LINE> self.repindex = 0 <NEW_LINE> <DEDENT> def format(self, *args, **kwargs): <NEW_LINE> <INDENT> self.repindex = 0 <NEW_LINE> result = super(RepFormatter, self).format(*args, **kwargs) <NEW_LINE> if result in self.repeats: <NEW_LINE> <INDENT> self.repindex = self.repeats[result] + 1 <NEW_LINE> self.repeats[result] = self.repindex <NEW_LINE> result = super(RepFormatter, self).format(*args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.repeats[result] = 0 <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def get_value(self, key, args, kwargs): <NEW_LINE> <INDENT> args[0]["_repindex"] = self.repindex <NEW_LINE> return super(RepFormatter, self).get_value(key, args, kwargs)
Extend Formatter to support a {_repindex} placeholder.
6259905882261d6c52730999
class CircularQueue: <NEW_LINE> <INDENT> class _Node: <NEW_LINE> <INDENT> __slots__ = '_element', '_next' <NEW_LINE> def __init__(self, element, next): <NEW_LINE> <INDENT> self._element = element <NEW_LINE> self._next = next <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self._tail = None <NEW_LINE> self._size = 0 <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._size <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return self._size == 0 <NEW_LINE> <DEDENT> def first(self): <NEW_LINE> <INDENT> if self.is_empty(): <NEW_LINE> <INDENT> raise Empty('Queue is empty') <NEW_LINE> <DEDENT> head = self._tail._next <NEW_LINE> return head._element <NEW_LINE> <DEDENT> def dequeue(self): <NEW_LINE> <INDENT> if self.is_empty(): <NEW_LINE> <INDENT> raise Empty('Queue is empty') <NEW_LINE> <DEDENT> oldhead = self._tail._next <NEW_LINE> if self._size == 1: <NEW_LINE> <INDENT> self._tail = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._tail._next = oldhead._next <NEW_LINE> <DEDENT> self._size -= 1 <NEW_LINE> return oldhead._element <NEW_LINE> <DEDENT> def enqueue(self, e): <NEW_LINE> <INDENT> newest = self._Node(e, None) <NEW_LINE> if self.is_empty(): <NEW_LINE> <INDENT> newest._next = newest <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newest._next = self._tail._next <NEW_LINE> self._tail._next = newest <NEW_LINE> <DEDENT> self._tail = newest <NEW_LINE> self._size += 1 <NEW_LINE> <DEDENT> def rotate(self): <NEW_LINE> <INDENT> if self._size > 0: <NEW_LINE> <INDENT> self._tail = self._tail._next
Queue implementation using a singly linked list for storage.
62599058e64d504609df9e9e
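The CircularQueue row above is a compact data-structure example, so a minimal usage sketch may help. It assumes the class has been reconstructed from the encoded code field and that the Empty exception it raises is defined somewhere in scope; neither is part of the row itself.

q = CircularQueue()
for item in (1, 2, 3):
    q.enqueue(item)      # tail advances; the head stays at the first element

print(len(q))            # 3
print(q.first())         # 1  (the element just after the tail)
q.rotate()               # the old head becomes the new tail
print(q.first())         # 2
print(q.dequeue())       # 2
print(q.dequeue())       # 3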
class Tournament: <NEW_LINE> <INDENT> registered_tournament_types = {} <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __init_subclass__(cls, **kwargs): <NEW_LINE> <INDENT> tournament_type = cls.__name__ <NEW_LINE> Tournament.registered_tournament_types[tournament_type] = cls <NEW_LINE> <DEDENT> def save(self, path=None): <NEW_LINE> <INDENT> path = Path(path or DEFAULT_SAVE_PATH) / f"{self.name}.pkl" <NEW_LINE> with open(path, "wb") as fi: <NEW_LINE> <INDENT> pickle.dump(self, fi) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def load(name, path=None): <NEW_LINE> <INDENT> return load_tournament(name, path=path)
Base class for a tournament model
62599058097d151d1a2c2609
class Operator(Entity): <NEW_LINE> <INDENT> onestop_type = 'o' <NEW_LINE> def init(self, **data): <NEW_LINE> <INDENT> self.timezone = data.pop('timezone', None) <NEW_LINE> <DEDENT> def geohash(self): <NEW_LINE> <INDENT> return geom.geohash_features(self.stops()) <NEW_LINE> <DEDENT> def _cache_onestop(self): <NEW_LINE> <INDENT> key = 'onestopId' <NEW_LINE> self.data[key] = self.data.get(key) or self.make_onestop() <NEW_LINE> for i in self.routes(): <NEW_LINE> <INDENT> i.data[key] = i.data.get(key) or i.make_onestop() <NEW_LINE> <DEDENT> for i in self.stops(): <NEW_LINE> <INDENT> i.data[key] = i.data.get(key) or i.make_onestop() <NEW_LINE> <DEDENT> <DEDENT> def add_tags_gtfs(self, gtfs_entity): <NEW_LINE> <INDENT> keys = [ 'agency_url', 'agency_phone', 'agency_lang', 'agency_fare_url', 'agency_id' ] <NEW_LINE> data = gtfs_entity.data._asdict() <NEW_LINE> self.timezone = data.pop('agency_timezone', None) <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> if key in data: <NEW_LINE> <INDENT> self.set_tag(key, data[key]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_json(cls, data): <NEW_LINE> <INDENT> agency = cls(**data) <NEW_LINE> stops = {} <NEW_LINE> for feature in data['features']: <NEW_LINE> <INDENT> if feature['onestopId'].startswith('s'): <NEW_LINE> <INDENT> stop = Stop.from_json(feature) <NEW_LINE> stops[stop.onestop()] = stop <NEW_LINE> <DEDENT> <DEDENT> for feature in data['features']: <NEW_LINE> <INDENT> if feature['onestopId'].startswith('r'): <NEW_LINE> <INDENT> route = Route.from_json(feature) <NEW_LINE> for stop in feature['serves']: <NEW_LINE> <INDENT> route.pclink(route, stops[stop]) <NEW_LINE> <DEDENT> agency.pclink(agency, route) <NEW_LINE> <DEDENT> <DEDENT> return agency <NEW_LINE> <DEDENT> def json(self): <NEW_LINE> <INDENT> return { 'type': 'FeatureCollection', 'geometry': self.geometry(), 'properties': {}, 'name': self.name(), 'tags': self.tags(), 'timezone': self.timezone, 'onestopId': self.onestop(), 'identifiers': sorted(self.identifiers()), 'serves': sorted(self.serves()), 'features': [ i.json() for i in sorted_onestop(self.routes() | self.stops()) ] } <NEW_LINE> <DEDENT> def serves(self): <NEW_LINE> <INDENT> ret = set([i.onestop() for i in self.stops()]) <NEW_LINE> ret |= set(self.data.get('serves', [])) <NEW_LINE> return ret <NEW_LINE> <DEDENT> def routes(self): <NEW_LINE> <INDENT> return set(self.children) <NEW_LINE> <DEDENT> def route(self, onestop_id): <NEW_LINE> <INDENT> return util.filtfirst(self.routes(), onestop=onestop_id) <NEW_LINE> <DEDENT> def stops(self): <NEW_LINE> <INDENT> stops = set() <NEW_LINE> for i in self.routes(): <NEW_LINE> <INDENT> stops |= i.stops() <NEW_LINE> <DEDENT> return stops <NEW_LINE> <DEDENT> def stop(self, onestop_id): <NEW_LINE> <INDENT> return util.filtfirst(self.stops(), onestop=onestop_id)
Transitland Operator Entity.
62599058507cdc57c63a6343
@Role.filter_registry.register('no-specific-managed-policy') <NEW_LINE> class NoSpecificIamRoleManagedPolicy(Filter): <NEW_LINE> <INDENT> schema = type_schema('no-specific-managed-policy', value={'type': 'string'}) <NEW_LINE> permissions = ('iam:ListAttachedRolePolicies',) <NEW_LINE> def _managed_policies(self, client, resource): <NEW_LINE> <INDENT> return [r['PolicyName'] for r in client.list_attached_role_policies( RoleName=resource['RoleName'])['AttachedPolicies']] <NEW_LINE> <DEDENT> def process(self, resources, event=None): <NEW_LINE> <INDENT> c = local_session(self.manager.session_factory).client('iam') <NEW_LINE> if self.data.get('value'): <NEW_LINE> <INDENT> return [r for r in resources if not self.data.get('value') in self._managed_policies(c, r)] <NEW_LINE> <DEDENT> return []
Filter IAM roles that do not have a specific policy attached. For example, if the user wants to check all roles without 'ip-restriction': .. code-block:: yaml - name: iam-roles-no-ip-restriction resource: iam-role filters: - type: no-specific-managed-policy value: ip-restriction
62599058d7e4931a7ef3d61d
class InputStreamSource(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def get_input_stream(self): <NEW_LINE> <INDENT> pass
Simple interface for objects that are sources for an InputStream. This is the base interface for Spring's more extensive Resource interface. For single-use streams, InputStreamResource can be used for any given InputStream. Spring's ByteArrayResource or any file-based Resource implementation can be used as a concrete instance, allowing one to read the underlying content stream multiple times. This makes this interface useful as an abstract content source for mail attachments, for example.
62599058379a373c97d9a5c2
class TestDeleteHardwareTypeHandler(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> interactor_factory = Mock(factory.InteractorFactory) <NEW_LINE> self.__interactor = Mock(hi.DeleteHardwareTypeInteractor) <NEW_LINE> interactor_factory.create = Mock(return_value=self.__interactor) <NEW_LINE> self.__target = handler.DeleteHardwareTypeHandler(interactor_factory, None) <NEW_LINE> self.__target.session = Mock(session.Session) <NEW_LINE> <DEDENT> def test_is_instance_of_authenticated_handler(self): <NEW_LINE> <INDENT> self.assertIsInstance(self.__target, ah.AuthenticatedHandler) <NEW_LINE> <DEDENT> def test_deletion_successful_returns_json_ok_message(self): <NEW_LINE> <INDENT> assertion = hta.get_params_returns_json_result_value_assertion(self, self.__target) <NEW_LINE> assertion(self.__get_params(), 'ok') <NEW_LINE> <DEDENT> def test_exceptions_return_expected_json_results(self): <NEW_LINE> <INDENT> assertion = hta.get_exceptions_returns_json_result_value_assertion(self, self.__target, self.__interactor) <NEW_LINE> expected_combos = [(hi.HardwareTypeNotFoundException, 'not_found'), (Exception, 'error')] <NEW_LINE> assertion(self.__get_params(), expected_combos) <NEW_LINE> <DEDENT> def __get_params(self): <NEW_LINE> <INDENT> return {"name": "n", "description": "desc"} <NEW_LINE> <DEDENT> def __get_hardware_type(self): <NEW_LINE> <INDENT> return ht.HardwareType.from_dict(self.__get_params())
Unit tests for the DeleteHardwareTypeHandler class
62599058596a89723612907f
class String80Type (pyxb.binding.datatypes.normalizedString): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'String80Type') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location( 'http://www.fatturapa.gov.it/export/fatturazione/sdi/fatturapa/v1.2/' 'Schema_del_file_xml_FatturaPA_versione_1.2.xsd', 1189, 2) <NEW_LINE> _Documentation = None
An atomic simple type.
625990582ae34c7f260ac685
class AbstractTestFunction(): <NEW_LINE> <INDENT> def __call__(self, x): <NEW_LINE> <INDENT> return self.evaluate(x) <NEW_LINE> <DEDENT> def evaluate(self, x): <NEW_LINE> <INDENT> return self._evalfull(x)[0] <NEW_LINE> <DEDENT> def _evalfull(self, x): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getfopt(self): <NEW_LINE> <INDENT> if not hasattr(self, 'iinstance'): <NEW_LINE> <INDENT> raise Exception('This function class has not been instantiated yet.') <NEW_LINE> <DEDENT> return self._fopt <NEW_LINE> <DEDENT> def setfopt(self, fopt): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._fopt = float(fopt) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise Exception('Optimal function value must be cast-able to a float.') <NEW_LINE> <DEDENT> <DEDENT> fopt = property(getfopt, setfopt)
Abstract class for test functions. Defines methods to be implemented in test functions which are to be provided to method setfun of class Logger. In particular, (a) the attribute fopt and (b) the method _evalfull. The _evalfull method returns two values, the possibly noisy value and the noise-free value. The latter is only meant to be for recording purpose.
6259905863b5f9789fe86710
class BarcodeSetMetadataType (DataSetMetadataType): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'BarcodeSetMetadataType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/tmp/tmpHOdumhxsds/PacBioDatasets.xsd', 175, 1) <NEW_LINE> _ElementMap = DataSetMetadataType._ElementMap.copy() <NEW_LINE> _AttributeMap = DataSetMetadataType._AttributeMap.copy() <NEW_LINE> __BarcodeConstruction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'BarcodeConstruction'), 'BarcodeConstruction', '__httppacificbiosciences_comPacBioDatasets_xsd_BarcodeSetMetadataType_httppacificbiosciences_comPacBioDatasets_xsdBarcodeConstruction', False, pyxb.utils.utility.Location('/tmp/tmpHOdumhxsds/PacBioDatasets.xsd', 179, 5), ) <NEW_LINE> BarcodeConstruction = property(__BarcodeConstruction.value, __BarcodeConstruction.set, None, None) <NEW_LINE> _ElementMap.update({ __BarcodeConstruction.name() : __BarcodeConstruction }) <NEW_LINE> _AttributeMap.update({ })
Complex type {http://pacificbiosciences.com/PacBioDatasets.xsd}BarcodeSetMetadataType with content type ELEMENT_ONLY
625990588e71fb1e983bd068
class JaroSimilarity: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def jaroSimilarity(s1,s2): <NEW_LINE> <INDENT> nLen1 = len(s1) <NEW_LINE> nLen2 = len(s2) <NEW_LINE> if nLen1 == 0 or nLen2 == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> nMatch = 0 <NEW_LINE> nTransposition = 0 <NEW_LINE> nMatchRange = max(max(nLen1,nLen2) // 2 - 1, 0) <NEW_LINE> bS2Match = [False]*nLen2 <NEW_LINE> sMatch1 = "" <NEW_LINE> sMatch2 = "" <NEW_LINE> for i in range(0,nLen1): <NEW_LINE> <INDENT> nMinIndex = max(i-nMatchRange,0) <NEW_LINE> nMaxIndex = min(i+nMatchRange,nLen2-1) <NEW_LINE> for j in range(nMinIndex,nMaxIndex+1): <NEW_LINE> <INDENT> if (not bS2Match[j]) and s1[i] == s2[j]: <NEW_LINE> <INDENT> sMatch1 = sMatch1 + s1[i] <NEW_LINE> bS2Match[j] = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> nMatch = len(sMatch1) <NEW_LINE> nS2MatchSize = 0 <NEW_LINE> for i in range(0,nLen2): <NEW_LINE> <INDENT> if bS2Match[i]: <NEW_LINE> <INDENT> sMatch2 += s2[i] <NEW_LINE> nS2MatchSize += 1 <NEW_LINE> if nS2MatchSize == nMatch: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> nIndexMatch = 0 <NEW_LINE> for i in range(0,nMatch): <NEW_LINE> <INDENT> if sMatch1[i] == sMatch2[i]: <NEW_LINE> <INDENT> nIndexMatch += 1 <NEW_LINE> <DEDENT> <DEDENT> nTransposition = (nMatch - nIndexMatch) // 2 <NEW_LINE> jaroSim = (nMatch/nLen1 + nMatch/nLen2 + (nMatch - nTransposition) / nMatch)/3.0 <NEW_LINE> return jaroSim
Calculate the Jaro similarity between two strings.
62599058435de62698e9d3a2
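The JaroSimilarity row above is easiest to sanity-check against the classic MARTHA / MARHTA pair (6 matches, 1 transposition). A hedged sketch, assuming the class has been reconstructed from the code field; the expected values follow from the formula (m/|s1| + m/|s2| + (m - t)/m) / 3.

sim = JaroSimilarity.jaroSimilarity("MARTHA", "MARHTA")
print(round(sim, 4))                                # 0.9444 = (6/6 + 6/6 + 5/6) / 3

print(JaroSimilarity.jaroSimilarity("abc", "abc"))  # 1.0 for identical strings
print(JaroSimilarity.jaroSimilarity("abc", ""))     # 0 when either string is empty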
class ExponentialLR(_LRScheduler): <NEW_LINE> <INDENT> def __init__(self, optimizer, init_lr, num_steps, last_epoch=-1): <NEW_LINE> <INDENT> self.init_lr = init_lr <NEW_LINE> self.num_steps = num_steps <NEW_LINE> super(ExponentialLR, self).__init__(optimizer, last_epoch) <NEW_LINE> <DEDENT> def get_lr(self): <NEW_LINE> <INDENT> curr_step = self.last_epoch + 1 <NEW_LINE> r = curr_step / self.num_steps <NEW_LINE> return [self.init_lr * (base_lr / self.init_lr) ** r for base_lr in self.base_lrs]
Exponentially increases the learning rate between two boundaries over a number of iterations. Arguments: optimizer (torch.optim.Optimizer): wrapped optimizer. init_lr (float): the initial learning rate which is the lower boundary of the test. num_steps (int): the number of steps over which the test occurs. last_epoch (int): the index of last epoch. Default: -1.
625990583cc13d1c6d466cde
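In the ExponentialLR row above, the lr configured on the wrapped optimizer is the upper boundary of the sweep (base_lr), while init_lr is the lower boundary reached at step 0, so a learning-rate range test sets the optimizer lr to the largest value to probe. A minimal sketch, assuming PyTorch is available and the class has been reconstructed; the model and loss are placeholders.

import torch

model = torch.nn.Linear(10, 1)                            # placeholder model
optimizer = torch.optim.SGD(model.parameters(), lr=10.0)  # upper boundary of the sweep
scheduler = ExponentialLR(optimizer, init_lr=1e-7, num_steps=100)

for step in range(100):
    optimizer.zero_grad()
    loss = model(torch.randn(4, 10)).pow(2).mean()        # dummy loss just to drive the loop
    loss.backward()
    optimizer.step()
    scheduler.step()                                      # lr climbs from 1e-7 toward 10.0
    # record the current lr and loss here to plot the range test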
class BaseCommit(ShellMixin): <NEW_LINE> <INDENT> id = None <NEW_LINE> tree = None <NEW_LINE> parents = [] <NEW_LINE> message = None <NEW_LINE> short_message = None <NEW_LINE> author = None <NEW_LINE> author_email = None <NEW_LINE> committer = None <NEW_LINE> commiter_email = None <NEW_LINE> commit_date = None <NEW_LINE> repo = None <NEW_LINE> def __init__(self, repo, params): <NEW_LINE> <INDENT> self.repo = repo <NEW_LINE> self.__dict__.update(**dict([(str(k), v) for k,v in params.items()])) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return u'<Commit: %s>' % self.id <NEW_LINE> <DEDENT> @property <NEW_LINE> def short_id(self): <NEW_LINE> <INDENT> return (self.id or '')[:7] <NEW_LINE> <DEDENT> @property <NEW_LINE> def short_tree(self): <NEW_LINE> <INDENT> return (self.id or '')[:7] <NEW_LINE> <DEDENT> @property <NEW_LINE> def short_parents(self): <NEW_LINE> <INDENT> return [parent[:7] for parent in self.parents] <NEW_LINE> <DEDENT> def get_archive(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_tree(self, path): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_file(self, path): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def changed_files(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def commit_diff(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def file_diffs(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_file_diff(self, path): <NEW_LINE> <INDENT> raise NotImplementedError
Base Commit class to inherit from.
625990588e7ae83300eea62c
class RangePartitionAssignor(AbstractPartitionAssignor): <NEW_LINE> <INDENT> name = 'range' <NEW_LINE> version = 0 <NEW_LINE> @classmethod <NEW_LINE> def assign(cls, cluster, member_metadata): <NEW_LINE> <INDENT> consumers_per_topic = collections.defaultdict(list) <NEW_LINE> for member, metadata in six.iteritems(member_metadata): <NEW_LINE> <INDENT> for topic in metadata.subscription: <NEW_LINE> <INDENT> consumers_per_topic[topic].append(member) <NEW_LINE> <DEDENT> <DEDENT> assignment = collections.defaultdict(dict) <NEW_LINE> for topic, consumers_for_topic in six.iteritems(consumers_per_topic): <NEW_LINE> <INDENT> partitions = cluster.partitions_for_topic(topic) <NEW_LINE> if partitions is None: <NEW_LINE> <INDENT> log.warning('No partition metadata for topic %s', topic) <NEW_LINE> continue <NEW_LINE> <DEDENT> partitions = sorted(partitions) <NEW_LINE> consumers_for_topic.sort() <NEW_LINE> partitions_per_consumer = len(partitions) // len(consumers_for_topic) <NEW_LINE> consumers_with_extra = len(partitions) % len(consumers_for_topic) <NEW_LINE> for i, member in enumerate(consumers_for_topic): <NEW_LINE> <INDENT> start = partitions_per_consumer * i <NEW_LINE> start += min(i, consumers_with_extra) <NEW_LINE> length = partitions_per_consumer <NEW_LINE> if not i + 1 > consumers_with_extra: <NEW_LINE> <INDENT> length += 1 <NEW_LINE> <DEDENT> assignment[member][topic] = partitions[start:start+length] <NEW_LINE> <DEDENT> <DEDENT> protocol_assignment = {} <NEW_LINE> for member_id in member_metadata: <NEW_LINE> <INDENT> protocol_assignment[member_id] = ConsumerProtocolMemberAssignment( cls.version, sorted(assignment[member_id].items()), b'') <NEW_LINE> <DEDENT> return protocol_assignment <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def metadata(cls, topics): <NEW_LINE> <INDENT> return ConsumerProtocolMemberMetadata(cls.version, list(topics), b'') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def on_assignment(cls, assignment): <NEW_LINE> <INDENT> pass
The range assignor works on a per-topic basis. For each topic, we lay out the available partitions in numeric order and the consumers in lexicographic order. We then divide the number of partitions by the total number of consumers to determine the number of partitions to assign to each consumer. If it does not evenly divide, then the first few consumers will have one extra partition. For example, suppose there are two consumers C0 and C1, two topics t0 and t1, and each topic has 3 partitions, resulting in partitions t0p0, t0p1, t0p2, t1p0, t1p1, and t1p2. The assignment will be: C0: [t0p0, t0p1, t1p0, t1p1] C1: [t0p2, t1p2]
625990581b99ca4002290007
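The per-topic arithmetic in the RangePartitionAssignor row above can be illustrated without a Kafka cluster. The standalone helper below reproduces only the splitting rule (partitions // consumers each, with one extra for the first partitions % consumers members); it is an illustration, not a call into kafka-python.

def range_assign(partitions, consumers):
    # partitions: sorted partition ids, consumers: lexicographically sorted member ids
    per_consumer = len(partitions) // len(consumers)
    with_extra = len(partitions) % len(consumers)
    assignment = {}
    for i, member in enumerate(consumers):
        start = per_consumer * i + min(i, with_extra)
        length = per_consumer + (1 if i < with_extra else 0)
        assignment[member] = partitions[start:start + length]
    return assignment

print(range_assign([0, 1, 2], ["C0", "C1"]))
# {'C0': [0, 1], 'C1': [2]} -- per topic, matching the example in the docstring above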
class MySQLDB(object): <NEW_LINE> <INDENT> _instance = None <NEW_LINE> def __new__(cls,*args,**kargv): <NEW_LINE> <INDENT> if not cls._instance: <NEW_LINE> <INDENT> cls._instance = super(MySQLDB, cls).__new__(cls, *args, **kargv) <NEW_LINE> <DEDENT> return cls._instance <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self._db = MySQLdb.connect(host=my_host, user=my_user, passwd=my_passwd, db=my_db) <NEW_LINE> self._cur = self._db.cursor() <NEW_LINE> self._db.set_character_set('utf8') <NEW_LINE> self._cur.execute('SET NAMES utf8;') <NEW_LINE> self._cur.execute('SET CHARACTER SET utf8;') <NEW_LINE> self._cur.execute('SET character_set_connection=utf8;') <NEW_LINE> <DEDENT> def create_table(self): <NEW_LINE> <INDENT> if DROP_OLD_TABLE: <NEW_LINE> <INDENT> drop_tables = "DROP TABLE IF EXISTS {}".format(table_name) <NEW_LINE> self._cur.execute(drop_tables) <NEW_LINE> self._db.commit() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._cur.execute(create_table_sql) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(create_table_sql) <NEW_LINE> print(e) <NEW_LINE> raise e <NEW_LINE> <DEDENT> <DEDENT> def query_info_re(self,colnums = '*',key='id' ,reg_exp='*'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> command = "SELECT {} FROM {} WHERE {} REGEXP '{}'".format(colnums,table_name,key,reg_exp) <NEW_LINE> self._cur.execute(command) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(command) <NEW_LINE> print(e) <NEW_LINE> raise e <NEW_LINE> <DEDENT> return self._cur.fetchall() <NEW_LINE> <DEDENT> def insert_values(self,data): <NEW_LINE> <INDENT> command = fillup_insert_command(data) <NEW_LINE> try: <NEW_LINE> <INDENT> self._cur.execute(command) <NEW_LINE> self._db.commit() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self._db.rollback() <NEW_LINE> print(command) <NEW_LINE> print(e) <NEW_LINE> raise e <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self._db.close()
Singleton wrapper around a MySQLdb connection (UTF-8) with helpers to create the table, run REGEXP queries, and insert rows.
62599058d486a94d0ba2d567
class ClientConnectionJob(object): <NEW_LINE> <INDENT> def __init__(self, clientSocket, clientAddr, daemon): <NEW_LINE> <INDENT> self.csock = socketutil.SocketConnection(clientSocket) <NEW_LINE> self.caddr = clientAddr <NEW_LINE> self.daemon = daemon <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> if self.handleConnection(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.daemon.handleRequest(self.csock) <NEW_LINE> <DEDENT> except (socket.error, errors.ConnectionClosedError): <NEW_LINE> <INDENT> log.debug("disconnected %s", self.caddr) <NEW_LINE> break <NEW_LINE> <DEDENT> except errors.SecurityError: <NEW_LINE> <INDENT> log.debug("security error on client %s", self.caddr) <NEW_LINE> break <NEW_LINE> <DEDENT> except errors.TimeoutError as x: <NEW_LINE> <INDENT> log.warning("error during handleRequest: %s" % x) <NEW_LINE> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> ex_t, ex_v, ex_tb = sys.exc_info() <NEW_LINE> tb = util.formatTraceback(ex_t, ex_v, ex_tb) <NEW_LINE> msg = "error during handleRequest: %s; %s" % (ex_v, "".join(tb)) <NEW_LINE> log.warning(msg) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> with _client_disconnect_lock: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.daemon._clientDisconnect(self.csock) <NEW_LINE> <DEDENT> except Exception as x: <NEW_LINE> <INDENT> log.warning("Error in clientDisconnect: " + str(x)) <NEW_LINE> <DEDENT> <DEDENT> self.csock.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def handleConnection(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.daemon._handshake(self.csock): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> self.csock.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> ex_t, ex_v, ex_tb = sys.exc_info() <NEW_LINE> tb = util.formatTraceback(ex_t, ex_v, ex_tb) <NEW_LINE> log.warning("error during connect/handshake: %s; %s", ex_v, "\n".join(tb)) <NEW_LINE> self.csock.close() <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def denyConnection(self, reason): <NEW_LINE> <INDENT> log.warning("client connection was denied: " + reason) <NEW_LINE> self.daemon._handshake(self.csock, denied_reason=reason) <NEW_LINE> self.csock.close()
Takes care of a single client connection and all requests that may arrive during its life span.
62599058460517430c432b21
class AddPlaceholderInstruction(Instruction): <NEW_LINE> <INDENT> @Overrides(Instruction) <NEW_LINE> def _execute(self): <NEW_LINE> <INDENT> super(AddPlaceholderInstruction, self).add_placeholder()
add placeholder ( PLACEHOLDER | STRING ) Example 1: add placeholder (About, "About|Om")
62599058a79ad1619776b58d
class Attribute(BaseReportElement): <NEW_LINE> <INDENT> def __init__(self, id_, value, name=None): <NEW_LINE> <INDENT> BaseReportElement.__init__(self, id_) <NEW_LINE> if value is None: <NEW_LINE> <INDENT> raise PbReportError("value cannot be None. {n} given.".format(n=value)) <NEW_LINE> <DEDENT> self._value = value <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def _get_attrs_simple(self): <NEW_LINE> <INDENT> return ['value', 'name'] <NEW_LINE> <DEDENT> def _get_attrs_complex_list(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Attribute): <NEW_LINE> <INDENT> if self.name == other.name and self.value == other.value and self.id == other.id: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> _d = dict(k=self.__class__.__name__, i=self.id, v=self.value, n=self.name) <NEW_LINE> return "<{k} id:{i} value:{v} name:{n} >".format(**_d)
An attribute always has an id and a value. A name is optional.
6259905807d97122c4218249
class RBFDivergenceFreeKernelReparametrised(Kernel): <NEW_LINE> <INDENT> def __init__(self, dim, log_length_scale, sigma_var=1): <NEW_LINE> <INDENT> super().__init__(dim, dim) <NEW_LINE> self.log_length_scale = log_length_scale <NEW_LINE> self.sigma_var = sigma_var <NEW_LINE> <DEDENT> def forward(self, X, Y=None, flatten=True): <NEW_LINE> <INDENT> X, Y, upranked = self.uprank_inputs(X, Y) <NEW_LINE> X = X.unsqueeze(-2) <NEW_LINE> Y = Y.unsqueeze(-3) <NEW_LINE> length_scale = torch.exp(self.log_length_scale) <NEW_LINE> diff = X - Y <NEW_LINE> dists = (diff ** 2).sum(dim=-1) <NEW_LINE> K = self.sigma_var * torch.exp(-0.5 * dists / length_scale).unsqueeze( -1 ).unsqueeze(-1) <NEW_LINE> outer_product = diff.unsqueeze(-1) @ diff.unsqueeze(-2) <NEW_LINE> I = torch.eye(self.rkhs_dim).to(X.device) <NEW_LINE> A = (outer_product / length_scale) + ( self.rkhs_dim - 1 - dists / length_scale ).unsqueeze(-1).unsqueeze(-1) * I <NEW_LINE> K = A * K <NEW_LINE> if flatten: <NEW_LINE> <INDENT> K = self.flatten_gram_matrix(K) <NEW_LINE> <DEDENT> if upranked: <NEW_LINE> <INDENT> K = K.squeeze(0) <NEW_LINE> <DEDENT> return K
Based on the kernels defined in equation (24) in "Kernels for Vector-Valued Functions: a Review" by Alvarez et al.
62599058097d151d1a2c260b
class VipServerNameList(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.b_key = "vip-server-name-list" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.oper = {} <NEW_LINE> self.vip_name = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
This class does not support CRUD operations; please use the parent. :param vip_name: {"description": "Specify a VIP name for the SLB device", "format": "string", "minLength": 1, "oid": "1001", "optional": false, "maxLength": 63, "type": "string"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
625990588e71fb1e983bd069
class DefaultDesignRule(DesignContentRule): <NEW_LINE> <INDENT> grok.implements(interfaces.IDefaultDesignRule)
Default design for a content type.
6259905821bff66bcd724204
class AlphaBetaAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> depth = self.depth <NEW_LINE> move = self.alphaBetaMinimax(gameState, 0, True, 1, float("-inf"), float("inf"))[1] <NEW_LINE> return move <NEW_LINE> <DEDENT> def alphaBetaMinimax(self, gameState, depth, isMaximizing, ghostItr, alpha, beta): <NEW_LINE> <INDENT> numGhosts = gameState.getNumAgents() - 1 <NEW_LINE> if gameState.isWin() or gameState.isLose() or depth == self.depth: <NEW_LINE> <INDENT> return (self.evaluationFunction(gameState),None) <NEW_LINE> <DEDENT> if isMaximizing: <NEW_LINE> <INDENT> bestValue = (float("-inf"), None) <NEW_LINE> for move in gameState.getLegalActions(0): <NEW_LINE> <INDENT> succState = gameState.generateSuccessor(0, move) <NEW_LINE> v = self.alphaBetaMinimax(succState, depth, False, 1, alpha, beta) <NEW_LINE> bestValue = (v[0],move) if v[0] > bestValue[0] else bestValue <NEW_LINE> if v[0] > beta: return v <NEW_LINE> alpha = max(v[0],alpha) <NEW_LINE> <DEDENT> return bestValue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bestValue = (float("inf"), None) <NEW_LINE> for move in gameState.getLegalActions(ghostItr): <NEW_LINE> <INDENT> succState = gameState.generateSuccessor(ghostItr, move) <NEW_LINE> v = self.alphaBetaMinimax(succState, (depth + 1 if ghostItr == numGhosts else depth), (True if ghostItr == numGhosts else False), ghostItr + 1, alpha, beta) <NEW_LINE> bestValue = (v[0],move) if v[0] < bestValue[0] else bestValue <NEW_LINE> if v[0] < alpha: return v <NEW_LINE> beta = min(v[0],beta) <NEW_LINE> <DEDENT> return bestValue
Your minimax agent with alpha-beta pruning (question 3)
625990584428ac0f6e659adb
class NSNitroNserrSslDupSnicertBrklink(NSNitroSsl2Errors): <NEW_LINE> <INDENT> pass
Nitro error code 3637. Some of the existing SNI cert bindings are broken due to the presence of a certificate with a duplicate Common Name.
62599058e76e3b2f99fd9f9f
class HalfAdder(): <NEW_LINE> <INDENT> def __init__(self, *inputs): <NEW_LINE> <INDENT> if len(inputs) is not 2: <NEW_LINE> <INDENT> raise Exception("ERROR: Number of arguments not consistent") <NEW_LINE> <DEDENT> self.inputs = list(inputs[:]) <NEW_LINE> self.S = XOR(self.inputs[0], self.inputs[1]) <NEW_LINE> self.C = AND(self.inputs[0], self.inputs[1]) <NEW_LINE> <DEDENT> def set_input(self, index, value): <NEW_LINE> <INDENT> if index > 1 or index < 0: <NEW_LINE> <INDENT> raise Exception("ERROR: Not a valid index value") <NEW_LINE> <DEDENT> self.inputs[index] = value <NEW_LINE> if index == 0: <NEW_LINE> <INDENT> self.S.setInput(0, self.inputs[0]) <NEW_LINE> self.C.setInput(0, self.inputs[0]) <NEW_LINE> <DEDENT> elif index == 1: <NEW_LINE> <INDENT> self.S.setInput(1, self.inputs[1]) <NEW_LINE> self.C.setInput(1, self.inputs[1]) <NEW_LINE> <DEDENT> <DEDENT> def set_inputs(self, *inputs): <NEW_LINE> <INDENT> self.inputs = list(inputs)[:] <NEW_LINE> self.S.setInputs(*inputs) <NEW_LINE> self.C.setInputs(*inputs) <NEW_LINE> <DEDENT> def set_output(self, index, value): <NEW_LINE> <INDENT> if not isinstance(value, Connector): <NEW_LINE> <INDENT> raise Exception("ERROR: Expecting a Connector Class Object") <NEW_LINE> <DEDENT> if index == 0: <NEW_LINE> <INDENT> self.C.setOutput(value) <NEW_LINE> <DEDENT> elif index == 1: <NEW_LINE> <INDENT> self.S.setOutput(value) <NEW_LINE> <DEDENT> <DEDENT> def output(self): <NEW_LINE> <INDENT> return [self.C.output(), self.S.output()]
This Class implements Half Adder, Arithmetic sum of two bits and return its Sum and Carry Output: [CARRY, SUM] Example: >>> from BinPy import * >>> ha = HalfAdder(0, 1) >>> ha.output() [0, 1]
625990588da39b475be0478a
@attr('UNIT', group='mi') <NEW_LINE> class FLORTTelemeteredGliderTest(GliderParserUnitTestCase): <NEW_LINE> <INDENT> config = { DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.glider', DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlortTelemeteredDataParticle' } <NEW_LINE> def test_flort_telemetered_particle(self): <NEW_LINE> <INDENT> self.set_data(HEADER3, FLORT_RECORD) <NEW_LINE> self.reset_parser() <NEW_LINE> record_1 = {FlortTelemeteredParticleKey.SCI_FLBBCD_BB_UNITS: 0.000281336, FlortTelemeteredParticleKey.SCI_FLBBCD_CDOM_UNITS: 2.0352, FlortTelemeteredParticleKey.SCI_FLBBCD_CHLOR_UNITS: 0.8349} <NEW_LINE> record_2 = {FlortTelemeteredParticleKey.SCI_FLBBCD_BB_UNITS: 0.000262988, FlortTelemeteredParticleKey.SCI_FLBBCD_CDOM_UNITS: 2.12, FlortTelemeteredParticleKey.SCI_FLBBCD_CHLOR_UNITS: 0.847} <NEW_LINE> self.assert_generate_particle(FlortTelemeteredDataParticle, record_1, 10534) <NEW_LINE> self.assert_generate_particle(FlortTelemeteredDataParticle, record_2, 11977) <NEW_LINE> self.assert_no_more_data() <NEW_LINE> self.set_data(HEADER3, FLORT_RECORD) <NEW_LINE> self.reset_parser({StateKey.POSITION: 10534}) <NEW_LINE> self.assert_generate_particle(FlortTelemeteredDataParticle, record_2, 11977) <NEW_LINE> self.assert_no_more_data()
Test cases for flort glider data
6259905873bcbd0ca4bcb833
class EndpointEnumerationServiceClient(BaseServiceClient): <NEW_LINE> <INDENT> def __init__(self, api_configuration): <NEW_LINE> <INDENT> super(EndpointEnumerationServiceClient, self).__init__(api_configuration) <NEW_LINE> <DEDENT> def get_endpoints(self, **kwargs): <NEW_LINE> <INDENT> operation_name = "get_endpoints" <NEW_LINE> params = locals() <NEW_LINE> for key, val in six.iteritems(params['kwargs']): <NEW_LINE> <INDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> resource_path = '/v1/endpoints/' <NEW_LINE> resource_path = resource_path.replace('{format}', 'json') <NEW_LINE> path_params = {} <NEW_LINE> query_params = [] <NEW_LINE> header_params = [] <NEW_LINE> body_params = None <NEW_LINE> header_params.append(('Content-type', 'application/json')) <NEW_LINE> full_response = False <NEW_LINE> if 'full_response' in params: <NEW_LINE> <INDENT> full_response = params['full_response'] <NEW_LINE> <DEDENT> authorization_value = "Bearer " + self._authorization_value <NEW_LINE> header_params.append(("Authorization", authorization_value)) <NEW_LINE> error_definitions = [] <NEW_LINE> error_definitions.append(ServiceClientResponse(response_type="ask_sdk_model.services.endpoint_enumeration.endpoint_enumeration_response.EndpointEnumerationResponse", status_code=200, message="Successfully retrieved the list of connected endpoints.")) <NEW_LINE> error_definitions.append(ServiceClientResponse(response_type="ask_sdk_model.services.endpoint_enumeration.error.Error", status_code=400, message="Bad request. Returned when a required parameter is not present or badly formatted.")) <NEW_LINE> error_definitions.append(ServiceClientResponse(response_type="ask_sdk_model.services.endpoint_enumeration.error.Error", status_code=401, message="Unauthenticated. Returned when the request is not authenticated.")) <NEW_LINE> error_definitions.append(ServiceClientResponse(response_type="ask_sdk_model.services.endpoint_enumeration.error.Error", status_code=403, message="Forbidden. Returned when the request is authenticated but does not have sufficient permission.")) <NEW_LINE> error_definitions.append(ServiceClientResponse(response_type="ask_sdk_model.services.endpoint_enumeration.error.Error", status_code=500, message="Server Error. Returned when the server encountered an error processing the request.")) <NEW_LINE> error_definitions.append(ServiceClientResponse(response_type="ask_sdk_model.services.endpoint_enumeration.error.Error", status_code=503, message="Service Unavailable. Returned when the server is not ready to handle the request.")) <NEW_LINE> error_definitions.append(ServiceClientResponse(response_type="ask_sdk_model.services.endpoint_enumeration.error.Error", status_code=0, message="Unexpected error")) <NEW_LINE> api_response = self.invoke( method="GET", endpoint=self._api_endpoint, path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, response_definitions=error_definitions, response_type="ask_sdk_model.services.endpoint_enumeration.endpoint_enumeration_response.EndpointEnumerationResponse") <NEW_LINE> if full_response: <NEW_LINE> <INDENT> return api_response <NEW_LINE> <DEDENT> return api_response.body
ServiceClient for calling the EndpointEnumerationService APIs. :param api_configuration: Instance of ApiConfiguration :type api_configuration: ask_sdk_model.services.api_configuration.ApiConfiguration
625990584e4d5625663739a8
class Scheduling(messages.Message): <NEW_LINE> <INDENT> class OnHostMaintenanceValueValuesEnum(messages.Enum): <NEW_LINE> <INDENT> MIGRATE = 0 <NEW_LINE> TERMINATE = 1 <NEW_LINE> <DEDENT> automaticRestart = messages.BooleanField(1) <NEW_LINE> onHostMaintenance = messages.EnumField('OnHostMaintenanceValueValuesEnum', 2) <NEW_LINE> preemptible = messages.BooleanField(3)
Sets the scheduling options for an Instance. Enums: OnHostMaintenanceValueValuesEnum: Defines the maintenance behavior for this instance. The default behavior is MIGRATE. For more information, see Setting maintenance behavior. Fields: automaticRestart: Specifies whether the instance should be automatically restarted if it is terminated by Compute Engine (not terminated by a user). onHostMaintenance: Defines the maintenance behavior for this instance. The default behavior is MIGRATE. For more information, see Setting maintenance behavior. preemptible: Whether the Instance is preemptible.
62599058ac7a0e7691f73a81
@global_scope <NEW_LINE> class ProviderFirewallRule(BASE, NovaBase): <NEW_LINE> <INDENT> __tablename__ = 'provider_fw_rules' <NEW_LINE> __table_args__ = () <NEW_LINE> id = Column(Integer, primary_key=True, nullable=False) <NEW_LINE> protocol = Column(String(5)) <NEW_LINE> from_port = Column(Integer) <NEW_LINE> to_port = Column(Integer) <NEW_LINE> cidr = Column(MediumText())
Represents a rule in a security group.
62599058b5575c28eb71379c
class BagDataCollatePretrain(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __call__(self, batch): <NEW_LINE> <INDENT> imgs_basic1, imgs_basic2, imgs_aux, anns = batch <NEW_LINE> return imgs_basic1, imgs_basic2, imgs_aux, anns
Collect bag data during the pretrain stage.
625990586e29344779b01bec
class CursesWindow(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen = curses.initscr() <NEW_LINE> curses.start_color() <NEW_LINE> curses.use_default_colors() <NEW_LINE> curses.curs_set(0) <NEW_LINE> self.use_black_text() <NEW_LINE> self.shape = (curses.COLS - 1, curses.LINES - 1) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> curses.nocbreak() <NEW_LINE> self.screen.keypad(0) <NEW_LINE> curses.echo() <NEW_LINE> curses.endwin() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exception_type, exception, traceback): <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> def use_black_text(self): <NEW_LINE> <INDENT> black_foreground = 0 <NEW_LINE> for color in range(curses.COLORS): <NEW_LINE> <INDENT> curses.init_pair(color, black_foreground, color) <NEW_LINE> <DEDENT> <DEDENT> def draw(self, curses_frame): <NEW_LINE> <INDENT> nrows, ncols = self.shape[1], self.shape[0] <NEW_LINE> for row in range(nrows): <NEW_LINE> <INDENT> for col in range(ncols): <NEW_LINE> <INDENT> self.screen.addch(row, col, curses_frame.characters[row][col], curses.color_pair(curses_frame.colors[row][col])) <NEW_LINE> <DEDENT> <DEDENT> self.screen.refresh()
An interface for drawing to curses
62599058379a373c97d9a5c5
class Caster: <NEW_LINE> <INDENT> def __init__(self, target: Type[Any]): <NEW_LINE> <INDENT> self.dest: Type[Any] <NEW_LINE> self.args: List[Caster] <NEW_LINE> if hasattr(target, "__origin__"): <NEW_LINE> <INDENT> self.dest = target.__origin__ <NEW_LINE> args = target.__args__ <NEW_LINE> self.args = ( [Caster(args[0])] if Ellipsis in args else [Caster(arg) for arg in args] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dest = target <NEW_LINE> self.args = [] <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, value: Any) -> Any: <NEW_LINE> <INDENT> if isinstance(value, bytes): <NEW_LINE> <INDENT> value = value.decode() <NEW_LINE> <DEDENT> if self.dest is dict: <NEW_LINE> <INDENT> return { self.args[0](key): self.args[1](val) for key, val in dict(value).items() } <NEW_LINE> <DEDENT> if self.dest is list: <NEW_LINE> <INDENT> return [self.args[0](val) for val in value] <NEW_LINE> <DEDENT> if self.dest is tuple: <NEW_LINE> <INDENT> if len(self.args) == 1: <NEW_LINE> <INDENT> return tuple([self.args[0](val) for val in value]) <NEW_LINE> <DEDENT> return tuple([conv(val) for conv, val in zip(self.args, value)]) <NEW_LINE> <DEDENT> return self.dest(value) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> args = ( "" if len(self.args) == 0 else " , ".join([str(arg) for arg in self.args]) ) <NEW_LINE> return f"TO{self.dest}[{args}]"
Generic type-caster generator.
625990584a966d76dd5f0492
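The Caster row above builds runtime converters from typing annotations. A small usage sketch, assuming Python 3.7+ (where Dict[str, int].__origin__ is dict, as the code expects) and the class reconstructed from the code field:

from typing import Dict, List, Tuple

to_int_map = Caster(Dict[str, int])
print(to_int_map({b"a": b"1", b"b": b"2"}))    # {'a': 1, 'b': 2} -- bytes are decoded before casting

to_floats = Caster(List[float])
print(to_floats([b"1.5", "2", 3]))             # [1.5, 2.0, 3.0]

pair = Caster(Tuple[int, str])
print(pair((b"7", b"seven")))                  # (7, 'seven') -- one caster per tuple position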
class TestViewsFailureCondition(BaseTest): <NEW_LINE> <INDENT> def test_individual_blog_post_route_404_wrong_id(self): <NEW_LINE> <INDENT> response = self.testapp.get('/blog/2', status=404) <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> <DEDENT> def test_api_individual_blog_posts_404_wrong_id(self): <NEW_LINE> <INDENT> response = self.testapp.get('/api/posts/2', status=404) <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> <DEDENT> def test_cannot_create_without_login(self): <NEW_LINE> <INDENT> response = self.testapp.post('/blog/create', status=403) <NEW_LINE> self.assertEqual(response.status_code, 403) <NEW_LINE> <DEDENT> def test_cannot_delete_without_login(self): <NEW_LINE> <INDENT> response = self.testapp.post('/blog/1/delete', status=403) <NEW_LINE> self.assertEqual(response.status_code, 403) <NEW_LINE> <DEDENT> def test_cannot_edit_without_login(self): <NEW_LINE> <INDENT> response = self.testapp.post('/blog/1/edit', status=403) <NEW_LINE> self.assertEqual(response.status_code, 403)
Test Views Failure Condition.
6259905829b78933be26ab95
class Raumbuch_elektro(Verantwortung): <NEW_LINE> <INDENT> wohnung = models.OneToOneField(Wohnung, on_delete=models.CASCADE, null=True, blank=True) <NEW_LINE> bad = models.TextField(null=True, blank=True) <NEW_LINE> kueche = models.TextField(null=True, blank=True) <NEW_LINE> flur = models.TextField(null=True, blank=True) <NEW_LINE> wohnzimmer = models.TextField(null=True, blank=True) <NEW_LINE> gaeste_wc = models.TextField(null=True, blank=True) <NEW_LINE> schlafzimmer = models.TextField(null=True, blank=True) <NEW_LINE> kinderzimmer = models.TextField(null=True, blank=True) <NEW_LINE> abstellraum = models.TextField(null=True, blank=True) <NEW_LINE> schalterprogramm = models.TextField(null=True, blank=True) <NEW_LINE> sonstiges = models.TextField(null=True, blank=True)
Electric fittings per room; ForeignKey to Wohnung.
62599058e5267d203ee6ce90
class IRestrictKeywords(Interface): <NEW_LINE> <INDENT> pass
Marker interface for restricting the keyword vocabulary
62599058009cb60464d02ad6
class RequestHistory(ModelSQL, ModelView): <NEW_LINE> <INDENT> __name__ = 'res.request.history' <NEW_LINE> name = fields.Char('Summary', required=True, readonly=True) <NEW_LINE> request = fields.Many2One('res.request', 'Request', required=True, ondelete='CASCADE', select=True, readonly=True) <NEW_LINE> act_from = fields.Many2One('res.user', 'From', required=True, readonly=True) <NEW_LINE> act_to = fields.Many2One('res.user', 'To', required=True, readonly=True) <NEW_LINE> body = fields.Text('Body', readonly=True) <NEW_LINE> date_sent = fields.DateTime('Date sent', required=True, readonly=True) <NEW_LINE> state = fields.Selection(_STATES, 'State', required=True, readonly=True) <NEW_LINE> subject = fields.Char('Subject', required=True, readonly=True) <NEW_LINE> number_references = fields.Integer('References', readonly=True) <NEW_LINE> priority = fields.Selection(_PRIORITIES, 'Priority', required=True, readonly=True) <NEW_LINE> @classmethod <NEW_LINE> def __setup__(cls): <NEW_LINE> <INDENT> super(RequestHistory, cls).__setup__() <NEW_LINE> cls._order.insert(0, ('date_sent', 'DESC')) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_name(): <NEW_LINE> <INDENT> return 'No Name' <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_act_from(): <NEW_LINE> <INDENT> return int(Transaction().user) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_act_to(): <NEW_LINE> <INDENT> return int(Transaction().user) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_date_sent(): <NEW_LINE> <INDENT> return datetime.datetime.now() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def write(records, vals): <NEW_LINE> <INDENT> pass
Request history
625990589c8ee82313040c5b
class NoSolutionError(Exception): <NEW_LINE> <INDENT> pass
Raise this error if no solution exists for a given set of x, y inputs.
6259905863d6d428bbee3d58
class ConfigurationSettings(SearchCommand.ConfigurationSettings): <NEW_LINE> <INDENT> @property <NEW_LINE> def clear_required_fields(self): <NEW_LINE> <INDENT> return type(self)._clear_required_fields <NEW_LINE> <DEDENT> _clear_required_fields = True <NEW_LINE> @property <NEW_LINE> def requires_preop(self): <NEW_LINE> <INDENT> return type(self)._requires_preop <NEW_LINE> <DEDENT> _requires_preop = False <NEW_LINE> @property <NEW_LINE> def retainsevents(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def streaming(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def streaming_preop(self): <NEW_LINE> <INDENT> command = type(self.command) <NEW_LINE> if command.map == ReportingCommand.map: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> command_line = str(self.command) <NEW_LINE> command_name = type(self.command).name <NEW_LINE> text = ' '.join([ command_name, '__map__', command_line[len(command_name) + 1:]]) <NEW_LINE> return text <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def fix_up(cls, command): <NEW_LINE> <INDENT> if not issubclass(command, ReportingCommand): <NEW_LINE> <INDENT> raise TypeError('%s is not a ReportingCommand' % command) <NEW_LINE> <DEDENT> if command.reduce == ReportingCommand.reduce: <NEW_LINE> <INDENT> raise AttributeError('No ReportingCommand.reduce override') <NEW_LINE> <DEDENT> if command.map == ReportingCommand.map: <NEW_LINE> <INDENT> cls._requires_preop = False <NEW_LINE> return <NEW_LINE> <DEDENT> f = vars(command)['map'] <NEW_LINE> try: <NEW_LINE> <INDENT> settings = f._settings <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> f.ConfigurationSettings = StreamingCommand.ConfigurationSettings <NEW_LINE> return <NEW_LINE> <DEDENT> module = '.'.join([command.__module__, command.__name__, 'map']) <NEW_LINE> name = 'ConfigurationSettings' <NEW_LINE> bases = (StreamingCommand.ConfigurationSettings,) <NEW_LINE> f.ConfigurationSettings = ConfigurationSettingsType( module, name, bases, settings) <NEW_LINE> del f._settings <NEW_LINE> return
Represents the configuration settings for a :code:`ReportingCommand`.
62599058097d151d1a2c260d
class TripletSelector: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_triplets(self, embeddings, labels): <NEW_LINE> <INDENT> raise NotImplementedError
Implementations should return the indices of anchor, positive, and negative samples as a numpy array of shape [N_triplets x 3].
625990583539df3088ecd83e
class ListMembersCommand(LoadBalancerBalancerListCommand): <NEW_LINE> <INDENT> log = logging.getLogger(__name__) <NEW_LINE> def take_action(self, parsed_args): <NEW_LINE> <INDENT> client = get_client(parsed_args) <NEW_LINE> balancer = Placeholder(parsed_args.balancer_id) <NEW_LINE> members = client.balancer_list_members(balancer=balancer) <NEW_LINE> members = [Member(m) for m in members] <NEW_LINE> collection = Collection(members) <NEW_LINE> return collection.generate_output()
Output a list of the members attached to the provided loadbalancer.
62599058596a897236129081
class FileBotToolProcessingErrorEvent(DelugeEvent): <NEW_LINE> <INDENT> def __init__(self, torrent_id, handler_name, error): <NEW_LINE> <INDENT> if error: <NEW_LINE> <INDENT> if isinstance(error, Exception): <NEW_LINE> <INDENT> msg = "Exception {0} encountered during processing:\n {1}" <NEW_LINE> msg = msg.format(error.__class__.__name__, traceback.format_exc()) <NEW_LINE> error = msg <NEW_LINE> <DEDENT> elif not isinstance(error, str): <NEW_LINE> <INDENT> error = str(error) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> error = '' <NEW_LINE> <DEDENT> self._args = [torrent_id, handler_name, error]
Emitted when a torrent that FileBotTool was processing errors out.
625990583c8af77a43b68a11
class Dispatcher(collections.MutableMapping): <NEW_LINE> <INDENT> def __init__(self, prototype=None): <NEW_LINE> <INDENT> self.method_map = dict() <NEW_LINE> if prototype is not None: <NEW_LINE> <INDENT> self.build_method_map(prototype) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.method_map[key] <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self.method_map[key] = value <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> del self.method_map[key] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.method_map) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.method_map) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return repr(self.method_map) <NEW_LINE> <DEDENT> def add_class(self, cls): <NEW_LINE> <INDENT> prefix = cls.__name__.lower() + '.' <NEW_LINE> self.build_method_map(cls(), prefix) <NEW_LINE> <DEDENT> def add_object(self, obj): <NEW_LINE> <INDENT> prefix = obj.__class__.__name__.lower() + '.' <NEW_LINE> self.build_method_map(obj, prefix) <NEW_LINE> <DEDENT> def add_dict(self, dict, prefix=''): <NEW_LINE> <INDENT> if prefix: <NEW_LINE> <INDENT> prefix += '.' <NEW_LINE> <DEDENT> self.build_method_map(dict, prefix) <NEW_LINE> <DEDENT> def add_method(self, f, name=None): <NEW_LINE> <INDENT> self.method_map[name or f.__name__] = f <NEW_LINE> return f <NEW_LINE> <DEDENT> def build_method_map(self, prototype, prefix=''): <NEW_LINE> <INDENT> if not isinstance(prototype, dict): <NEW_LINE> <INDENT> prototype = dict((method, getattr(prototype, method)) for method in dir(prototype) if not method.startswith('_')) <NEW_LINE> <DEDENT> for attr, method in prototype.items(): <NEW_LINE> <INDENT> if callable(method): <NEW_LINE> <INDENT> self[prefix + attr] = method
Dictionary like object which maps method_name to method.
62599058cc0a2c111447c58e
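A short usage sketch for the Dispatcher row above, assuming the class is reconstructed from the code field. It exercises the three registration paths: a bare function, a dict under a prefix, and an object whose public methods are registered under its lowercased class name.

dispatcher = Dispatcher()

def ping():
    return "pong"

dispatcher.add_method(ping)                        # registered under its own name
dispatcher.add_dict({"echo": lambda x: x}, "util")

class Math:
    def add(self, a, b):
        return a + b

dispatcher.add_object(Math())                      # registered as 'math.add'

print(dispatcher["ping"]())                        # pong
print(dispatcher["util.echo"]("hi"))               # hi
print(dispatcher["math.add"](2, 3))                # 5
print(sorted(dispatcher))                          # ['math.add', 'ping', 'util.echo']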
class TestDebugApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = isi_sdk_8_2_2.api.debug_api.DebugApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete_debug_stats(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_debug_stats(self): <NEW_LINE> <INDENT> pass
DebugApi unit test stubs
62599058e76e3b2f99fd9fa1
class SingleGEANTSimulation(GroundParticlesGEANT4Simulation): <NEW_LINE> <INDENT> def __init__(self, progress): <NEW_LINE> <INDENT> self.progress = progress <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def singleGEANTsim(self, particletype, particleenergy, xdetcoord, ydetcoord, px, py, pz): <NEW_LINE> <INDENT> arrived_photons_per_particle = [] <NEW_LINE> arrived_photons_per_particle_muon = [] <NEW_LINE> arrived_photons_per_particle_electron = [] <NEW_LINE> arrived_photons_per_particle_gamma = [] <NEW_LINE> n_muons = 0 <NEW_LINE> n_electrons = 0 <NEW_LINE> n_gammas = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> output = subprocess.check_output([ "/user/kaspervd/Documents/repositories/diamond/20170117_geant4_simulation/HiSPARC-stbc-build/./skibox", "1", particletype, "{}".format(particleenergy), "{}".format(xdetcoord), "{}".format(ydetcoord), "-99889", "{}".format(px), "{}".format(py), "{}".format(pz)]) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("./skibox", "1", particletype, "{}".format(particleenergy), "{}".format(xdetcoord), "{}".format(ydetcoord), "-99889", "{}".format(px), "{}".format(py), "{}".format(pz)) <NEW_LINE> <DEDENT> geantfile = np.genfromtxt("RUN_1/outpSD.csv", delimiter=",") <NEW_LINE> try: <NEW_LINE> <INDENT> photontimes = geantfile[1:, 0] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> photontimes = np.array([]) <NEW_LINE> <DEDENT> shutil.rmtree("RUN_1") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> photontimes = np.array([]) <NEW_LINE> <DEDENT> arrived_photons_per_particle = np.append(arrived_photons_per_particle, photontimes) <NEW_LINE> all_particles_trace = self._simulate_PMT(arrived_photons_per_particle) <NEW_LINE> all_particles_trace[all_particles_trace < -MAX_VOLTAGE] = -MAX_VOLTAGE <NEW_LINE> return all_particles_trace
This class inherits from GroundParticlesGEANT4Simulation in order to use the _simulate_PMT function.
6259905873bcbd0ca4bcb835
class AudioConfig(): <NEW_LINE> <INDENT> def __init__(self, use_default_microphone: bool = False, filename: OptionalStr = None, stream: Optional[AudioInputStream] = None, device_name: OptionalStr = None): <NEW_LINE> <INDENT> if not isinstance(use_default_microphone, bool): <NEW_LINE> <INDENT> raise ValueError('use_default_microphone must be a bool, is "{}"'.format( use_default_microphone)) <NEW_LINE> <DEDENT> if use_default_microphone: <NEW_LINE> <INDENT> if filename is None and stream is None and device_name is None: <NEW_LINE> <INDENT> self._impl = impl.AudioConfig._from_default_microphone_input() <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('default microphone can not be combined with any other options') <NEW_LINE> <DEDENT> <DEDENT> if sum(x is not None for x in (filename, stream, device_name)) > 1: <NEW_LINE> <INDENT> raise ValueError('only one of filename, stream, and device_name can be given') <NEW_LINE> <DEDENT> if filename is not None: <NEW_LINE> <INDENT> self._impl = impl.AudioConfig._from_wav_file_input(filename) <NEW_LINE> return <NEW_LINE> <DEDENT> if stream is not None: <NEW_LINE> <INDENT> self._impl = impl.AudioConfig._from_stream_input(stream._impl) <NEW_LINE> return <NEW_LINE> <DEDENT> if device_name is not None: <NEW_LINE> <INDENT> self._impl = impl.AudioConfig._from_microphone_input(device_name) <NEW_LINE> return <NEW_LINE> <DEDENT> raise ValueError('cannot construct AudioConfig with the given arguments')
Represents specific audio configuration, such as microphone, file, or custom audio streams. Generates an audio configuration for the various recognizers. Only one argument can be passed at a time. :param use_default_microphone: Specifies to use the default system microphone for audio input. :param device_name: Specifies the id of the audio device to use. Please refer to `this page <https://aka.ms/csspeech/microphone-selection>`_ on how to retrieve platform-specific microphone names. This functionality was added in version 1.3.0. :param filename: Specifies an audio input file. Currently, only WAV / PCM with 16-bit samples, 16 kHz sample rate, and a single channel (Mono) is supported. :param stream: Creates an AudioConfig object representing the specified stream.
625990582ae34c7f260ac689
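The AudioConfig row above validates that exactly one audio source is supplied. A hedged sketch of the resulting call patterns, assuming the surrounding Speech SDK package (the impl module it wraps) is installed; the file path and device id below are placeholders, not real values.

# Exactly one source per AudioConfig, mirroring the constructor's checks.
mic_config = AudioConfig(use_default_microphone=True)
file_config = AudioConfig(filename="sample.wav")        # placeholder path; 16-bit 16 kHz mono PCM WAV
device_config = AudioConfig(device_name="placeholder-device-id")

try:
    AudioConfig(use_default_microphone=True, filename="sample.wav")
except ValueError as err:
    print(err)   # "default microphone can not be combined with any other options"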
class BaseModel(models.Model): <NEW_LINE> <INDENT> id = models.AutoField(primary_key=True) <NEW_LINE> uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated_at = models.DateTimeField(auto_now=True) <NEW_LINE> DATE_FORMAT = '{0:%m/%d/%Y, %-I:%M %p}' <NEW_LINE> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> from django.utils import timezone <NEW_LINE> if not self.id: <NEW_LINE> <INDENT> self.created_at = timezone.now() <NEW_LINE> <DEDENT> self.updated_at = timezone.now() <NEW_LINE> return super(BaseModel, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def created(self): <NEW_LINE> <INDENT> return self.DATE_FORMAT.format(self.created_at) <NEW_LINE> <DEDENT> @property <NEW_LINE> def updated(self): <NEW_LINE> <INDENT> return self.DATE_FORMAT.format(self.updated_at) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> abstract = True
BaseModel Base model that handles ids, uuid, and both the created_at and updated_at fields. Add methods here that all models in the project will share. This is an abstract model; all models in the project should extend it.
6259905807f4c71912bb09dd
class Genre(models.Model): <NEW_LINE> <INDENT> name = models.CharField('Genre', max_length=100) <NEW_LINE> description = models.CharField('Description', max_length=255) <NEW_LINE> url = models.SlugField(max_length=100, unique=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Genre' <NEW_LINE> verbose_name_plural = 'Genres'
genre
625990583539df3088ecd83f
class AllRange(EkteloMatrix): <NEW_LINE> <INDENT> def __init__(self, n, dtype=np.float64): <NEW_LINE> <INDENT> self.n = n <NEW_LINE> self.shape = ((n*(n+1) // 2), n) <NEW_LINE> self.dtype = dtype <NEW_LINE> self._prefix = Prefix(n, dtype) <NEW_LINE> <DEDENT> def _matmat(self, V): <NEW_LINE> <INDENT> m = self.shape[0] <NEW_LINE> n = V.shape[1] <NEW_LINE> ans = np.vstack([np.zeros(n), self._prefix.dot(V)]) <NEW_LINE> res = np.zeros((m, n)) <NEW_LINE> for i, (a, b) in enumerate(itertools.combinations(range(self.n+1), 2)): <NEW_LINE> <INDENT> res[i] = ans[b] - ans[a] <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> @property <NEW_LINE> def matrix(self): <NEW_LINE> <INDENT> return self.dot(np.eye(self.n)) <NEW_LINE> <DEDENT> def gram(self): <NEW_LINE> <INDENT> r = np.arange(self.n) + 1 <NEW_LINE> X = np.outer(r, r[::-1]) <NEW_LINE> return EkteloMatrix(np.minimum(X, X.T))
The AllRange workload encodes range queries of the form [i,j] for 0 <= i <= j <= n-1
6259905816aa5153ce401a86
class ZoozRequestBase(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.requests = requests.Session() <NEW_LINE> <DEDENT> @property <NEW_LINE> def get_url(self): <NEW_LINE> <INDENT> global ZOOZ_SANDBOX <NEW_LINE> global ZOOZ_URLS <NEW_LINE> if ZOOZ_SANDBOX: <NEW_LINE> <INDENT> return ZOOZ_URLS['sandbox'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ZOOZ_URLS['production'] <NEW_LINE> <DEDENT> <DEDENT> @backoff_retry(retries=5) <NEW_LINE> def post(self, url, payload, headers): <NEW_LINE> <INDENT> return self.requests.post(url, data=payload, headers=headers) <NEW_LINE> <DEDENT> def _parse_response_nvp(self, response): <NEW_LINE> <INDENT> response_dict = urlparse.parse_qs(response, keep_blank_values=True) <NEW_LINE> return {k: v[0] for (k, v) in response_dict.items()}
Base client for the Zooz API
625990588e7ae83300eea62f
class TestModel_WorkspaceActivityTemplate(): <NEW_LINE> <INDENT> def test_workspace_activity_template_serialization(self): <NEW_LINE> <INDENT> log_summary_model = {} <NEW_LINE> log_summary_model['activity_status'] = 'testString' <NEW_LINE> log_summary_model['detected_template_type'] = 'testString' <NEW_LINE> log_summary_model['discarded_files'] = 26 <NEW_LINE> log_summary_model['error'] = 'testString' <NEW_LINE> log_summary_model['resources_added'] = 26 <NEW_LINE> log_summary_model['resources_destroyed'] = 26 <NEW_LINE> log_summary_model['resources_modified'] = 26 <NEW_LINE> log_summary_model['scanned_files'] = 26 <NEW_LINE> log_summary_model['template_variable_count'] = 26 <NEW_LINE> log_summary_model['time_taken'] = 72.5 <NEW_LINE> workspace_activity_template_model_json = {} <NEW_LINE> workspace_activity_template_model_json['end_time'] = "2019-01-01T12:00:00Z" <NEW_LINE> workspace_activity_template_model_json['log_summary'] = log_summary_model <NEW_LINE> workspace_activity_template_model_json['log_url'] = 'testString' <NEW_LINE> workspace_activity_template_model_json['message'] = 'testString' <NEW_LINE> workspace_activity_template_model_json['start_time'] = "2019-01-01T12:00:00Z" <NEW_LINE> workspace_activity_template_model_json['status'] = 'testString' <NEW_LINE> workspace_activity_template_model_json['template_id'] = 'testString' <NEW_LINE> workspace_activity_template_model_json['template_type'] = 'testString' <NEW_LINE> workspace_activity_template_model = WorkspaceActivityTemplate.from_dict(workspace_activity_template_model_json) <NEW_LINE> assert workspace_activity_template_model != False <NEW_LINE> workspace_activity_template_model_dict = WorkspaceActivityTemplate.from_dict(workspace_activity_template_model_json).__dict__ <NEW_LINE> workspace_activity_template_model2 = WorkspaceActivityTemplate(**workspace_activity_template_model_dict) <NEW_LINE> assert workspace_activity_template_model == workspace_activity_template_model2 <NEW_LINE> workspace_activity_template_model_json2 = workspace_activity_template_model.to_dict() <NEW_LINE> assert workspace_activity_template_model_json2 == workspace_activity_template_model_json
Test Class for WorkspaceActivityTemplate
6259905823849d37ff852668