code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class AutoscalerZoneOperationsGetRequest(messages.Message): <NEW_LINE> <INDENT> operation = messages.StringField(1, required=True) <NEW_LINE> project = messages.StringField(2, required=True) <NEW_LINE> zone = messages.StringField(3, required=True) | A AutoscalerZoneOperationsGetRequest object.
Fields:
operation: A string attribute.
project: A string attribute.
zone: A string attribute. | 6259906c442bda511e95d96e |
class _BaseMonotonicAttentionMechanism(_BaseAttentionMechanism): <NEW_LINE> <INDENT> def initial_alignments(self, batch_size, dtype): <NEW_LINE> <INDENT> max_time = self._alignments_size <NEW_LINE> return array_ops.one_hot( array_ops.zeros((batch_size,), dtype=dtypes.int32), max_time, dtype=dtype) | Base attention mechanism for monotonic attention.
Simply overrides the initial_alignments function to provide a dirac
distribution,which is needed in order for the monotonic attention
distributions to have the correct behavior. | 6259906c4527f215b58eb5b6 |
class GetWorkflowResult(object): <NEW_LINE> <INDENT> def __init__(__self__, access_endpoint=None, location=None, parameters=None, tags=None, workflow_schema=None, workflow_version=None, id=None): <NEW_LINE> <INDENT> if access_endpoint and not isinstance(access_endpoint, str): <NEW_LINE> <INDENT> raise TypeError('Expected argument access_endpoint to be a str') <NEW_LINE> <DEDENT> __self__.access_endpoint = access_endpoint <NEW_LINE> if location and not isinstance(location, str): <NEW_LINE> <INDENT> raise TypeError('Expected argument location to be a str') <NEW_LINE> <DEDENT> __self__.location = location <NEW_LINE> if parameters and not isinstance(parameters, dict): <NEW_LINE> <INDENT> raise TypeError('Expected argument parameters to be a dict') <NEW_LINE> <DEDENT> __self__.parameters = parameters <NEW_LINE> if tags and not isinstance(tags, dict): <NEW_LINE> <INDENT> raise TypeError('Expected argument tags to be a dict') <NEW_LINE> <DEDENT> __self__.tags = tags <NEW_LINE> if workflow_schema and not isinstance(workflow_schema, str): <NEW_LINE> <INDENT> raise TypeError('Expected argument workflow_schema to be a str') <NEW_LINE> <DEDENT> __self__.workflow_schema = workflow_schema <NEW_LINE> if workflow_version and not isinstance(workflow_version, str): <NEW_LINE> <INDENT> raise TypeError('Expected argument workflow_version to be a str') <NEW_LINE> <DEDENT> __self__.workflow_version = workflow_version <NEW_LINE> if id and not isinstance(id, str): <NEW_LINE> <INDENT> raise TypeError('Expected argument id to be a str') <NEW_LINE> <DEDENT> __self__.id = id | A collection of values returned by getWorkflow. | 6259906cdd821e528d6da597 |
class PeekingIterator(object): <NEW_LINE> <INDENT> def __init__(self, iterator): <NEW_LINE> <INDENT> self.iter = iterator <NEW_LINE> self.temp = self.iter.next() if self.iter.hasNext() else None <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> return self.temp <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> ret = self.temp <NEW_LINE> self.temp = self.iter.next() if self.iter.hasNext() else None <NEW_LINE> return ret <NEW_LINE> <DEDENT> def hasNext(self): <NEW_LINE> <INDENT> return self.temp is not None | This solution caches the next element
This problem can also be solved by instantiating a queue | 6259906c379a373c97d9a84c |
class SubmissionParticipant(db.Model): <NEW_LINE> <INDENT> __tablename__ = "submissionparticipant" <NEW_LINE> id = db.Column(db.Integer, primary_key=True, nullable=False, autoincrement=True) <NEW_LINE> publication_recid = db.Column(db.Integer) <NEW_LINE> full_name = db.Column(db.String(128)) <NEW_LINE> email = db.Column(db.String(128)) <NEW_LINE> affiliation = db.Column(db.String(128)) <NEW_LINE> invitation_cookie = db.Column(UUIDType, default=uuid.uuid4) <NEW_LINE> user_account = db.Column(db.Integer, db.ForeignKey(User.id)) <NEW_LINE> role = db.Column(db.String(32), default='') <NEW_LINE> status = db.Column(db.String(32), default='reserve') <NEW_LINE> action_date = db.Column(db.DateTime, default=datetime.utcnow, nullable=True, index=True) | This table stores information about the reviewers and
uploaders of a HEPData submission. | 6259906c8a43f66fc4bf39c0 |
class IpDetails(ACLMixin, generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> authentication_classes = (authentication.SessionAuthentication,) <NEW_LINE> permission_classes = (IsOwnerOrReadOnly,) <NEW_LINE> queryset = Ip.objects.all() <NEW_LINE> serializer_class = IpDetailSerializer | Retrieve details of specified ip address.
### DELETE
Delete specified ip address. Must be authenticated as owner or admin.
### PUT & PATCH
Edit ip address. Must be authenticated as owner or admin. | 6259906c97e22403b383c73a |
class ResourcePresetServiceServicer(object): <NEW_LINE> <INDENT> def Get(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def List(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') | Missing associated documentation comment in .proto file. | 6259906ca219f33f346c8035 |
@implementer(IFlockerAPIV1Client) <NEW_LINE> class FakeFlockerClient(object): <NEW_LINE> <INDENT> _NOW = datetime.fromtimestamp(0, UTC) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._configured_datasets = pmap() <NEW_LINE> self._leases = LeasesModel() <NEW_LINE> self.synchronize_state() <NEW_LINE> <DEDENT> def create_dataset(self, primary, maximum_size=None, dataset_id=None, metadata=pmap()): <NEW_LINE> <INDENT> if dataset_id is None: <NEW_LINE> <INDENT> dataset_id = uuid4() <NEW_LINE> <DEDENT> if dataset_id in self._configured_datasets: <NEW_LINE> <INDENT> return fail(DatasetAlreadyExists()) <NEW_LINE> <DEDENT> result = Dataset(primary=primary, maximum_size=maximum_size, dataset_id=dataset_id, metadata=metadata) <NEW_LINE> self._configured_datasets = self._configured_datasets.set( dataset_id, result) <NEW_LINE> return succeed(result) <NEW_LINE> <DEDENT> def delete_dataset(self, dataset_id): <NEW_LINE> <INDENT> dataset = self._configured_datasets[dataset_id] <NEW_LINE> self._configured_datasets = self._configured_datasets.remove( dataset_id) <NEW_LINE> return succeed(dataset) <NEW_LINE> <DEDENT> def move_dataset(self, primary, dataset_id): <NEW_LINE> <INDENT> self._configured_datasets = self._configured_datasets.transform( [dataset_id, "primary"], primary) <NEW_LINE> return succeed(self._configured_datasets[dataset_id]) <NEW_LINE> <DEDENT> def list_datasets_configuration(self): <NEW_LINE> <INDENT> return succeed(self._configured_datasets.values()) <NEW_LINE> <DEDENT> def list_datasets_state(self): <NEW_LINE> <INDENT> return succeed(self._state_datasets) <NEW_LINE> <DEDENT> def synchronize_state(self): <NEW_LINE> <INDENT> self._state_datasets = [ DatasetState( dataset_id=dataset.dataset_id, primary=dataset.primary, maximum_size=dataset.maximum_size, path=FilePath(b"/flocker").child(bytes(dataset.dataset_id))) for dataset in self._configured_datasets.values()] <NEW_LINE> <DEDENT> def acquire_lease(self, dataset_id, node_uuid, expires): <NEW_LINE> <INDENT> 
try: <NEW_LINE> <INDENT> self._leases = self._leases.acquire( self._NOW, dataset_id, node_uuid, expires) <NEW_LINE> <DEDENT> except LeaseError: <NEW_LINE> <INDENT> return fail(LeaseAlreadyHeld()) <NEW_LINE> <DEDENT> return succeed( Lease(dataset_id=dataset_id, node_uuid=node_uuid, expires=expires)) <NEW_LINE> <DEDENT> def release_lease(self, dataset_id): <NEW_LINE> <INDENT> lease = self._leases[dataset_id] <NEW_LINE> self._leases = self._leases.release(dataset_id, lease.node_id) <NEW_LINE> return succeed( Lease(dataset_id=dataset_id, node_uuid=lease.node_id, expires=((lease.expiration - self._NOW).total_seconds() if lease.expiration is not None else None))) <NEW_LINE> <DEDENT> def list_leases(self): <NEW_LINE> <INDENT> return succeed([ Lease(dataset_id=l.dataset_id, node_uuid=l.node_id, expires=((l.expiration - self._NOW).total_seconds() if l.expiration is not None else None)) for l in self._leases.values()]) | Fake in-memory implementation of ``IFlockerAPIV1Client``. | 6259906c7047854f46340be3 |
class PlayerEndpoint(Endpoint): <NEW_LINE> <INDENT> def __init__(self, session_id, elem_id, pipeline_class): <NEW_LINE> <INDENT> super().__init__(session_id, elem_id, pipeline_class) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"PlayerEndpoint ID: {self.elem_id} Session ID: {self.session_id}\n" <NEW_LINE> <DEDENT> def connect(self, sink_elem = None): <NEW_LINE> <INDENT> return super()._connect(sink_elem) <NEW_LINE> <DEDENT> def play(self): <NEW_LINE> <INDENT> params = { "object":self.elem_id, "operation":"play", "sessionId": self.session_id } <NEW_LINE> self.pipeline._invoke(params) <NEW_LINE> <DEDENT> def pause(self): <NEW_LINE> <INDENT> params = { "object":self.elem_id, "operation":"pause", "sessionId": self.session_id } <NEW_LINE> self.pipeline._invoke(params) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> params = { "object":self.elem_id, "operation":"stop", "sessionId": self.session_id } <NEW_LINE> self.pipeline._invoke(params) <NEW_LINE> <DEDENT> def add_event_listener(self, event, callback): <NEW_LINE> <INDENT> super()._add_event_listener(event, callback) | An input endpoint that retrieves content from file system, HTTP URL or RTSP URL and injects it into the media pipeline.
| 6259906c4f6381625f19a0be |
class RasterbucketService(BaseModel): <NEW_LINE> <INDENT> done = models.BooleanField(default=False) <NEW_LINE> owner = models.ForeignKey(User) <NEW_LINE> rasterbucket = models.ForeignKey( Rasterbucket, on_delete=models.CASCADE, related_name='services') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return 'Rasterbucket Service : {}'.format(self.name) | A model of the Rasterbucket service table
Attributes:
done (TYPE): Description
owner (TYPE): Description
rasterbucket (TYPE): Description | 6259906c796e427e5384ffa4 |
class InvalidRelease(Exception): <NEW_LINE> <INDENT> pass | Raised when the stored release is missing data or has an invalid format. | 6259906c097d151d1a2c289d |
class dictGraphFB(dictGraph): <NEW_LINE> <INDENT> def __init__(self,**kwargs): <NEW_LINE> <INDENT> dictGraph.__init__(self,**kwargs) <NEW_LINE> self._inverse=self.dictClass() <NEW_LINE> <DEDENT> __invert__ = classutil.standard_invert <NEW_LINE> def __delitem__(self,node): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fromNodes=self._inverse[node] <NEW_LINE> del self._inverse[node] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in fromNodes: <NEW_LINE> <INDENT> del self[i][node] <NEW_LINE> <DEDENT> <DEDENT> dictGraph.__delitem__(self,node) | Graph that saves both forward and backward edges | 6259906ce1aae11d1e7cf424 |
class insert_carriage_return_after_token_if_it_is_not_followed_by_a_comment_and_not_on_same_line_as_token(structure.Rule): <NEW_LINE> <INDENT> def __init__(self, name, identifier, token, oSameLineToken): <NEW_LINE> <INDENT> structure.Rule.__init__(self, name=name, identifier=identifier) <NEW_LINE> self.token = token <NEW_LINE> self.oSameLineToken = oSameLineToken <NEW_LINE> <DEDENT> def _get_tokens_of_interest(self, oFile): <NEW_LINE> <INDENT> return oFile.get_tokens_bounded_by(self.token, parser.carriage_return, include_trailing_whitespace=True) <NEW_LINE> <DEDENT> def _analyze(self, lToi): <NEW_LINE> <INDENT> for oToi in lToi: <NEW_LINE> <INDENT> lTokens = oToi.get_tokens() <NEW_LINE> if utils.are_next_consecutive_token_types([parser.carriage_return], 1, lTokens): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if utils.are_next_consecutive_token_types([parser.whitespace, parser.comment], 1, lTokens): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if utils.are_next_consecutive_token_types([parser.comment], 1, lTokens): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for oToken in lTokens: <NEW_LINE> <INDENT> if isinstance(oToken, self.oSameLineToken): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.add_violation(violation.New(oToi.get_line_number(), oToi, self.solution)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _fix_violation(self, oViolation): <NEW_LINE> <INDENT> lTokens = oViolation.get_tokens() <NEW_LINE> rules_utils.insert_carriage_return(lTokens, 1) <NEW_LINE> oViolation.set_tokens(lTokens) | Checks function parameters are on their own line except if they are all on the same line.
Parameters
----------
name : string
The group the rule belongs to.
identifier : string
unique identifier. Usually in the form of 00N. | 6259906c76e4537e8c3f0db1 |
class UrlAddressSerializer(ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = UrlAddress <NEW_LINE> fields = [ 'original_url', 'short_url' ] | Serializer class | 6259906c4e4d562566373c34 |
class NameValueView(QtGui.QWidget): <NEW_LINE> <INDENT> def __init__(self, orig_table, dummy=None, parent=None): <NEW_LINE> <INDENT> QtGui.QWidget.__init__(self, parent) <NEW_LINE> assert len(orig_table) == 1, len(orig_table) <NEW_LINE> orig_record = orig_table[0] <NEW_LINE> fieldnames = orig_record.__class__.fieldnames <NEW_LINE> self.name = orig_record.__class__.__name__ <NEW_LINE> recs = [NameValue(n, v) for n, v in zip(fieldnames, orig_record)] <NEW_LINE> self.table = Table(NameValue, recs) <NEW_LINE> self.table.attr['readonly_column'] = 0 <NEW_LINE> self.tableModel = CustomTableModel(self.table, None) <NEW_LINE> self.tableView = QtGui.QTableView(self) <NEW_LINE> self.setupUi() <NEW_LINE> <DEDENT> def current_record(self): <NEW_LINE> <INDENT> indexes = self.tableView.selectedIndexes() <NEW_LINE> if not indexes: <NEW_LINE> <INDENT> raise NoRecordSelected <NEW_LINE> <DEDENT> row_idx = indexes[-1].row() <NEW_LINE> return self.tableModel.table[row_idx] <NEW_LINE> <DEDENT> def current_selection(self): <NEW_LINE> <INDENT> row_ids = set(item.row() for item in self.tableView.selectedIndexes()) <NEW_LINE> sel = [] <NEW_LINE> for row_id in row_ids: <NEW_LINE> <INDENT> row = self.table[row_id] <NEW_LINE> sel.append('\t'.join(row)) <NEW_LINE> <DEDENT> return '\n'.join(sel) <NEW_LINE> <DEDENT> def setupUi(self): <NEW_LINE> <INDENT> self.tableView.setModel(self.tableModel) <NEW_LINE> self.tableView.horizontalHeader().setStretchLastSection(True) <NEW_LINE> self.tableView.horizontalHeader().setResizeMode( QtGui.QHeaderView.ResizeToContents) <NEW_LINE> self.tableView.setMinimumSize(420, 270) <NEW_LINE> self.tableView.setSelectionBehavior( QtGui.QAbstractItemView.SelectRows) <NEW_LINE> self.tableView.setAlternatingRowColors(True) <NEW_LINE> self.tableView.setSizePolicy( QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding) <NEW_LINE> self.tableLabel = QtGui.QLabel(self.name) <NEW_LINE> self.layout = QtGui.QVBoxLayout() <NEW_LINE> 
self.layout.addWidget(self.tableLabel) <NEW_LINE> self.layout.addWidget(self.tableView) <NEW_LINE> self.setLayout(self.layout) | Wrapper around a table with a single record | 6259906c67a9b606de5476b9 |
class RecipeViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Recipe.objects.all() <NEW_LINE> serializer_class = serializers.RecipeSerializer <NEW_LINE> authentication_classes = (TokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> def _params_to_ints(self, qs): <NEW_LINE> <INDENT> return [int(str_id) for str_id in qs.split(",")] <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> tags = self.request.query_params.get("tags") <NEW_LINE> ingredients = self.request.query_params.get("ingredients") <NEW_LINE> queryset = self.queryset <NEW_LINE> if tags: <NEW_LINE> <INDENT> tag_ids = self._params_to_ints(tags) <NEW_LINE> queryset = queryset.filter(tags__id__in=tag_ids) <NEW_LINE> <DEDENT> if ingredients: <NEW_LINE> <INDENT> ingredient_ids = self._params_to_ints(ingredients) <NEW_LINE> queryset = queryset.filter(ingredients__id__in=ingredient_ids) <NEW_LINE> <DEDENT> queryset = queryset.filter(user=self.request.user).order_by( "-title") <NEW_LINE> return queryset <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(user=self.request.user) <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action == "retrieve": <NEW_LINE> <INDENT> return serializers.RecipeDetailSerializer <NEW_LINE> <DEDENT> elif self.action == "upload_image": <NEW_LINE> <INDENT> return serializers.RecipeImageSerializer <NEW_LINE> <DEDENT> return self.serializer_class <NEW_LINE> <DEDENT> @action(methods=["POST"], detail=True, url_path="upload-image") <NEW_LINE> def upload_image(self, request, pk=None): <NEW_LINE> <INDENT> recipe = self.get_object() <NEW_LINE> serializer = self.get_serializer(recipe, data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response( serializer.data, status=status.HTTP_200_OK ) <NEW_LINE> <DEDENT> return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST ) | Manage recipes in database
| 6259906c26068e7796d4e167 |
class WeatherUndergroundAPIForecast(object): <NEW_LINE> <INDENT> BASE_URL = 'https://api.weather.com/v3/wx/forecast/daily' <NEW_LINE> def __init__(self, api_key): <NEW_LINE> <INDENT> self.api_key = api_key <NEW_LINE> <DEDENT> def forecast_request(self, locator, location, forecast='5day', units='m', language='en-GB', format='json', max_tries=3): <NEW_LINE> <INDENT> location_setting = '='.join([locator, location]) <NEW_LINE> units_setting = '='.join(['units', units]) <NEW_LINE> language_setting = '='.join(['language', language]) <NEW_LINE> format_setting = '='.join(['format', format]) <NEW_LINE> api_key = '='.join(['apiKey', self.api_key]) <NEW_LINE> parameters = '&'.join([location_setting, units_setting, language_setting, format_setting, api_key]) <NEW_LINE> f_url = '/'.join([self.BASE_URL, forecast]) <NEW_LINE> url = '?'.join([f_url, parameters]) <NEW_LINE> if weewx.debug >= 1: <NEW_LINE> <INDENT> _obf_api_key = '='.join(['apiKey', '*'*(len(self.api_key) - 4) + self.api_key[-4:]]) <NEW_LINE> _obf_parameters = '&'.join([location_setting, units_setting, language_setting, format_setting, _obf_api_key]) <NEW_LINE> _obf_url = '?'.join([f_url, _obf_parameters]) <NEW_LINE> log.debug("Submitting Weather Underground API call using URL: %s" % (_obf_url, )) <NEW_LINE> <DEDENT> for count in range(max_tries): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> w = urllib.request.urlopen(url) <NEW_LINE> try: <NEW_LINE> <INDENT> char_set = w.headers.get_content_charset() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> char_set = w.headers.getparam('charset') <NEW_LINE> <DEDENT> response = w.read().decode(char_set) <NEW_LINE> w.close() <NEW_LINE> return response <NEW_LINE> <DEDENT> except (urllib.error.URLError, socket.timeout) as e: <NEW_LINE> <INDENT> log.error("Failed to get Weather Underground forecast on attempt %d" % (count+1, )) <NEW_LINE> log.error(" **** %s" % e) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.error("Failed to get Weather Underground 
forecast") <NEW_LINE> <DEDENT> return None | Obtain a forecast from the Weather Underground API.
The WU API is accessed by calling one or more features. These features can
be grouped into two groups, WunderMap layers and data features. This class
supports access to the API data features only.
WeatherUndergroundAPI constructor parameters:
api_key: WeatherUnderground API key to be used.
WeatherUndergroundAPI methods:
data_request. Submit a data feature request to the WeatherUnderground
API and return the response. | 6259906c7d847024c075dc09 |
@dataclass <NEW_LINE> class MedicinalProductIngredientSpecifiedSubstance(BackboneElement): <NEW_LINE> <INDENT> resource_type: ClassVar[str] = "MedicinalProductIngredientSpecifiedSubstance" <NEW_LINE> code: CodeableConcept = None <NEW_LINE> group: CodeableConcept = None <NEW_LINE> confidentiality: Optional[CodeableConcept] = None <NEW_LINE> strength: Optional[List[MedicinalProductIngredientSpecifiedSubstanceStrength]] = None | A specified substance that comprises this ingredient.
| 6259906c5fc7496912d48e7f |
class WrappedException(Exception): <NEW_LINE> <INDENT> def __init__(self, exc_info, source, sanitize=True): <NEW_LINE> <INDENT> self.exc_info = exc_info <NEW_LINE> self.sanitize = sanitize <NEW_LINE> self.source = source <NEW_LINE> <DEDENT> def extract_tb(self): <NEW_LINE> <INDENT> tb = traceback.extract_tb(self.exc_info[2]) <NEW_LINE> tb = fix_missing_traceback_filename(tb, self.source) <NEW_LINE> if self.sanitize: <NEW_LINE> <INDENT> tb = sanitize_traceback(tb, self.source.filename) <NEW_LINE> <DEDENT> return fix_missing_traceback_text(tb, self.source) <NEW_LINE> <DEDENT> def format_tb(self): <NEW_LINE> <INDENT> tb = self.extract_tb() <NEW_LINE> return traceback.format_list(tb) <NEW_LINE> <DEDENT> def heading(self): <NEW_LINE> <INDENT> return 'Traceback (most recent call last):\n' <NEW_LINE> <DEDENT> def format_exception_only(self): <NEW_LINE> <INDENT> ty, val, tb = self.exc_info <NEW_LINE> return traceback.format_exception_only(ty, val) <NEW_LINE> <DEDENT> def format_exception(self): <NEW_LINE> <INDENT> return ([self.heading()] + self.format_tb() + self.format_exception_only()) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ''.join(self.format_exception()) | Wrap an exception which would propagate out of the library. The
original exception can be specially formatted to hide library
implementation details from the user, while still enabling the exception
location to be disclosed. | 6259906cd486a94d0ba2d7ed |
class MatchedFilter(LinearTransform): <NEW_LINE> <INDENT> def __init__(self, background, target): <NEW_LINE> <INDENT> self.background = background <NEW_LINE> self.u_b = background.mean <NEW_LINE> self.u_t = target <NEW_LINE> self._whitening_transform = None <NEW_LINE> d_tb = (target - self.u_b) <NEW_LINE> self.d_tb = d_tb <NEW_LINE> C_1 = background.inv_cov <NEW_LINE> self.C_1 = C_1 <NEW_LINE> self.coef = 1.0 / d_tb.dot(C_1).dot(d_tb) <NEW_LINE> LinearTransform.__init__( self, (self.coef * d_tb).dot(C_1), pre=-self.u_b) <NEW_LINE> <DEDENT> def whiten(self, X): <NEW_LINE> <INDENT> if self._whitening_transform is None: <NEW_LINE> <INDENT> A = math.sqrt(self.coef) * self.background.sqrt_inv_cov <NEW_LINE> self._whitening_transform = LinearTransform(A, pre=-self.u_b) <NEW_LINE> <DEDENT> return self._whitening_transform(X) | A callable linear matched filter.
Given target/background means and a common covariance matrix, the matched
filter response is given by:
.. math::
y=\frac{(\mu_t-\mu_b)^T\Sigma^{-1}(x-\mu_b)}{(\mu_t-\mu_b)^T\Sigma^{-1}(\mu_t-\mu_b)}
where :math:`\mu_t` is the target mean, :math:`\mu_b` is the background
mean, and :math:`\Sigma` is the covariance. | 6259906c2ae34c7f260ac916 |
class JoinUVs(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.join_uvs" <NEW_LINE> bl_label = "Join as UVs" <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> obj = context.active_object <NEW_LINE> return (obj and obj.type == 'MESH') <NEW_LINE> <DEDENT> def _main(self, context): <NEW_LINE> <INDENT> import array <NEW_LINE> obj = context.active_object <NEW_LINE> mesh = obj.data <NEW_LINE> is_editmode = (obj.mode == 'EDIT') <NEW_LINE> if is_editmode: <NEW_LINE> <INDENT> bpy.ops.object.mode_set(mode='OBJECT', toggle=False) <NEW_LINE> <DEDENT> if not mesh.uv_textures: <NEW_LINE> <INDENT> self.report({'WARNING'}, "Object: %s, Mesh: '%s' has no UVs\n" % (obj.name, mesh.name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> len_faces = len(mesh.faces) <NEW_LINE> uv_array = array.array('f', [0.0] * 8) * len_faces <NEW_LINE> mesh.uv_textures.active.data.foreach_get("uv_raw", uv_array) <NEW_LINE> objects = context.selected_editable_objects[:] <NEW_LINE> for obj_other in objects: <NEW_LINE> <INDENT> if obj_other.type == 'MESH': <NEW_LINE> <INDENT> obj_other.data.tag = False <NEW_LINE> <DEDENT> <DEDENT> for obj_other in objects: <NEW_LINE> <INDENT> if obj_other != obj and obj_other.type == 'MESH': <NEW_LINE> <INDENT> mesh_other = obj_other.data <NEW_LINE> if mesh_other != mesh: <NEW_LINE> <INDENT> if mesh_other.tag == False: <NEW_LINE> <INDENT> mesh_other.tag = True <NEW_LINE> if len(mesh_other.faces) != len_faces: <NEW_LINE> <INDENT> self.report({'WARNING'}, "Object: %s, Mesh: '%s' has %d faces, expected %d\n" % (obj_other.name, mesh_other.name, len(mesh_other.faces), len_faces)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> uv_other = mesh_other.uv_textures.active <NEW_LINE> if not uv_other: <NEW_LINE> <INDENT> uv_other = mesh_other.uv_textures.new() <NEW_LINE> <DEDENT> uv_other.data.foreach_set("uv_raw", uv_array) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if is_editmode: <NEW_LINE> <INDENT> 
bpy.ops.object.mode_set(mode='EDIT', toggle=False) <NEW_LINE> <DEDENT> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> self._main(context) <NEW_LINE> return {'FINISHED'} | Copy UV Layout to objects with matching geometry | 6259906c4a966d76dd5f0718 |
class OpMean(OpKeepdims): <NEW_LINE> <INDENT> def __init__(self, x: Operation, axis: Optional[Union[int, Sequence[int]]] = None, keepdims: bool = False, **kwargs): <NEW_LINE> <INDENT> super(OpMean, self).__init__(self.__class__, x, axis, keepdims, **kwargs) <NEW_LINE> <DEDENT> def _forward(self, feed_dict: Mapping[Union[str, OpPlaceholder], np.ndarray]) -> np.ndarray: <NEW_LINE> <INDENT> if not self.params['keepdims']: <NEW_LINE> <INDENT> return self.values[0] <NEW_LINE> <DEDENT> return np.mean(self.values[0], axis=self.params['axis'], keepdims=True) <NEW_LINE> <DEDENT> def _backward(self, gradient: np.ndarray) -> None: <NEW_LINE> <INDENT> if not self.params['keepdims']: <NEW_LINE> <INDENT> self.gradients = [gradient] <NEW_LINE> return <NEW_LINE> <DEDENT> self.gradients = [gradient / (np.prod(np.shape(self.values[0])) / np.prod(np.shape(self.output)))] | Calculate the mean of elements. | 6259906c442bda511e95d96f |
class CAPJSONSerializer(JSONSerializer): <NEW_LINE> <INDENT> def preprocess_search_hit(self, pid, record_hit, links_factory=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pid = PersistentIdentifier.get(pid_type=pid.pid_type, pid_value=pid.pid_value) <NEW_LINE> result = super().preprocess_search_hit( pid, record_hit, links_factory=links_factory) <NEW_LINE> return result <NEW_LINE> <DEDENT> except PIDDoesNotExistError: <NEW_LINE> <INDENT> current_app.logger.info( f'PIDDoesNotExistError on search. Record:.') <NEW_LINE> <DEDENT> <DEDENT> def serialize_search(self, pid_fetcher, search_result, links=None, item_links_factory=None, **kwargs): <NEW_LINE> <INDENT> links = { k: url_to_api_url(v) for k, v in links.items() } if links else {} <NEW_LINE> collection_buckets = search_result.get("aggregations", {}) .get("facet_collection", {}) .get("buckets", []) <NEW_LINE> if not collection_buckets: <NEW_LINE> <INDENT> collection_buckets = search_result.get("aggregations", {}) .get("facet_collection", {}) .get("filtered", {}) .get("buckets", []) <NEW_LINE> <DEDENT> for cb in collection_buckets: <NEW_LINE> <INDENT> if "__display_name__" in cb: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> display_name = (cb["__display_name__"]["hits"]["hits"][0] ["_source"]["_collection"]["fullname"]) <NEW_LINE> cb["__display_name__"] = display_name <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> del cb["__display_name__"] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return super().serialize_search(pid_fetcher, search_result, links=links, item_links_factory=item_links_factory) | Serializer for records v1 in JSON. | 6259906c3d592f4c4edbc70e |
class CropSize(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100, help_text="Something generic e.g. 'Small square'") <NEW_LINE> width = models.IntegerField() <NEW_LINE> height = models.IntegerField() <NEW_LINE> enabled = models.BooleanField(default=True) <NEW_LINE> @property <NEW_LINE> def dimensions(self): <NEW_LINE> <INDENT> return (self.width, self.height) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "%s [%s x %s]" % (self.name, self.width, self.height) | A crop size for editors to crop upon uploading a new image.
Instances of a Crop object will be created for each enabled CropType | 6259906ce5267d203ee6cfd5 |
@admin.register(models.User) <NEW_LINE> class CustomUserAdmin(UserAdmin): <NEW_LINE> <INDENT> fieldsets = UserAdmin.fieldsets + ( ( "Custom Profile", { "fields": ( "avatar", "gender", "bio", "birthdate", "langauge", "currency", "superhost" ) } ), ) <NEW_LINE> list_filter = UserAdmin.list_filter + ("superhost", ) <NEW_LINE> list_display = ( "username", "first_name", "last_name", "email", "is_active", "langauge", "currency", "superhost", "is_staff", "is_superuser", "email_confirmed", "email_secret", ) | Custom User Admin | 6259906c0c0af96317c57976 |
class TestGroupMemberDataEx(TestGroupMemberData): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> TestGroupMemberData.__init__(self) <NEW_LINE> self.oTestCase = None; <NEW_LINE> <DEDENT> def initFromDbRowEx(self, aoRow, oDb, tsNow=None): <NEW_LINE> <INDENT> TestGroupMemberData.initFromDbRow(self, aoRow); <NEW_LINE> self.oTestCase = TestCaseDataEx(); <NEW_LINE> self.oTestCase.initFromDbRowEx(aoRow[7:], oDb, tsNow); <NEW_LINE> return self; <NEW_LINE> <DEDENT> def initFromParams(self, oDisp, fStrict=True): <NEW_LINE> <INDENT> self.oTestCase = None; <NEW_LINE> return TestGroupMemberData.initFromParams(self, oDisp, fStrict); <NEW_LINE> <DEDENT> def getDataAttributes(self): <NEW_LINE> <INDENT> asAttributes = TestGroupMemberData.getDataAttributes(self); <NEW_LINE> asAttributes.remove('oTestCase'); <NEW_LINE> return asAttributes; <NEW_LINE> <DEDENT> def _validateAndConvertWorker(self, asAllowNullAttributes, oDb): <NEW_LINE> <INDENT> dErrors = TestGroupMemberData._validateAndConvertWorker(self, asAllowNullAttributes, oDb); <NEW_LINE> if self.ksParam_idTestCase not in dErrors: <NEW_LINE> <INDENT> self.oTestCase = TestCaseDataEx() <NEW_LINE> try: <NEW_LINE> <INDENT> self.oTestCase.initFromDbWithId(oDb, self.idTestCase); <NEW_LINE> <DEDENT> except Exception as oXcpt: <NEW_LINE> <INDENT> self.oTestCase = TestCaseDataEx() <NEW_LINE> dErrors[self.ksParam_idTestCase] = str(oXcpt); <NEW_LINE> <DEDENT> <DEDENT> return dErrors; | Extended representation of a test group member. | 6259906cac7a0e7691f73d16 |
class Subcycle(SuiteObject): <NEW_LINE> <INDENT> def __init__(self, sub_xml, context, parent, run_env): <NEW_LINE> <INDENT> name = sub_xml.get('name', None) <NEW_LINE> loop_extent = sub_xml.get('loop', "1") <NEW_LINE> try: <NEW_LINE> <INDENT> loop_int = int(loop_extent) <NEW_LINE> self._loop = loop_extent <NEW_LINE> self._loop_var_int = True <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self._loop_var_int = False <NEW_LINE> lvar = parent.find_variable(standard_name=self.loop, any_scope=True) <NEW_LINE> if lvar is None: <NEW_LINE> <INDENT> emsg = "Subcycle, {}, specifies {} iterations but {} not found" <NEW_LINE> raise CCPPError(emsg.format(name, self.loop, self.loop)) <NEW_LINE> <DEDENT> parent.add_call_list_variable(lvar) <NEW_LINE> <DEDENT> super().__init__(name, context, parent, run_env) <NEW_LINE> for item in sub_xml: <NEW_LINE> <INDENT> new_item = new_suite_object(item, context, self, run_env) <NEW_LINE> self.add_part(new_item) <NEW_LINE> <DEDENT> <DEDENT> def analyze(self, phase, group, scheme_library, suite_vars, level): <NEW_LINE> <INDENT> if self.name is None: <NEW_LINE> <INDENT> self.name = "subcycle_index{}".format(level) <NEW_LINE> <DEDENT> self.add_variable(Var({'local_name':self.name, 'standard_name':'loop_variable', 'type':'integer', 'units':'count', 'dimensions':'()'}, _API_SOURCE, self.run_env), self.run_env) <NEW_LINE> scheme_mods = set() <NEW_LINE> for item in self.parts: <NEW_LINE> <INDENT> smods = item.analyze(phase, group, scheme_library, suite_vars, level+1) <NEW_LINE> for smod in smods: <NEW_LINE> <INDENT> scheme_mods.add(smod) <NEW_LINE> <DEDENT> <DEDENT> return scheme_mods <NEW_LINE> <DEDENT> def write(self, outfile, errcode, indent): <NEW_LINE> <INDENT> outfile.write('do {} = 1, {}'.format(self.name, self.loop), indent) <NEW_LINE> for item in self.parts: <NEW_LINE> <INDENT> item.write(outfile, errcode, indent+1) <NEW_LINE> <DEDENT> outfile.write('end do', 2) <NEW_LINE> <DEDENT> @property <NEW_LINE> def loop(self): <NEW_LINE> 
<INDENT> lvar = self.find_variable(standard_name=self.loop, any_scope=True) <NEW_LINE> if lvar is None: <NEW_LINE> <INDENT> emsg = "Subcycle, {}, specifies {} iterations but {} not found" <NEW_LINE> raise CCPPError(emsg.format(self.name, self.loop, self.loop)) <NEW_LINE> <DEDENT> lname = lvar.get_prop_value('local_name') <NEW_LINE> return lname | Class to represent a subcycled group of schemes or scheme collections | 6259906c167d2b6e312b81a5 |
class Meta: <NEW_LINE> <INDENT> model = CrossAccountRequest <NEW_LINE> fields = ("request_id", "target_account", "user_id", "start_date", "end_date", "created", "status") | Metadata for the serializer. | 6259906cf548e778e596cdbc |
@python_2_unicode_compatible <NEW_LINE> class Region(MPTTModel): <NEW_LINE> <INDENT> parent = TreeForeignKey( 'self', null=True, blank=True, related_name='children', db_index=True, on_delete=models.CASCADE ) <NEW_LINE> name = models.CharField(max_length=50, unique=True) <NEW_LINE> slug = models.SlugField(unique=True) <NEW_LINE> csv_headers = [ 'name', 'slug', 'parent', ] <NEW_LINE> class MPTTMeta: <NEW_LINE> <INDENT> order_insertion_by = ['name'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return "{}?region={}".format(reverse('dcim:site_list'), self.slug) <NEW_LINE> <DEDENT> def to_csv(self): <NEW_LINE> <INDENT> return csv_format([ self.name, self.slug, self.parent.name if self.parent else None, ]) | Sites can be grouped within geographic Regions. | 6259906c21bff66bcd724496 |
class Pickler(StockPickler): <NEW_LINE> <INDENT> dispatch = MetaCatchingDict(StockPickler.dispatch.copy()) <NEW_LINE> _session = False <NEW_LINE> from .settings import settings <NEW_LINE> def __init__(self, *args, **kwds): <NEW_LINE> <INDENT> settings = Pickler.settings <NEW_LINE> _byref = kwds.pop('byref', None) <NEW_LINE> _fmode = kwds.pop('fmode', None) <NEW_LINE> _recurse = kwds.pop('recurse', None) <NEW_LINE> StockPickler.__init__(self, *args, **kwds) <NEW_LINE> self._main = _main_module <NEW_LINE> self._diff_cache = {} <NEW_LINE> self._byref = settings['byref'] if _byref is None else _byref <NEW_LINE> self._strictio = False <NEW_LINE> self._fmode = settings['fmode'] if _fmode is None else _fmode <NEW_LINE> self._recurse = settings['recurse'] if _recurse is None else _recurse <NEW_LINE> <DEDENT> def dump(self, obj): <NEW_LINE> <INDENT> stack.clear() <NEW_LINE> if NumpyUfuncType and numpyufunc(obj): <NEW_LINE> <INDENT> @register(type(obj)) <NEW_LINE> def save_numpy_ufunc(pickler, obj): <NEW_LINE> <INDENT> log.info("Nu: %s" % obj) <NEW_LINE> name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) <NEW_LINE> StockPickler.save_global(pickler, obj, name=name) <NEW_LINE> log.info("# Nu") <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> if NumpyArrayType and ndarraysubclassinstance(obj): <NEW_LINE> <INDENT> @register(type(obj)) <NEW_LINE> def save_numpy_array(pickler, obj): <NEW_LINE> <INDENT> log.info("Nu: (%s, %s)" % (obj.shape,obj.dtype)) <NEW_LINE> npdict = getattr(obj, '__dict__', None) <NEW_LINE> f, args, state = obj.__reduce__() <NEW_LINE> pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj) <NEW_LINE> log.info("# Nu") <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> if GENERATOR_FAIL and type(obj) == GeneratorType: <NEW_LINE> <INDENT> msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType <NEW_LINE> raise PicklingError(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> StockPickler.dump(self, obj) <NEW_LINE> 
<DEDENT> stack.clear() <NEW_LINE> return <NEW_LINE> <DEDENT> dump.__doc__ = StockPickler.dump.__doc__ <NEW_LINE> pass | python's Pickler extended to interpreter sessions | 6259906c76e4537e8c3f0db2 |
class UnalignedDataset: <NEW_LINE> <INDENT> def __init__(self, dataset_path, phase, max_dataset_size=float("inf"), shuffle=True): <NEW_LINE> <INDENT> self.dir_A = os.path.join(dataset_path, phase + 'A') <NEW_LINE> self.dir_B = os.path.join(dataset_path, phase + 'B') <NEW_LINE> self.A_paths = sorted(generate_image_list(self.dir_A, max_dataset_size)) <NEW_LINE> self.B_paths = sorted(generate_image_list(self.dir_B, max_dataset_size)) <NEW_LINE> self.A_size = len(self.A_paths) <NEW_LINE> self.B_size = len(self.B_paths) <NEW_LINE> self.shuffle = shuffle <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> index_B = index % self.B_size <NEW_LINE> if index % max(self.A_size, self.B_size) == 0 and self.shuffle: <NEW_LINE> <INDENT> random.shuffle(self.A_paths) <NEW_LINE> index_B = random.randint(0, self.B_size - 1) <NEW_LINE> <DEDENT> A_path = self.A_paths[index % self.A_size] <NEW_LINE> B_path = self.B_paths[index_B] <NEW_LINE> A_img = np.array(load_img(A_path)) <NEW_LINE> B_img = np.array(load_img(B_path)) <NEW_LINE> return A_img, B_img <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return max(self.A_size, self.B_size) | This dataset class can load unaligned/unpaired datasets.
Args:
dataset_path (str): The path of images (should have subfolders trainA, trainB, testA, testB, etc).
phase (str): Train or test. It requires two directories in dataset_path, like trainA and trainB to.
host training images from domain A '{dataset_path}/trainA' and from domain B '{dataset_path}/trainB'
respectively.
max_dataset_size (int): Maximum number of return image paths.
Returns:
Two domain image path list. | 6259906c4f6381625f19a0bf |
class OpenSSHSubprocessVendor(SubprocessVendor): <NEW_LINE> <INDENT> executable_path = 'ssh' <NEW_LINE> def _get_vendor_specific_argv(self, username, host, port, subsystem=None, command=None): <NEW_LINE> <INDENT> args = [self.executable_path, '-oForwardX11=no', '-oForwardAgent=no', '-oClearAllForwardings=yes', '-oNoHostAuthenticationForLocalhost=yes'] <NEW_LINE> if port is not None: <NEW_LINE> <INDENT> args.extend(['-p', str(port)]) <NEW_LINE> <DEDENT> if username is not None: <NEW_LINE> <INDENT> args.extend(['-l', username]) <NEW_LINE> <DEDENT> if subsystem is not None: <NEW_LINE> <INDENT> args.extend(['-s', '--', host, subsystem]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args.extend(['--', host] + command) <NEW_LINE> <DEDENT> return args | SSH vendor that uses the 'ssh' executable from OpenSSH. | 6259906c796e427e5384ffa6 |
class TripletPrefetcher(Process): <NEW_LINE> <INDENT> def __init__(self, conn, labels, data, mean, resize, batch_size, sampling_type, **kwargs): <NEW_LINE> <INDENT> super(TripletPrefetcher, self).__init__() <NEW_LINE> self._conn = conn <NEW_LINE> self._labels = labels <NEW_LINE> self._data = data <NEW_LINE> if type(self._data[0]) is not str: <NEW_LINE> <INDENT> self._compressed = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._compressed = True <NEW_LINE> <DEDENT> self._batch_size = batch_size <NEW_LINE> self._mean = mean <NEW_LINE> self._resize = resize <NEW_LINE> self._sampling_type = sampling_type <NEW_LINE> self._sampler = TripletSampler( self._sampling_type, self._labels, **kwargs) <NEW_LINE> <DEDENT> def type(self): <NEW_LINE> <INDENT> return "TripletPrefetcher" <NEW_LINE> <DEDENT> def get_a_datum(self): <NEW_LINE> <INDENT> sample = self._sampler.sample() <NEW_LINE> if self._compressed: <NEW_LINE> <INDENT> datum_ = [ extract_sample(self._data[id], self._mean, self._resize) for id in sample[:3]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> datum_ = [self._data[id] for id in sample[:3]] <NEW_LINE> <DEDENT> if len(sample) == 4: <NEW_LINE> <INDENT> datum_.append(sample[-1]) <NEW_LINE> <DEDENT> return datum_ <NEW_LINE> <DEDENT> def get_next_minibatch(self): <NEW_LINE> <INDENT> data = [] <NEW_LINE> p_data = [] <NEW_LINE> n_data = [] <NEW_LINE> label = [] <NEW_LINE> for i in range(self._batch_size): <NEW_LINE> <INDENT> datum_ = self.get_a_datum() <NEW_LINE> data.append(datum_[0]) <NEW_LINE> p_data.append(datum_[1]) <NEW_LINE> n_data.append(datum_[2]) <NEW_LINE> if len(datum_) == 4: <NEW_LINE> <INDENT> label.append(datum_[-1]) <NEW_LINE> <DEDENT> <DEDENT> batch = [np.array(data), np.array(p_data), np.array(n_data)] <NEW_LINE> if len(label): <NEW_LINE> <INDENT> label = np.array(label).reshape(self._batch_size, 1, 1, 1) <NEW_LINE> batch.append(label) <NEW_LINE> <DEDENT> return batch <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> print("Prefetcher 
Started...") <NEW_LINE> while True: <NEW_LINE> <INDENT> batch = self.get_next_minibatch() <NEW_LINE> self._conn.send(batch) | TripletPrefetcher:
Use a separate process to sample triplets,
following the same function implementations as TripletDataLayer | 6259906c76e4537e8c3f0db3 |
class Indeed: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_record(card): <NEW_LINE> <INDENT> atag = card.h2.a <NEW_LINE> job_title = atag.get("title") <NEW_LINE> job_url = "https://ru.indeed.com" + atag.get("href") <NEW_LINE> company_name = card.find("span", "company").text.strip() <NEW_LINE> location = card.find("div", "recJobLoc").get("data-rc-loc") <NEW_LINE> summary = card.find("div", "summary").text.strip() <NEW_LINE> date_post = card.find("span", "date").text.strip() <NEW_LINE> today_date = date.today().strftime("%Y-%m-%d") <NEW_LINE> try: <NEW_LINE> <INDENT> salary = card.find("span", "salaryText").text.strip() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> salary = "" <NEW_LINE> <DEDENT> record = (job_title, job_url, company_name, location, summary, date_post, today_date, salary) <NEW_LINE> return record <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_records(position, location): <NEW_LINE> <INDENT> url = f"https://ru.indeed.com/jobs?q={position}&l={location}" <NEW_LINE> response = requests.get(url) <NEW_LINE> soup = BeautifulSoup(response.text, "html.parser") <NEW_LINE> cards = soup.find_all("div", class_="jobsearch-SerpJobCard") <NEW_LINE> records = [] <NEW_LINE> for card in cards: <NEW_LINE> <INDENT> record = Indeed.get_record(card) <NEW_LINE> records.append(record) <NEW_LINE> <DEDENT> with open("jobs_indeed.csv", "w", newline="", encoding="utf-8") as f: <NEW_LINE> <INDENT> write = csv.writer(f) <NEW_LINE> write.writerow(["Job Title", "Url", "Company", "Location", "Summary", "Date post", "Date", "Salary"]) <NEW_LINE> write.writerows(records) <NEW_LINE> <DEDENT> return records | Extracts job's data from indeed. | 6259906cbe8e80087fbc08bc |
class retrieve_expanded_post_result(object): <NEW_LINE> <INDENT> def __init__(self, success=None, e1=None, e2=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e1 = e1 <NEW_LINE> self.e2 = e2 <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = TPost() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e1 = TPostNotFoundException() <NEW_LINE> self.e1.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e2 = TAccountNotFoundException() <NEW_LINE> self.e2.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('retrieve_expanded_post_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) 
<NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e1 is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e1', TType.STRUCT, 1) <NEW_LINE> self.e1.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e2 is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e2', TType.STRUCT, 2) <NEW_LINE> self.e2.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- e1
- e2 | 6259906c5fc7496912d48e80 |
class BaseTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> logger.info("setting up database : %s"%app.config["SQLALCHEMY_DATABASE_URI"]) <NEW_LINE> db.create_all() <NEW_LINE> with self.client: <NEW_LINE> <INDENT> data = {"password" : "admin", "invite" : "invite", "email" : "[email protected]"} <NEW_LINE> resp = self.client.post("/api/v1/users", data=data) <NEW_LINE> <DEDENT> <DEDENT> def create_app(self): <NEW_LINE> <INDENT> app.config["SQLALCHEMY_DATABASE_URI"] = SQLALCHEMY_DATABASE_URI <NEW_LINE> app.config["TESTING"] = TESTING <NEW_LINE> return app <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> logger.info("downgrading database") <NEW_LINE> db.session.remove() <NEW_LINE> db.drop_all() | A base test case. | 6259906c3d592f4c4edbc710 |
class ExplicitObject(Object): <NEW_LINE> <INDENT> def __init__(self, properties: Dict[str, BaseType]=None, required: List[str]=None, min_properties: int=None, max_properties: int=None, dependencies: Dict[str, Union[List[str], Schema]]=None, pattern_properties: Dict[str, BaseType]=None, enum: List[any]=None, title: str=None, description: str=None, default: any=None): <NEW_LINE> <INDENT> super(ExplicitObject, self).__init__(properties, False, required, min_properties, max_properties, dependencies, pattern_properties, enum, title, description, default) | Object that has additional_properties set to False, so all properties must be declared. | 6259906c8e71fb1e983bd2f8 |
class MetricsResultInfo(Model): <NEW_LINE> <INDENT> _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'start': {'key': 'start', 'type': 'iso-8601'}, 'end': {'key': 'end', 'type': 'iso-8601'}, 'interval': {'key': 'interval', 'type': 'duration'}, 'segments': {'key': 'segments', 'type': '[MetricsSegmentInfo]'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(MetricsResultInfo, self).__init__(**kwargs) <NEW_LINE> self.additional_properties = kwargs.get('additional_properties', None) <NEW_LINE> self.start = kwargs.get('start', None) <NEW_LINE> self.end = kwargs.get('end', None) <NEW_LINE> self.interval = kwargs.get('interval', None) <NEW_LINE> self.segments = kwargs.get('segments', None) | A metric result data.
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param start: Start time of the metric.
:type start: datetime
:param end: Start time of the metric.
:type end: datetime
:param interval: The interval used to segment the metric data.
:type interval: timedelta
:param segments: Segmented metric data (if segmented).
:type segments: list[~azure.applicationinsights.models.MetricsSegmentInfo] | 6259906c2c8b7c6e89bd5016 |
class IlluminatiAndStuffTweenFactory: <NEW_LINE> <INDENT> def __init__(self, handler, registry): <NEW_LINE> <INDENT> self.handler = handler <NEW_LINE> self.registry = registry <NEW_LINE> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.handler(request) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> tb = traceback.format_exc() <NEW_LINE> resp = response.Response( status_int=500, body=f'<pre>{tb}</pre>', ) <NEW_LINE> print(tb) <NEW_LINE> return resp | This tween prints out your 500 stack traces to the body of the
response. | 6259906c379a373c97d9a850 |
class VersionPermitView(SingleObjectMixin, QuestionView): <NEW_LINE> <INDENT> model = Motion <NEW_LINE> final_message = ugettext_lazy('Version successfully permitted.') <NEW_LINE> required_permission = 'motion.can_manage_motion' <NEW_LINE> question_url_name = 'motion_version_detail' <NEW_LINE> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> version_number = self.kwargs.get('version_number', None) <NEW_LINE> try: <NEW_LINE> <INDENT> self.version = self.get_object().versions.get(version_number=int(version_number)) <NEW_LINE> <DEDENT> except MotionVersion.DoesNotExist: <NEW_LINE> <INDENT> raise Http404('Version %s not found.' % version_number) <NEW_LINE> <DEDENT> return super().get(*args, **kwargs) <NEW_LINE> <DEDENT> def get_url_name_args(self): <NEW_LINE> <INDENT> return [self.get_object().pk, self.version.version_number] <NEW_LINE> <DEDENT> def get_question_message(self): <NEW_LINE> <INDENT> return _('Are you sure you want permit version %s?') % self.version.version_number <NEW_LINE> <DEDENT> def on_clicked_yes(self): <NEW_LINE> <INDENT> self.get_object().active_version = self.version <NEW_LINE> self.get_object().save(update_fields=['active_version']) <NEW_LINE> self.get_object().write_log( message_list=[ugettext_noop('Version'), ' %d ' % self.version.version_number, ugettext_noop('permitted')], person=self.request.user) | View to permit a version of a motion. | 6259906cd268445f2663a775 |
class Date: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def date_from_str(cls, s): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> year = int(s[:4]) <NEW_LINE> month = int(s[4:6]) <NEW_LINE> day = int(s[6:8]) <NEW_LINE> s = s.split('T')[1] <NEW_LINE> hour = int(s[:2]) <NEW_LINE> minute = int(s[2:4]) <NEW_LINE> second = int(s[4:6]) <NEW_LINE> return cls(year, month, day, hour, minute, second) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValueError('Invalid date format') <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, year=0, month=0, day=0, hour=0, minute=0, second=0): <NEW_LINE> <INDENT> self.year = year <NEW_LINE> self.month = month <NEW_LINE> self.day = day <NEW_LINE> self.hour = hour <NEW_LINE> self.minute = minute <NEW_LINE> self.second = second <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.year == other.year)and (self.month == other.month) and (self.day == other.day) and (self.hour == other.hour) and (self.minute == other.minute) and (self.second == other.second) <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return (self.year > other.year) or (self.month > other.month) or (self.day > other.day) or (self.hour > other.hour) or (self.minute > other.minute) or (self.second > other.second) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return (self.year < other.year) or (self.month < other.month) or (self.day < other.day) or (self.hour < other.hour) or (self.minute < other.minute) or (self.second < other.second) <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return (self.year <= other.year) or (self.month <= other.month) or (self.day <= other.day) or (self.hour <= other.hour) or (self.minute <= other.minute) or (self.second <= other.second) <NEW_LINE> <DEDENT> def same_date(self, other): <NEW_LINE> <INDENT> return (self.year == other.year) and (self.month == other.month) and (self.day == other.day) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 
'{0}.{1}.{2},{3}:{4}:{5}'.format(self.day, self.month, self.year, self.hour, self.minute, self.second) <NEW_LINE> <DEDENT> def file_str(self): <NEW_LINE> <INDENT> return '{0:4}{1:2}{2:2}T{3:2}{4:2}{5:}Z'.format(self.year, self.month, self.day, self.hour, self.minute, self.second).replace(' ', '0') | A class to represent the time of an event | 6259906c8a43f66fc4bf39c4 |
class shifter: <NEW_LINE> <INDENT> def __init__(self, sequence, idx_block_start, idx_block_end, idx_dest): <NEW_LINE> <INDENT> self.list = sequence <NEW_LINE> self._count = 1 + idx_block_end - idx_block_start <NEW_LINE> d_shift = idx_dest - idx_block_start <NEW_LINE> self._step = -(d_shift // abs(d_shift)) <NEW_LINE> end_offset = 0 <NEW_LINE> if idx_dest < idx_block_start: <NEW_LINE> <INDENT> self._slice_start = dest_child = idx_dest <NEW_LINE> rdest_child = self._slice_end = 1 + idx_block_end <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rdest_child = self._slice_start = idx_block_start <NEW_LINE> dest_child = idx_dest + self._count <NEW_LINE> end_offset = max(0, dest_child - len(sequence)) <NEW_LINE> if end_offset > 0: <NEW_LINE> <INDENT> dest_child -= end_offset <NEW_LINE> <DEDENT> self._slice_end = dest_child <NEW_LINE> <DEDENT> self._bstart = idx_block_start <NEW_LINE> self._rbstart = idx_dest - end_offset <NEW_LINE> self._bend = idx_block_end <NEW_LINE> self._rbend = idx_block_end + d_shift - end_offset <NEW_LINE> self._newbstart = dest_child <NEW_LINE> self._rnewbstart = rdest_child <NEW_LINE> self.affected_range = range(self._slice_start, self._slice_end) <NEW_LINE> <DEDENT> def block_start(self, reverse=False): <NEW_LINE> <INDENT> return self._rbstart if reverse else self._bstart <NEW_LINE> <DEDENT> def block_end(self, reverse=False): <NEW_LINE> <INDENT> return self._rbend if reverse else self._bend <NEW_LINE> <DEDENT> def block_dest(self, reverse=False): <NEW_LINE> <INDENT> return self._rnewbstart if reverse else self._newbstart <NEW_LINE> <DEDENT> def __call__(self, reverse=False): <NEW_LINE> <INDENT> deck = deque(self.list[self._slice_start:self._slice_end]) <NEW_LINE> deck.rotate(self._count * (-self._step if reverse else self._step)) <NEW_LINE> for i in range(self._slice_start, self._slice_end): <NEW_LINE> <INDENT> self.list[i] = deck.popleft() | Get a callable object that shifts (in-place) part of a mutable
sequence to different index in that sequence. The length of the
sequence must not change so long as the shifter object is in use. | 6259906c5fcc89381b266d6f |
class Command(management.BaseCommand): <NEW_LINE> <INDENT> RENEWAL_WINDOW = datetime.timedelta(hours=1) <NEW_LINE> help = ( "Update the validity and expiration status of all Know Me premium " "subscriptions." ) <NEW_LINE> @staticmethod <NEW_LINE> def deactivate_orphan_subscriptions(): <NEW_LINE> <INDENT> return models.Subscription.objects.filter( apple_receipt__isnull=True, is_legacy_subscription=False, is_active=True, ).update(is_active=False) <NEW_LINE> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> self.stdout.write( "Deactivating all subscriptions without a receipt..." ) <NEW_LINE> orphan_subs = self.deactivate_orphan_subscriptions() <NEW_LINE> self.stdout.write(f"Deactivated {orphan_subs} orphan subscription(s).") <NEW_LINE> now = timezone.now() <NEW_LINE> cutoff_time = now + self.RENEWAL_WINDOW <NEW_LINE> self.stdout.write( f"Updating Apple subscriptions that expire before " f"{cutoff_time.isoformat()}..." ) <NEW_LINE> self.update_apple_subscriptions(now, self.RENEWAL_WINDOW) <NEW_LINE> self.stdout.write( self.style.SUCCESS("Finished updating Apple subscriptions.") ) <NEW_LINE> <DEDENT> def update_apple_subscriptions( self, now: datetime.datetime, renewal_window: datetime.timedelta ): <NEW_LINE> <INDENT> receipts = models.AppleReceipt.objects.filter( expiration_time__lte=now + renewal_window ) <NEW_LINE> receipt_pks_to_delete = [] <NEW_LINE> for receipt in receipts: <NEW_LINE> <INDENT> is_active = True <NEW_LINE> try: <NEW_LINE> <INDENT> receipt.update_info() <NEW_LINE> receipt.save() <NEW_LINE> if receipt.expiration_time < now: <NEW_LINE> <INDENT> is_active = False <NEW_LINE> <DEDENT> <DEDENT> except subscriptions.CancelledReceiptException as e: <NEW_LINE> <INDENT> self.stderr.write( self.style.NOTICE( f"Apple receipt for original transaction " f"{receipt.transaction_id} has been cancelled and " f"will be deleted: {e.msg}" ) ) <NEW_LINE> is_active = False <NEW_LINE> receipt_pks_to_delete.append(receipt.pk) <NEW_LINE> <DEDENT> except 
subscriptions.ReceiptException as e: <NEW_LINE> <INDENT> self.stderr.write( self.style.NOTICE( f"Apple receipt {receipt.pk} failed validation: " f"{e.msg}" ) ) <NEW_LINE> is_active = False <NEW_LINE> <DEDENT> models.Subscription.objects.filter(apple_receipt=receipt).update( is_active=is_active ) <NEW_LINE> <DEDENT> if receipt_pks_to_delete: <NEW_LINE> <INDENT> models.AppleReceipt.objects.filter( pk__in=receipt_pks_to_delete ).delete() | Management command to update the status of all subscriptions. | 6259906cf548e778e596cdbd |
class DynamoDbBlockListManager(BlockListManager): <NEW_LINE> <INDENT> def __init__( self, root_dm: "DynamoDbManager", client: Any, resource: Any, table: str ): <NEW_LINE> <INDENT> self._root_dm = root_dm <NEW_LINE> self._client = client <NEW_LINE> self._table_name = table <NEW_LINE> self._table = resource.Table(table) <NEW_LINE> <DEDENT> def create_table(self): <NEW_LINE> <INDENT> if self._table_name not in self._root_dm.get_tables(): <NEW_LINE> <INDENT> self._client.create_table( TableName=self._table_name, AttributeDefinitions=[ { "AttributeName": "id", "AttributeType": "S" } ], KeySchema=[ { "AttributeName": "id", "KeyType": "HASH" } ], BillingMode="PAY_PER_REQUEST" ) <NEW_LINE> <DEDENT> <DEDENT> def delete_table(self): <NEW_LINE> <INDENT> if self._table_name in self._root_dm.get_tables(): <NEW_LINE> <INDENT> self._client.delete_table(TableName=self._table_name) <NEW_LINE> <DEDENT> <DEDENT> def contains(self, _id: str) -> bool: <NEW_LINE> <INDENT> bl = self._table.get_item(Key={"id": _id}).get("Item") <NEW_LINE> if bl is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> exp = bl["expiration"] <NEW_LINE> now = int(datetime.now().timestamp()) <NEW_LINE> if now > exp: <NEW_LINE> <INDENT> self._delete(_id) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def put(self, _id: str, exp: int): <NEW_LINE> <INDENT> self._table.put_item(Item={"id": _id, "expiration": exp}) <NEW_LINE> <DEDENT> def _delete(self, _id: str): <NEW_LINE> <INDENT> self._table.delete_item(Key={"id": _id}) | DynamoDB block list manager. | 6259906c63d6d428bbee3ea2 |
class HorizPriceShock(EconShockScenario): <NEW_LINE> <INDENT> def apply(self, curve: Curve) -> Curve: <NEW_LINE> <INDENT> if isinstance(curve, SupplyCurve): <NEW_LINE> <INDENT> return SupplyCurve( [{'price': price + self.supply_shock, 'supply': quantity} for price, quantity in zip(curve._price, curve._quantity)]) <NEW_LINE> <DEDENT> elif isinstance(curve, DemandCurve): <NEW_LINE> <INDENT> return DemandCurve( [{'price': price + self.demand_shock, 'demand': quantity} for price, quantity in zip(curve._price, curve._quantity)]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('curve is not one of SupplyCurve or DemandCurve') | Horizontal price shocks (tranlations to supply/demand curves). | 6259906c435de62698e9d636 |
class UserRegistrationForm(forms.Form): <NEW_LINE> <INDENT> username = forms.CharField( label='Username', max_length=100, min_length=5, widget=forms.TextInput(attrs={'Class': 'form-control'})) <NEW_LINE> email = forms.EmailField( widget=forms.TextInput(attrs={'Class': 'form-control'})) <NEW_LINE> password1 = forms.CharField( label='Password', max_length=100, min_length=5, widget=forms.PasswordInput(attrs={'Class': 'form-control'})) <NEW_LINE> password2 = forms.CharField( label='Confirm Password', max_length=100, min_length=5, widget=forms.PasswordInput(attrs={'Class': 'form-control'})) <NEW_LINE> def clean_email(self): <NEW_LINE> <INDENT> email = self.cleaned_data['email'] <NEW_LINE> qs = User.objects.filter(email=email) <NEW_LINE> if qs.exists(): <NEW_LINE> <INDENT> raise ValidationError('Emails is already registered.') <NEW_LINE> <DEDENT> return email <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> cleaned_data = super().clean() <NEW_LINE> p1 = cleaned_data.get('password1') <NEW_LINE> p2 = cleaned_data.get('password2') <NEW_LINE> if p1 and p2: <NEW_LINE> <INDENT> if p1 != p2: <NEW_LINE> <INDENT> raise ValidationError('PasswordS does not match') | Registration Form. | 6259906c1b99ca400229014e |
class Node: <NEW_LINE> <INDENT> def __init__(self, elem, lchild=None, rchild=None): <NEW_LINE> <INDENT> self.elem = elem <NEW_LINE> self.lchild = lchild <NEW_LINE> self.rchild = rchild | 节点类 | 6259906c8e7ae83300eea8c1 |
class Ephe(Structure): <NEW_LINE> <INDENT> _fields_ = [ ('<L', 'dwrd') ] | Ublox rxm_sfrbx Ephemeris NumWords fields | 6259906c7d43ff248742802a |
class CostModelProviderQueryException(APIException): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR <NEW_LINE> self.detail = {'detail': force_text(message)} | Rate query custom internal error exception. | 6259906caad79263cf42ffe7 |
class CreateNoteUpperForm(CreateNoteForm): <NEW_LINE> <INDENT> text = CharFieldUpper( widget=forms.Textarea(attrs={ 'class': 'form-control'}) ) | The form for creating note object which will have
only uppercase letters.
We check note's length. It must be more than 9. | 6259906c76e4537e8c3f0db5 |
class GroupDoesNotExist(EntryDoesNotExist): <NEW_LINE> <INDENT> pass | The requested group does not exist. | 6259906c99cbb53fe6832719 |
class DETRawSampler:
    """Randomly samples raw instances (paths to images and labels) from
    ILSVRC DET train+val, restricted to an allowed class set."""

    def __init__(
        self, data_root: Path, allowed_class_ids: Set[str], allowed_class_ints: Set[int]
    ) -> None:
        """Index every train/val frame whose object classes are all allowed.

        Args:
            data_root: dataset root containing Annotations/, Data/, ImageSets/.
            allowed_class_ids: allowed class-id strings (as found in the XML labels).
            allowed_class_ints: integer ids used to pick the per-class train lists.
        """
        label_root = Path(data_root, "Annotations", "DET")
        frame_root = Path(data_root, "Data", "DET")
        # class id -> list of RawImageInstance that contain that class
        self._rawinstances_by_cls = defaultdict(list)
        # DET ships one train list per class but a single val list.
        trn_files = [f"train_{cls_id}" for cls_id in allowed_class_ints]
        val_files = ["val"]
        for mode, files in zip(["train", "val"], [trn_files, val_files]):
            for f in files:
                instance_list_path = Path(data_root, "ImageSets", "DET", f"{f}.txt")
                with open(instance_list_path) as instance_list:
                    for line in instance_list:
                        instance_id, _ = line.split()
                        # skip "extra" entries
                        if "extra" in instance_id:
                            continue
                        framepath = Path(frame_root, mode, f"{instance_id}.JPEG")
                        labelpath = Path(label_root, mode, f"{instance_id}.xml")
                        class_ids = {
                            pascal_object.class_id
                            for pascal_object in parse_pascal_xmlfile(labelpath)
                        }
                        # keep only frames whose objects are *all* allowed
                        if class_ids.issubset(allowed_class_ids):
                            ri = RawImageInstance(impath=framepath, labelpath=labelpath)
                            # a frame is indexed once per class it contains
                            for class_id in class_ids:
                                self._rawinstances_by_cls[class_id].append(ri)

    def sample(self) -> RawImageInstance:
        """Uniformly pick a class, then a raw instance of that class."""
        sampled_cls_id = random.choice(list(self._rawinstances_by_cls.keys()))
        raw_instance = random.choice(self._rawinstances_by_cls[sampled_cls_id])
        return raw_instance
class MTobjects:
    """Find gdb objfiles and relate them to their debug objfiles,
    grouped by program space."""

    def __init__(self):
        # Lightweight records: an objfile plus its attached debug objfiles.
        class Objfile:
            def __init__(self, obj):
                self._obj = obj
                self._debug = set()

        # A program space plus its top-level objfiles keyed by filename.
        class Progspace:
            def __init__(self, progspace):
                self._progspace = progspace
                self._objs = { }

        self._progspaces = { }
        # Pass 1: index every valid top-level objfile (owner == None).
        for obj in gdb.objfiles():
            if obj.is_valid() and obj.owner == None:
                assert obj.progspace.filename
                self._progspaces.setdefault(obj.progspace.filename, Progspace(obj))._objs[obj.filename] = Objfile(obj)
        # Pass 2: attach each separate-debug objfile to its owner's record.
        for obj in gdb.objfiles():
            if obj.is_valid() and obj.owner != None:
                assert obj.progspace.filename and obj.progspace.filename in self._progspaces.keys()
                progspace = self._progspaces.get(obj.progspace.filename)
                assert obj.owner.filename in progspace._objs.keys()
                objfile = progspace._objs[obj.owner.filename]
                objfile._debug.add(obj)

    def dump(self):
        """Print the progspace -> objfile -> debug-objfile tree, colorized
        via the module-level `c` color constants."""
        print(c.white + 'objects' + c.reset)
        for name, progspace in self._progspaces.items():
            print(' ' + c.cyan + os.path.basename(name) + ' ' + c.blue + os.path.dirname(name) + c.reset)
            for name_obj, objfile in progspace._objs.items():
                print(' ' + c.green + os.path.basename(name_obj) + ' ' + c.blue + os.path.dirname(name_obj) + c.reset)
                for debug_objfile in objfile._debug:
                    name_dbg = debug_objfile.filename
                    print(' ' + c.red + os.path.basename(name_dbg) + ' ' + c.blue + os.path.dirname(name_dbg) + c.reset)
class ProtocolBuffersResource(object):
    """Protocol Buffers based Falcon resource.

    For every HTTP method that *pb_class* declares a message type for,
    installs an ``on_<method>`` responder that decodes the JSON body plus
    URL/query parameters into that protobuf and dispatches to a
    subclass-provided ``handle_<method>``.
    """

    def __init__(self, pb_class):
        self.pb_class = pb_class
        # Wire on_get/on_post/... only for methods the pb class supports.
        for http_method in falcon.HTTP_METHODS:
            if hasattr(pb_class, http_method):
                method = http_method.lower()
                setattr(self, 'on_%s' % method, self._on_prototype)

    @falcon.before(require_json)
    def _on_prototype(self, req, resp, **kwargs):
        # Shared responder: all methods funnel through _handle.
        self._handle(req, resp, **kwargs)

    def _handle(self, req, resp, **kwargs):
        """Decode request -> protobuf, dispatch, re-encode result as JSON."""
        pb = self._to_pb(req, kwargs)
        handle = getattr(self, 'handle_%s' % req.method.lower())
        result = handle(req, resp, pb, **kwargs)
        resp_dict = protobuf_to_dict.protobuf_to_dict(result)
        # Empty dict means no body is written.
        if resp_dict:
            resp.body = json.dumps(resp_dict)

    def _to_pb(self, req, url_params):
        """Build the request protobuf from body JSON + URL + query params.

        Raises falcon.HTTPBadRequest when the body is not valid JSON.
        """
        body = req.stream.read() or '{}'
        try:
            body_dict = json.loads(body)
        except ValueError:
            raise falcon.HTTPBadRequest('Invalid request', 'Cannot parse JSON in request body.')
        # URL params first, then query params (query params win on clashes).
        for params in (url_params, req.params):
            for key in params:
                body_dict[key] = params[key]
        pb_class = getattr(self.pb_class, req.method)
        return protobuf_to_dict.dict_to_protobuf(pb_class, body_dict)
class RegisterUser(BaseView):
    """View that registers a new user."""

    def post(self):
        """Validate the JSON payload and create a user.

        Returns the validator's error response when any check fails,
        409 when the email is already registered, 201 on success.
        """
        # Each validate_*/check_* helper returns a falsy value on success
        # or a ready-made error response (note: helpers are called twice —
        # once to test, once to return).
        if self.validate_json():
            return self.validate_json()
        data = request.get_json()
        email = data.get('email')
        username = data.get('username')
        password = data.get('password')
        user_data = {'email': email, 'username': username, 'password': password}
        if self.validate_null(**user_data):
            return self.validate_null(**user_data)
        if self.check_email(email):
            return self.check_email(email)
        if self.check_password(password):
            return self.check_password(password)
        email = self.normalize_email(email)
        norm_name = self.remove_extra_spaces(name=username)
        username = norm_name['name']
        # `users` is a module-level in-memory store.
        emails = [user.email for user in users]
        if email in emails:
            response = {'message': 'User already exists. Please login'}
            return jsonify(response), 409
        user = User(email, username, password)
        users.append(user)
        response = {'message': 'Account created successfully'}
        return jsonify(response), 201
class CI_OnlineResource(object):
    """Process a gmd:CI_OnlineResource metadata element into plain
    attributes (url, protocol, name, description, function)."""

    def __init__(self, md=None):
        if md is None:
            # No element supplied: build an empty record.
            self.url = None
            self.protocol = None
            self.name = None
            self.description = None
            self.function = None
        else:
            val = md.find(util.nspath_eval('gmd:linkage/gmd:URL', namespaces))
            self.url = util.testXMLValue(val)
            val = md.find(util.nspath_eval('gmd:protocol/gco:CharacterString', namespaces))
            self.protocol = util.testXMLValue(val)
            val = md.find(util.nspath_eval('gmd:name/gco:CharacterString', namespaces))
            self.name = util.testXMLValue(val)
            val = md.find(util.nspath_eval('gmd:description/gco:CharacterString', namespaces))
            self.description = util.testXMLValue(val)
            # function is a code-list value, resolved by a helper.
            self.function = _testCodeListValue(md.find(util.nspath_eval('gmd:function/gmd:CI_OnLineFunctionCode', namespaces)))
class AdminDAO:
    """Data access object for the admin accounts."""

    def __init__(self):
        """No state to initialise; all lookups go straight to the ODM."""
        pass

    def get_admin(self, username):
        """Return the admin document for *username*, or None if absent."""
        try:
            return admin.objects.get(username=username)
        except DoesNotExist:
            return None

    def check_password(self, password_hash, password):
        """True when *password* matches the stored bcrypt *password_hash*."""
        return bcrypt.check_password_hash(password_hash, password)
class CatalogException(base.ClientException):
    """Raised when something is rotten in the Service Catalog."""
    pass
class WordStem(object):
    """Stemming methods backed by NLTK.

    See http://www.nltk.org/howto/stem.html
    """

    def get_porter_stem(self, word):
        """Return the Porter stem of *word*."""
        return porter.PorterStemmer().stem(word)

    def get_snowball_stem(self, word):
        """Return the English Snowball stem of *word*."""
        return snowball.SnowballStemmer("english").stem(word)
class trip_weighted_travel_time_for_transit_walk(Variable):
    """The zone-level trip_weighted_travel_time_for_transit_walk value,
    joined onto this dataset through its zone_id."""

    # name of the zone attribute this variable mirrors
    zone_wtt_tw = "trip_weighted_travel_time_for_transit_walk"

    def dependencies(self):
        # needs the zone attribute and our own zone_id to join on
        return [attribute_label("zone", self.zone_wtt_tw), my_attribute_label("zone_id")]

    def compute(self, dataset_pool):
        zones = dataset_pool.get_dataset('zone')
        return self.get_dataset().get_join_data(zones, name=self.zone_wtt_tw)
class Image(base_classes.BaseNode):
    """Class that wraps an image node — the node that represents the
    actual texture file on disk."""

    def __init__(self, node, parent):
        logger.debug("Image().__init__(%s)", node)
        base_classes.BaseNode.__init__(self, node, parent, constants.IMAGE)
        if(self.scene.options.get(constants.EMBED_TEXTURES, False)):
            # Embed the texture inline as a base64 data URI.
            texturefile = open(api.image.file_path(self.node),"rb")
            extension = os.path.splitext(api.image.file_path(self.node))[1][1:].strip().lower()
            # MIME type for JPEG is image/jpeg, not image/jpg
            if(extension == 'jpg') :
                extension = 'jpeg'
            self[constants.URL] = "data:image/" + extension + ";base64," + base64.b64encode(texturefile.read()).decode("utf-8")
            texturefile.close();
        else:
            # Reference the file relative to the configured texture folder.
            texture_folder = self.scene.options.get(constants.TEXTURE_FOLDER, "")
            self[constants.URL] = os.path.join(texture_folder, api.image.file_name(self.node))

    @property
    def destination(self):
        """Destination path of the exported texture, next to the scene file."""
        dirname = os.path.dirname(self.scene.filepath)
        return os.path.join(dirname, self[constants.URL])

    @property
    def filepath(self):
        """Source path of the image file on disk."""
        return api.image.file_path(self.node)

    def copy_texture(self, func=io.copy):
        """Copy the texture from filepath to destination using *func*;
        returns the destination path."""
        logger.debug("Image().copy_texture()")
        func(self.filepath, self.destination)
        return self.destination
class PersoonViewSet(rest.DatapuntViewSet):
    """Persoon (PRS)

    (Translated from Dutch.) A Persoon (person) is anyone who can bear
    rights and obligations. 'Persoon' is the umbrella term — a collective
    name for NatuurlijkPersoon, NietNatuurlijkPersoon and NaamPersoon —
    used to talk about them. Every Persoon appearing in the trade
    register has either an Eigenaarschap (ownership) and/or at least one
    Functievervulling (function fulfilment) recording the person's role.
    """
    queryset = models.Persoon.objects.all().order_by('id')
    # Detail view eagerly loads both person subtypes to avoid extra queries.
    queryset_detail = (models.Persoon.objects
                       .select_related('natuurlijkpersoon')
                       .select_related('niet_natuurlijkpersoon')
                       .all())
    serializer_detail_class = serializers.PersoonDetail
    serializer_class = serializers.Persoon
    filter_class = PersoonFilter
    ordering = ('id',)
class TestClosedLoopControlBase():
    """Base class for closed-loop control tests."""
    pass
class LandingParamsValidatorMixin(BaseParamsValidatorMixin):
    """Mixin with validators used to validate request parameters."""

    @staticmethod
    def _ajax_validator(value, default):
        """Coerce *value* to int, falling back to *default* on bad input.

        Only conversion failures fall back; the previous revision caught
        BaseException, which also swallowed SystemExit/KeyboardInterrupt
        and bound an unused `exc` name.
        """
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    @staticmethod
    def _landing_slug_title_validator(value, default):
        """Slug/title values pass through unchanged."""
        return value
class NDPosPlugin(AsynPort):
    """This plugin attaches position information to NDArrays."""

    # builder boilerplate: argument used as the unique identifier
    UniqueName = "PORT"
    _SpecificTemplate = NDPosPluginTemplate

    def __init__(self, PORT, NDARRAY_PORT, QUEUE = 2, BLOCK = 0, NDARRAY_ADDR = 0, PRIORITY = 0, STACKSIZE = 0, **args):
        self.__super.__init__(PORT)
        # capture all constructor args for the Initialise() format strings
        self.__dict__.update(locals())
        makeTemplateInstance(self._SpecificTemplate, locals(), args)

    # One entry per constructor argument, for the xmlbuilder GUI.
    ArgInfo = _SpecificTemplate.ArgInfo + makeArgInfo(__init__,
        PORT = Simple('Port name for the NDPosPlugin plugin', str),
        QUEUE = Simple('Input array queue size', int),
        BLOCK = Simple('Blocking callbacks?', int),
        NDARRAY_PORT = Ident('Input array port', AsynPort),
        NDARRAY_ADDR = Simple('Input array port address', int),
        PRIORITY = Simple('Max buffers to allocate', int),
        STACKSIZE = Simple('Max buffers to allocate', int))

    def Initialise(self):
        """Emit the NDPosPluginConfigure startup-script lines."""
        # NOTE(review): the first print applies '%' to a string with no
        # format specifiers — it works, but '% self.__dict__' is a no-op.
        print('# NDPosPluginConfigure(portName, queueSize, blockingCallbacks, NDArrayPort, NDArrayAddr, maxBuffers, maxMemory, priority, stackSize)' % self.__dict__)
        print('NDPosPluginConfigure("%(PORT)s", %(QUEUE)d, %(BLOCK)d, "%(NDARRAY_PORT)s", %(NDARRAY_ADDR)s, 0, 0, %(PRIORITY)d, %(STACKSIZE)d)' % self.__dict__)
class CheckFastqVersion(PipelineAction):
    """Check the quality-score encoding of a fastq file in order to set
    the proper option for ``bwa aln`` runs.

    File flow: write the option to the output file, and set pipeline
    variable ``ALN_PARAM``::

        INPUT ====> OUTPUT

    Example:
        action=CheckFastqVersion(output='aln_param.txt')

    Note:
        The result is written to a file to avoid re-checking the fastq
        files repeatedly.

    Fixes over the previous revision: a ``.foramt(`` typo that raised
    AttributeError whenever the "suspicious file" warning path fired,
    FASTA/FASTQ confusion in messages, and an EOF guard so short files
    are judged from the quality characters actually read instead of
    failing with a misleading ValueError.
    """

    def __init__(self, output):
        PipelineAction.__init__(self, 'CheckFastqVersion', output)

    def __call__(self, fastq_file, pipeline=None):
        # If only the .file_info stub exists we cannot inspect the data;
        # reuse a previous result or fail.
        if not os.path.isfile(fastq_file[0]) and os.path.isfile(fastq_file[0] + '.file_info'):
            if os.path.isfile(self.output[0]):
                return self.output
            else:
                raise RuntimeError('A valid fastq file is needed to check version of fastq: .file_info detected')
        with open(self.output[0], 'w') as aln_param:
            qual_scores = ''
            with openFile(fastq_file[0]) as fastq:
                # Sample up to ~1000 quality characters (4 lines/record).
                while len(qual_scores) < 1000:
                    try:
                        line = fastq.readline().decode('utf-8')
                    except Exception as e:
                        raise RuntimeError('Failed to read fastq file {}: {}'
                                           .format(fastq_file, e))
                    if not line:
                        # EOF before 1000 chars: judge from what we have.
                        break
                    if not line.startswith('@'):
                        raise ValueError('Wrong FASTQ file {}'.format(fastq_file))
                    line = fastq.readline().decode('utf-8')  # sequence
                    line = fastq.readline().decode('utf-8')  # separator
                    if not line.startswith('+'):
                        env.logger.warning(
                            'Suspicious FASTQ file {}: third line does not start with "+".'
                            .format(fastq_file))
                        return
                    line = fastq.readline().decode('utf-8')  # quality
                    qual_scores += line.strip()
            if not qual_scores:
                raise ValueError('No quality scores found in {}'.format(fastq_file))
            min_qual = min([ord(x) for x in qual_scores])
            max_qual = max([ord(x) for x in qual_scores])
            env.logger.debug('FASTQ file with quality score ranging {} to {}'
                             .format(min_qual, max_qual))
            # Phred+64 (Illumina 1.3+) encodings need `bwa aln -I`.
            if min_qual >= 64 or max_qual > 90:
                aln_param.write('-I')
                pipeline.VARS['ALN_PARAM'] = '-I'
            else:
                pipeline.VARS['ALN_PARAM'] = ''
        return self.output
class LicenseAudioRequest(object):
    """Auto-generated by the swagger code generator — do not edit manually."""

    # attribute name -> swagger type
    swagger_types = {
        'audio': 'list[LicenseAudio]'
    }
    # attribute name -> JSON key
    attribute_map = {
        'audio': 'audio'
    }

    def __init__(self, audio=None):
        self._audio = None
        self.discriminator = None
        # required: the setter rejects None
        self.audio = audio

    @property
    def audio(self):
        """The audio items to license (list[LicenseAudio])."""
        return self._audio

    @audio.setter
    def audio(self, audio):
        if audio is None:
            raise ValueError("Invalid value for `audio`, must not be `None`")
        self._audio = audio

    def to_dict(self):
        """Return the model's properties as a dict, recursively."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(LicenseAudioRequest, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when both objects are equal."""
        if not isinstance(other, LicenseAudioRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """True when both objects are not equal."""
        return not self == other
class Robot(Namespace):
    """Namespace bound to a robot's websocket connection, taking part in
    the cloud engine's internal communication."""

    def __init__(self, endpoint):
        """Register this namespace with the given *endpoint*."""
        super(Robot, self).__init__(endpoint)

    def getWebsocketAddress(self):
        """Delegate to the endpoint's websocket address."""
        address = self._endpoint.getWebsocketAddress()
        return address
class ScoreArea(tk.Frame):
    """Renders the score area: a name/score row per player plus an
    "I'm out" elimination button."""

    def __init__(self, parent, players, eliminate_player):
        tk.Frame.__init__(self, parent)
        self.parent = parent
        self.name = 'scorearea'
        self.players = players
        # parallel lists, indexed by player position
        self.player_score_vars = []
        self.player_name_labels = []
        self.player_score_labels = []
        # NOTE(review): .grid() returns None, so `title` is always None.
        title = tk.Label(self, text="Scores:").grid(row=0, column=0)
        for i, p in enumerate(self.players._players):
            self.player_score_vars.append(tk.StringVar())
            # NOTE(review): text=(p.name, "-") passes a tuple; Tk renders
            # it as a Tcl list — confirm this is the intended display.
            self.player_name_labels.append(tk.Label(self, text=(p.name,"-")))
            self.player_score_labels.append(tk.Label(self, textvariable=self.player_score_vars[i]))
            self.player_name_labels[i].grid(row=i+1, column=1)
            self.player_score_labels[i].grid(row=i+1, column=2)
            self.player_score_vars[i].set(p.score)
        button = tk.Button(self, text="I'm out! :(", command=eliminate_player).grid(row=5, column=2)

    def update_score(self, players):
        """Refresh every player's displayed score from *players*."""
        for i, p in enumerate(players._players):
            self.player_score_vars[i].set(p.score)
class PyramidPoolingMainBranch(Chain):
    """Pyramid pooling main branch.

    Parameters
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    scale_factor : float
        Multiplier for spatial size.
    """

    def __init__(self, in_channels, out_channels, scale_factor):
        super(PyramidPoolingMainBranch, self).__init__()
        with self.init_scope():
            self.att = AttentionRefinementBlock(
                in_channels=in_channels,
                out_channels=out_channels)
            self.up = InterpolationBlock(
                scale_factor=scale_factor,
                mode="nearest")
            self.conv = conv3x3_block(
                in_channels=out_channels,
                out_channels=out_channels)

    def __call__(self, x, y):
        # attention -> add lateral input -> upsample -> 3x3 conv
        x = self.att(x)
        x = x + y
        x = self.up(x)
        x = self.conv(x)
        return x
class LoginForm(FlaskForm):
    """Login form with custom user lookup and password verification."""

    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    twitter = StringField("Twitter")

    def __init__(self, *args, **kwargs):
        super(LoginForm, self).__init__(*args, **kwargs)
        # resolved User instance, populated by validate() on success
        self.user = None

    def validate(self, extra_validators=None):
        """Run field validation, then user lookup / password / active
        checks.

        Accepts ``extra_validators`` so the override matches the
        wtforms ``Form.validate`` signature (required by WTForms 3;
        the previous zero-argument override breaks there).
        """
        initial_validation = super(LoginForm, self).validate(extra_validators)
        if not initial_validation:
            return False
        self.user = User.query.filter_by(username=self.username.data).first()
        if not self.user:
            self.username.errors.append('Unknown username')
            return False
        if not self.user.check_password(self.password.data):
            self.password.errors.append('Invalid password')
            return False
        if not self.user.active:
            self.username.errors.append('User not activated')
            return False
        return True
@pimms.immutable
class TanPotential(PotentialFunction):
    """TanPotential is a potential function that represents tan(x)."""

    def __init__(self): pass

    def value(self, x): return tangent(x)

    def jacobian(self, x, into=None):
        # d/dx tan(x) = sec(x)^2; returned as a sparse diagonal matrix
        # (elementwise Jacobian over the flattened input).
        x = flattest(x)
        z = sps.diags(secant(x)**2)
        return safe_into(into, z)
class SimpleBuildingBlockPass(microprobe.passes.Pass):
    """SimpleBuildingBlockPass Class.

    This Pass adds a single basic block with a given number of
    instructions to the given building block. check() fails if the
    resulting basic-block size deviates (in ratio, either direction)
    from the target by more than *threshold*.
    """

    def __init__(self, bblsize, threshold=0.1):
        super(SimpleBuildingBlockPass, self).__init__()
        self._bblsize = bblsize
        self._description = "Create a basic block with '%d' " "instructions" % self._bblsize
        self._threshold = threshold

    def __call__(self, building_block, dummy):
        building_block.cfg.add_bbl(size=self._bblsize)

    def check(self, building_block, dummy_target):
        """Return True iff there is exactly one bbl and its size is
        within *threshold* relative deviation of the requested size."""
        pass_ok = True
        pass_ok = pass_ok and (len(building_block.cfg.bbls) == 1)
        for bbl in building_block.cfg.bbls:
            LOG.debug("BBL size: %d", bbl.size)
            LOG.debug("Expected BBL size: %d", self._bblsize)
            # Bug fix: abs() must wrap the whole (ratio - 1) expression.
            # Previously abs(ratio) - 1 was computed, so undersized blocks
            # (ratio < 1) produced a negative value and could never
            # exceed the threshold.
            deviation = abs((bbl.size * 1.0) / self._bblsize - 1)
            if deviation > self._threshold:
                LOG.warning("Percentage deviation: %.2f", deviation)
                pass_ok = False
        return pass_ok
class PriorProbabilityEstimator(BaseEstimator):
    """An estimator predicting the probability of each class in the
    training data."""

    def fit(self, X, y, sample_weight=None):
        """Compute (optionally sample-weighted) class priors from y."""
        if sample_weight is None:
            # unweighted: every sample counts once
            sample_weight = np.ones_like(y, dtype=np.float64)
        class_counts = bincount(y, weights=sample_weight)
        self.priors = class_counts / class_counts.sum()

    def predict(self, X):
        """Return the fitted priors, tiled to one row per sample in X."""
        check_is_fitted(self, 'priors')
        y = np.empty((X.shape[0], self.priors.shape[0]), dtype=np.float64)
        y[:] = self.priors
        return y
class FBApplication (FBComponent):
    """Application class.

    Permits the manipulation of the application. Event registration is
    instance based: when an FBApplication object is destroyed, all its
    event callbacks are unregistered, so a tool that wants event
    notifications needs an FBApplication data member. The properties
    CurrentActor and CurrentCharacter cannot both be non-NULL at the
    same time — the character tool works on one item at a time, and
    both are null when nothing is selected.

    NOTE: this is an API stub — every method body is `pass` and the
    properties only carry documentation.
    """

    # constructor stub (mirrors the C++ ctor name)
    def FBApplication(self): pass
    def ExecuteScript(self,pFilename): pass
    # FBX-prefixed variants operate without the file dialogs
    def FBXFileAppend(self,pFilename,pImportingNamespace): pass
    def FBXFileMerge(self,pFilename,pImportingNamespace): pass
    def FBXFileOpen(self,pFilename,pImportingNamespace): pass
    def FBXFileSave(self,pFilename): pass
    def FileAppend(self,pFilename,pShowOptions,pImportingNamespace): pass
    def FileBatch(self,pBatchOptions,pPlotOptions): pass
    def FileExit(self,pSave): pass
    def FileExport(self,pFilename): pass
    def FileImport(self,pFilename,pMatchModels): pass
    def FileMerge(self,pFilename,pShowOptions,pImportingNamespace): pass
    def FileNew(self): pass
    def FileOpen(self,pFilename,pShowOptions,pImportingNamespace): pass
    def FileRender(self,pRenderOptions): pass
    def FileSave(self,pFilename): pass
    def Maximize(self): pass
    def Minimize(self,pBlocking): pass
    def SwitchViewerCamera(self,pCamera): pass

    CurrentActor=property(doc="<b>Read Write Property:</b> Indicate the current actor, as used by the character tool. Can be NULL. ")
    CurrentCharacter=property(doc="<b>Read Write Property:</b> Indicate the current character, as used by the character tool. Can be NULL. ")
    FBXFileName=property(doc="<b>Read Write Property:</b> Current scene filename. ")
    OnFileExit=property(doc="<b>Event:</b> A File Exit as been requested, nothing has been destroyed yet. ")
    OnFileMerge=property(doc="<b>Event:</b> A File Merge has been requested, nothing has been loaded yet. ")
    OnFileNew=property(doc="<b>Event:</b> A File New has been requested, nothing has been destroyed yet. ")
    OnFileNewCompleted=property(doc="<b>Event:</b> A File New has been completed. ")
    OnFileOpen=property(doc="<b>Event:</b> A File Open has been requested, nothing has been loaded yet. ")
    OnFileOpenCompleted=property(doc="<b>Event:</b> A File Open has been completed. ")
    OnFileSave=property(doc="<b>Event:</b> A File Save has been requested, nothing has been saved yet. ")
    OnFileSaveCompleted=property(doc="<b>Event:</b> A File Save has been completed. ")
    pass
class TestLinearOrderRecordsResponseBase(unittest.TestCase):
    """LinearOrderRecordsResponseBase unit test stubs."""

    def setUp(self):
        # no fixtures needed yet
        pass

    def tearDown(self):
        # nothing to clean up
        pass

    def testLinearOrderRecordsResponseBase(self):
        # TODO: construct LinearOrderRecordsResponseBase and assert on it
        pass
class InitialTagCreateRequest(object):
    """Auto-generated by the swagger code generator — do not edit manually."""

    # attribute name -> swagger type
    swagger_types = {
        'name': 'TagName',
        'creator': 'TagCreator',
        'img_type': 'ImageType'
    }
    # attribute name -> JSON key
    attribute_map = {
        'name': 'name',
        'creator': 'creator',
        'img_type': 'imgType'
    }

    def __init__(self, name=None, creator=None, img_type=None, _configuration=None):
        if _configuration is None:
            _configuration = Configuration()
        self._configuration = _configuration
        self._name = None
        self._creator = None
        self._img_type = None
        self.discriminator = None
        if name is not None:
            self.name = name
        if creator is not None:
            self.creator = creator
        # required: the setter rejects None (under client-side validation)
        self.img_type = img_type

    @property
    def name(self):
        """The tag name (TagName)."""
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @property
    def creator(self):
        """The tag creator (TagCreator)."""
        return self._creator

    @creator.setter
    def creator(self, creator):
        self._creator = creator

    @property
    def img_type(self):
        """The image type (ImageType); required."""
        return self._img_type

    @img_type.setter
    def img_type(self, img_type):
        if self._configuration.client_side_validation and img_type is None:
            raise ValueError("Invalid value for `img_type`, must not be `None`")
        self._img_type = img_type

    def to_dict(self):
        """Return the model's properties as a dict, recursively."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(InitialTagCreateRequest, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when both objects are equal."""
        if not isinstance(other, InitialTagCreateRequest):
            return False
        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """True when both objects are not equal."""
        if not isinstance(other, InitialTagCreateRequest):
            return True
        return self.to_dict() != other.to_dict()
class Encoder(Configurable):
    """Base class for encoders."""

    def __init__(self, params, mode, name=None, verbose=True):
        """Configure the encoder and declare its output tuple type."""
        super(Encoder, self).__init__(
            params=params, mode=mode, verbose=verbose,
            name=name or self.__class__.__name__)
        # every encode() implementation should return this namedtuple
        self._encoder_output_tuple_type = namedtuple(
            "EncoderOutput",
            "outputs final_states attention_values attention_length")

    @staticmethod
    def default_params():
        """Subclasses must provide their default hyperparameters."""
        raise NotImplementedError

    @abstractmethod
    def encode(self, feature_ids, feature_length, input_modality, **kwargs):
        """Encode the inputs; must be implemented by subclasses."""
        raise NotImplementedError
class EvalCommand( CommandObject ): <NEW_LINE> <INDENT> NAME = "evaluate" <NEW_LINE> DESCRIPTION = "Evaluates the current selection as Python code." <NEW_LINE> def __init__( self, displayMessage=None, selection=None ): <NEW_LINE> <INDENT> super( EvalCommand, self ).__init__() <NEW_LINE> self.setDescription( self.DESCRIPTION ) <NEW_LINE> self.setName( self.NAME ) <NEW_LINE> if displayMessage is None: <NEW_LINE> <INDENT> from enso import messages <NEW_LINE> displayMessage = messages.displayMessage <NEW_LINE> <DEDENT> if selection is None: <NEW_LINE> <INDENT> import enso.selection <NEW_LINE> selection = enso.selection <NEW_LINE> <DEDENT> self._selection = selection <NEW_LINE> self._displayMessage = displayMessage <NEW_LINE> <DEDENT> def run( self, seldict=None ): <NEW_LINE> <INDENT> if seldict is None: <NEW_LINE> <INDENT> seldict = self._selection.get() <NEW_LINE> <DEDENT> text = seldict.get( "text", "" ).strip() <NEW_LINE> evalSuccessful = False <NEW_LINE> append = False <NEW_LINE> if text.endswith( "=" ): <NEW_LINE> <INDENT> text = text[:-1].strip() <NEW_LINE> append = True <NEW_LINE> <DEDENT> if not text: <NEW_LINE> <INDENT> self._displayMessage( "<p>No code to evaluate!</p>" ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> code = compile( text, "<selected text>", "eval" ) <NEW_LINE> result = eval( code, {"__builtins__":None}, {} ) <NEW_LINE> evalSuccessful = True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self._displayMessage( "<p>Error: %s</p>" % xml_tools.escape_xml(str(e)) ) <NEW_LINE> <DEDENT> <DEDENT> if evalSuccessful: <NEW_LINE> <INDENT> resulttext = str( repr(result) ) <NEW_LINE> if append: <NEW_LINE> <INDENT> newtext = "%s = %s" % (text, resulttext) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newtext = resulttext <NEW_LINE> <DEDENT> self._selection.set( {"text" : newtext} ) | The 'evaluate' command. | 6259906ca219f33f346c803d |
class PreconfiguredChoices(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.xml = etree.Element("processingMCP") <NEW_LINE> self.choices = etree.SubElement(self.xml, "preconfiguredChoices") <NEW_LINE> <DEDENT> def add_choice(self, applies_to_text, go_to_chain_text, comment=None): <NEW_LINE> <INDENT> if comment is not None: <NEW_LINE> <INDENT> comment = etree.Comment(" {} ".format(comment)) <NEW_LINE> self.choices.append(comment) <NEW_LINE> <DEDENT> choice = etree.SubElement(self.choices, "preconfiguredChoice") <NEW_LINE> etree.SubElement(choice, "appliesTo").text = applies_to_text <NEW_LINE> etree.SubElement(choice, "goToChain").text = go_to_chain_text <NEW_LINE> <DEDENT> def save(self, config_path): <NEW_LINE> <INDENT> with open(config_path, "w") as f: <NEW_LINE> <INDENT> f.write(etree.tostring(self.xml, pretty_print=True, encoding="unicode")) | Encode processing configuration XML documents and optionally write to disk. | 6259906c4f6381625f19a0c2 |
class ExcludeTrivialImportBranchRule(StrategyRule): <NEW_LINE> <INDENT> def get_symbol(self, symbol, stats): <NEW_LINE> <INDENT> if isinstance(symbol, (Trunk, TypedSymbol)): <NEW_LINE> <INDENT> return symbol <NEW_LINE> <DEDENT> if stats.tag_create_count == 0 and stats.branch_create_count == stats.trivial_import_count: <NEW_LINE> <INDENT> logger.verbose( 'Excluding branch %s because it is a trivial import branch.' % (symbol,) ) <NEW_LINE> return ExcludedSymbol(symbol) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return symbol | If a symbol is a trivial import branch, exclude it.
A trivial import branch is defined to be a branch that only had a
single import on it (no other kinds of commits) in every file in
which it appeared. In most cases these branches are worthless. | 6259906c091ae35668706469 |
class CourseComments(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(UserProfile, verbose_name=u"用户") <NEW_LINE> course = models.ForeignKey(Couser, verbose_name=u"课程") <NEW_LINE> comments = models.CharField(max_length=200, verbose_name=u"评论") <NEW_LINE> add_time = models.DateTimeField(default=datetime.now, verbose_name=u"添加时间") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = u"课程评论" <NEW_LINE> verbose_name_plural = verbose_name | 课程评论 | 6259906c99cbb53fe683271d |
class MedicalView(CoachPermissionRequiredMixin, ListView): <NEW_LINE> <INDENT> context_object_name = "athlete_list" <NEW_LINE> model = Athlete <NEW_LINE> ordering = "last_medical" <NEW_LINE> template_name = "team_manager/medical_view.html" <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(MedicalView, self).get_context_data(**kwargs) <NEW_LINE> context["title"] = _("Medical Examination Overview") <NEW_LINE> context["datepicker_format"] = get_datepicker_date_format() <NEW_LINE> return context | View which shows an overview about the medical examiniations of the athlete | 6259906c76e4537e8c3f0db9 |
class Multiple(tuple): <NEW_LINE> <INDENT> def __new__(cls, *elems): <NEW_LINE> <INDENT> return super().__new__(cls, elems) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if not len(self): <NEW_LINE> <INDENT> return '%s()' % self.__class__.__name__ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> global trace_indent <NEW_LINE> trace_indent += 2 <NEW_LINE> result = '%s(\n%s\n)' % ( self.__class__.__name__, ',\n'.join('%s%s' % (indent(), repr(x)) for x in self) ) <NEW_LINE> trace_indent -= 2 <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if not len(self): <NEW_LINE> <INDENT> return '%s()' % self.__class__.__name__ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> global trace_indent <NEW_LINE> trace_indent += 2 <NEW_LINE> result = '%s(\n%s\n)' % ( self.__class__.__name__, ',\n'.join('%s%s' % (indent(), flatstr(x)) for x in self) ) <NEW_LINE> trace_indent -= 2 <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> def str_per_command_line(self): <NEW_LINE> <INDENT> if not len(self): <NEW_LINE> <INDENT> return '(None)' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = [] <NEW_LINE> for i, x in enumerate(self, start=1): <NEW_LINE> <INDENT> result.append('.%d %s' % (i, flatstr_per_command_line(x))) <NEW_LINE> <DEDENT> return '\n'.join(result) | A Multiple holds multiple results, like what most stages of scansion
generate. | 6259906caad79263cf42ffeb |
class SimpleRelatedProxy(SimpleRelated): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> proxy = True | Proxy to model with foreign key to Normal, shared only and regular translatable field | 6259906c32920d7e50bc787c |
class ComputerSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> stack = serializers.IntegerField(label='stack', write_only=True, help_text="Size of the computer's program stack.") <NEW_LINE> debug_data = serializers.DictField(read_only=True, source='debug') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Computer <NEW_LINE> fields = ('id', 'stack', 'debug_data') <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> validated_data['program_stack_size'] = validated_data.pop('stack', 0) <NEW_LINE> return super(ComputerSerializer, self).create(validated_data) | Serializer to manage `Computer` instances. | 6259906c4e4d562566373c3c |
class TestCsrRestIkeKeepaliveCreate(base.BaseTestCase): <NEW_LINE> <INDENT> def _save_dpd_info(self): <NEW_LINE> <INDENT> with httmock.HTTMock(csr_request.token, csr_request.normal_get): <NEW_LINE> <INDENT> details = self.csr.get_request('vpn-svc/ike/keepalive') <NEW_LINE> if self.csr.status == requests.codes.OK: <NEW_LINE> <INDENT> self.dpd = details <NEW_LINE> self.addCleanup(self._restore_dpd_info) <NEW_LINE> <DEDENT> elif self.csr.status != requests.codes.NOT_FOUND: <NEW_LINE> <INDENT> self.fail("Unable to save original DPD info") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _restore_dpd_info(self): <NEW_LINE> <INDENT> with httmock.HTTMock(csr_request.token, csr_request.put): <NEW_LINE> <INDENT> payload = {'interval': self.dpd['interval'], 'retry': self.dpd['retry']} <NEW_LINE> self.csr.put_request('vpn-svc/ike/keepalive', payload=payload) <NEW_LINE> if self.csr.status != requests.codes.NO_CONTENT: <NEW_LINE> <INDENT> self.fail("Unable to restore DPD info after test") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def setUp(self, host='localhost', tunnel_ip='10.10.10.10', timeout=None): <NEW_LINE> <INDENT> super(TestCsrRestIkeKeepaliveCreate, self).setUp() <NEW_LINE> info = {'rest_mgmt_ip': host, 'tunnel_ip': tunnel_ip, 'username': 'stack', 'password': 'cisco', 'timeout': timeout} <NEW_LINE> self.csr = csr_client.CsrRestClient(info) <NEW_LINE> self._save_dpd_info() <NEW_LINE> self.csr.token = None <NEW_LINE> <DEDENT> def test_configure_ike_keepalive(self): <NEW_LINE> <INDENT> with httmock.HTTMock(csr_request.token, csr_request.put, csr_request.normal_get): <NEW_LINE> <INDENT> keepalive_info = {'interval': 60, 'retry': 4} <NEW_LINE> self.csr.configure_ike_keepalive(keepalive_info) <NEW_LINE> self.assertEqual(requests.codes.NO_CONTENT, self.csr.status) <NEW_LINE> content = self.csr.get_request('vpn-svc/ike/keepalive') <NEW_LINE> self.assertEqual(requests.codes.OK, self.csr.status) <NEW_LINE> expected = {'periodic': False} <NEW_LINE> expected.update(keepalive_info) <NEW_LINE> 
self.assertDictContainsSubset(expected, content) <NEW_LINE> <DEDENT> <DEDENT> def test_disable_ike_keepalive(self): <NEW_LINE> <INDENT> with httmock.HTTMock(csr_request.token, csr_request.delete, csr_request.put, csr_request.get_not_configured): <NEW_LINE> <INDENT> keepalive_info = {'interval': 0, 'retry': 4} <NEW_LINE> self.csr.configure_ike_keepalive(keepalive_info) <NEW_LINE> self.assertEqual(requests.codes.NO_CONTENT, self.csr.status) | Test IKE keepalive REST requests.
Note: On the Cisco CSR, the IKE keepalive for v1 is a global configuration
that applies to all VPN tunnels to specify Dead Peer Detection information.
As a result, this REST API is not used in the OpenStack device driver, and
the keepalive will default to zero (disabled). | 6259906caad79263cf42ffec |
class DeleteFieldCommand(BaseFieldCommand): <NEW_LINE> <INDENT> def __init__(self, tag, ind1, ind2, subfield_commands, conditionSubfield="", condition="", condition_exact_match=True, _condition_does_not_exist=False): <NEW_LINE> <INDENT> BaseFieldCommand.__init__(self, tag, ind1, ind2, subfield_commands) <NEW_LINE> self._conditionSubfield = conditionSubfield <NEW_LINE> self._condition = condition <NEW_LINE> self._condition_exact_match = condition_exact_match <NEW_LINE> self._condition_does_not_exist = _condition_does_not_exist <NEW_LINE> <DEDENT> def _delete_field_condition(self, record): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> for field in record[self._tag]: <NEW_LINE> <INDENT> subfield_exists = False <NEW_LINE> for subfield in field[0]: <NEW_LINE> <INDENT> if subfield[0] == self._conditionSubfield: <NEW_LINE> <INDENT> subfield_exists = True <NEW_LINE> if self._condition_does_not_exist == True: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if self._condition_exact_match: <NEW_LINE> <INDENT> if self._condition == subfield[1]: <NEW_LINE> <INDENT> bibrecord.record_delete_field(record, self._tag, self._ind1, self._ind2, field_position_global=field[4]) <NEW_LINE> self._modifications += 1 <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self._condition in subfield[1]: <NEW_LINE> <INDENT> bibrecord.record_delete_field(record, self._tag, self._ind1, self._ind2, field_position_global=field[4]) <NEW_LINE> self._modifications += 1 <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if subfield_exists == False and self._condition_does_not_exist: <NEW_LINE> <INDENT> bibrecord.record_delete_field(record, self._tag, self._ind1, self._ind2, field_position_global=field[4]) <NEW_LINE> self._modifications += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def process_record(self, record): <NEW_LINE> <INDENT> if self._condition: <NEW_LINE> <INDENT> 
self._delete_field_condition(record) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bibrecord.record_delete_field(record, self._tag, self._ind1, self._ind2) <NEW_LINE> self._modifications += 1 | Deletes given fields from a record | 6259906c4428ac0f6e659d69 |
class SArrayBuilder(object): <NEW_LINE> <INDENT> def __init__(self, dtype, num_segments=1, history_size=10): <NEW_LINE> <INDENT> self._builder = UnitySArrayBuilderProxy() <NEW_LINE> self._builder.init(num_segments, history_size, dtype) <NEW_LINE> self._block_size = 1024 <NEW_LINE> <DEDENT> def append(self, data, segment=0): <NEW_LINE> <INDENT> self._builder.append(data, segment) <NEW_LINE> <DEDENT> def append_multiple(self, data, segment=0): <NEW_LINE> <INDENT> if not hasattr(data, "__iter__"): <NEW_LINE> <INDENT> raise TypeError("append_multiple must be passed an iterable object") <NEW_LINE> <DEDENT> tmp_list = [] <NEW_LINE> for i in data: <NEW_LINE> <INDENT> tmp_list.append(i) <NEW_LINE> if len(tmp_list) >= self._block_size: <NEW_LINE> <INDENT> self._builder.append_multiple(tmp_list, segment) <NEW_LINE> tmp_list = [] <NEW_LINE> <DEDENT> <DEDENT> if len(tmp_list) > 0: <NEW_LINE> <INDENT> self._builder.append_multiple(tmp_list, segment) <NEW_LINE> <DEDENT> <DEDENT> def get_type(self): <NEW_LINE> <INDENT> return self._builder.get_type() <NEW_LINE> <DEDENT> def read_history(self, num=10, segment=0): <NEW_LINE> <INDENT> if num < 0: <NEW_LINE> <INDENT> num = 0 <NEW_LINE> <DEDENT> if segment < 0: <NEW_LINE> <INDENT> raise TypeError("segment must be >= 0") <NEW_LINE> <DEDENT> return self._builder.read_history(num, segment) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> return SArray(_proxy=self._builder.close()) | An interface to incrementally build an SArray element by element.
Once closed, the SArray cannot be "reopened" using this interface.
Parameters
----------
dtype : type
The type of the elements in the SArray.
num_segments : int, optional
Number of segments that can be written in parallel.
history_size : int, optional
The number of elements to be cached as history. Caches the last
`history_size` elements added with `append` or `append_multiple`.
Returns
-------
out : SArrayBuilder
Examples
--------
>>> from turicreate import SArrayBuilder
>>> sb = SArrayBuilder(int)
>>> sb.append(1)
>>> sb.append_multiple([2,3])
>>> sb.close()
dtype: int
Rows: 3
[1, 2, 3] | 6259906cdd821e528d6da59c |
class ListVirtualHubsResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[VirtualHub]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["VirtualHub"]] = None, next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ListVirtualHubsResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link | Result of the request to list VirtualHubs. It contains a list of VirtualHubs and a URL nextLink to get the next set of results.
:param value: List of VirtualHubs.
:type value: list[~azure.mgmt.network.v2018_07_01.models.VirtualHub]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str | 6259906c01c39578d7f14350 |
class SquarePrivilegeCouponCases(TestCase): <NEW_LINE> <INDENT> def tearDown(self): <NEW_LINE> <INDENT> self.reset.clearData() <NEW_LINE> self.driver.quit() <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.logger = Logger() <NEW_LINE> self.driver = AppiumDriver(None, None, IDC.platformName, IDC.platformVersion, IDC.deviceName, IDC.driverUrl, IDC.bundleId, IDC.udid).getDriver() <NEW_LINE> self.reset = ClearAppData(self.driver) <NEW_LINE> self.reset.clearData() <NEW_LINE> TestPrepare(self, self.driver, self.logger).prepare() <NEW_LINE> <DEDENT> def test_case(self): <NEW_LINE> <INDENT> dashboardPage = DashboardPage(self, self.driver, self.logger) <NEW_LINE> dashboardPage.validSelf() <NEW_LINE> searchPage = SearchPage(self, self.driver, self.logger) <NEW_LINE> dashboardPage.validSelf() <NEW_LINE> dashboardPage.clickOnSearchAll() <NEW_LINE> searchPage.inputKeywords(u"北京通州万达广场") <NEW_LINE> searchPage.clickOnSearch() <NEW_LINE> searchPage.clickOnSpecificSquare() <NEW_LINE> squareModulePage = SquareModulePage(self, self.driver, self.logger) <NEW_LINE> squareModulePage.validSelf() <NEW_LINE> squareModulePage.clickOnPrivilege() <NEW_LINE> squareModulePage.waitBySeconds(8) | 作者 宋波
巡检checklist #Anonymous
自动化测试 #Anonymous
广场详情页点击优惠可以进入优惠券并可以成功领取优惠券在我的票券中显示 | 6259906c4527f215b58eb5bb |
class OpenSSLSeeker(Seeker): <NEW_LINE> <INDENT> NAME = 'OpenSSL' <NEW_LINE> VERSION_STRING = " part of OpenSSL " <NEW_LINE> def searchLib(self, logger): <NEW_LINE> <INDENT> key_string = self.VERSION_STRING <NEW_LINE> ids = ['SHA1', 'SHA-256', 'SHA-512', 'SSLv3', 'TLSv1', 'ASN.1', 'EVP', 'RAND', 'RSA', 'Big Number'] <NEW_LINE> self._version_strings = [] <NEW_LINE> seen_copyrights = set() <NEW_LINE> match_counter = 0 <NEW_LINE> for bin_str in self._all_strings: <NEW_LINE> <INDENT> if key_string in str(bin_str): <NEW_LINE> <INDENT> copyright_string = str(bin_str) <NEW_LINE> if len(filter(lambda id: id in copyright_string, ids)) == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> chopped_copyright_string = copyright_string[copyright_string.find(key_string):] <NEW_LINE> if match_counter >= 1 and chopped_copyright_string in seen_copyrights: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> logger.debug("Located a copyright string of %s in address 0x%x", self.NAME, bin_str.ea) <NEW_LINE> match_counter += 1 <NEW_LINE> seen_copyrights.add(chopped_copyright_string) <NEW_LINE> self._version_strings.append(chopped_copyright_string) <NEW_LINE> <DEDENT> <DEDENT> return len(self._version_strings) <NEW_LINE> <DEDENT> def identifyVersions(self, logger): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for work_str in self._version_strings: <NEW_LINE> <INDENT> results.append(self.extractVersion(work_str, start_index=work_str.find(self.NAME) + len(self.NAME) + 1, legal_chars=string.digits + string.ascii_lowercase + '.')) <NEW_LINE> <DEDENT> return results | Seeker (Identifier) for the OpenSSL open source library. | 6259906c2c8b7c6e89bd501d |
class InvalidInput(ValueError): <NEW_LINE> <INDENT> pass | Raised during interactive analysis, whenever the user input is invalis | 6259906cac7a0e7691f73d1e |
class HistoryIsNone(Exception): <NEW_LINE> <INDENT> pass | Raised if the ``_history`` property of the browser is set to None and one method using it is called
| 6259906c8e7ae83300eea8c6 |
class SudokuError(Exception): <NEW_LINE> <INDENT> pass | Ein anwendungsspezifischer Fehler | 6259906c7047854f46340bed |
class BusTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_reset(self): <NEW_LINE> <INDENT> bus = Bus() <NEW_LINE> bus.v_magnitude = 0.95 <NEW_LINE> bus.v_angle = 15.0 <NEW_LINE> bus.p_lmbda = 50.0 <NEW_LINE> bus.q_lmbda = 20.0 <NEW_LINE> bus.mu_vmin = 10.0 <NEW_LINE> bus.mu_vmax = 10.0 <NEW_LINE> bus.reset() <NEW_LINE> self.assertEqual(bus.v_magnitude, 0.95) <NEW_LINE> self.assertEqual(bus.v_angle, 15.0) <NEW_LINE> self.assertEqual(bus.p_lmbda, 0.0) <NEW_LINE> self.assertEqual(bus.q_lmbda, 0.0) <NEW_LINE> self.assertEqual(bus.mu_vmin, 0.0) <NEW_LINE> self.assertEqual(bus.mu_vmax, 0.0) | Test case for the Bus class.
| 6259906c4f88993c371f113b |
class FunctionalTest(LiveServerTestCase): <NEW_LINE> <INDENT> fixtures = ["fedora_software_testing.json"] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> settings.DEBUG = True <NEW_LINE> LOGGER.setLevel(logging.WARNING) <NEW_LINE> self.browser = webdriver.Firefox() <NEW_LINE> self.browser.implicitly_wait(3) <NEW_LINE> self.browser.get("http://localhost:8081") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.browser.quit() <NEW_LINE> <DEDENT> def test_if_fedora_software_is_reachable(self): <NEW_LINE> <INDENT> self.assertIn("Welcome to Fedora Software", self.browser.title) <NEW_LINE> <DEDENT> def test_if_featured_apps_are_displayed_correctly(self): <NEW_LINE> <INDENT> featured_header_text = self.browser.find_element_by_tag_name('h1').text <NEW_LINE> self.assertEqual("Featured apps", featured_header_text) <NEW_LINE> app_link = self.browser.find_element_by_id("featured_app_href") <NEW_LINE> app_link = app_link.get_attribute("href") <NEW_LINE> app_pkgname = "None" <NEW_LINE> i = 0 <NEW_LINE> for app in FeaturedApp.objects.all(): <NEW_LINE> <INDENT> app_name_from_link = app_link.split("/apps/")[1] <NEW_LINE> if app_name_from_link in app.component.type_id: <NEW_LINE> <INDENT> app_type_id = app.component.type_id <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> self.assertIn(app_name_from_link, app_type_id) <NEW_LINE> <DEDENT> def test_if_carousel_displays_a_valid_image(self): <NEW_LINE> <INDENT> carousel = self.browser.find_element_by_id("carousel-featured") <NEW_LINE> carousel_html = carousel.value_of_css_property("background") <NEW_LINE> featured_app = self.browser.find_element_by_id("featured_app_href") <NEW_LINE> featured_app.get_attribute("href").split("/apps/")[1] <NEW_LINE> carousel_image = carousel_html.split('/', 1)[1] <NEW_LINE> carousel_image = "/" + carousel_image <NEW_LINE> carousel_path = carousel_image.split('") no-repeat')[0][1:] <NEW_LINE> project_path = Path(__file__).ancestor(2) <NEW_LINE> fedorasoftware_app_path = 
project_path.child("fedora_software") <NEW_LINE> carousel_file = os.path.join( fedorasoftware_app_path, carousel_path ) <NEW_LINE> self.assertTrue(os.path.exists(carousel_file)) | Suite of Acceptance tests. | 6259906c56ac1b37e63038fe |
class WorkflowModel: <NEW_LINE> <INDENT> def __init__(self, *steps: StepModel): <NEW_LINE> <INDENT> self._steps = list(steps) <NEW_LINE> <DEDENT> @property <NEW_LINE> def steps(self) -> List[StepModel]: <NEW_LINE> <INDENT> return self._steps | A model of workflow. It is composed of a list of StepModel instances. | 6259906c1f5feb6acb164427 |
class ConnectionTypeCreateOrUpdateParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, 'field_definitions': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'is_global': {'key': 'properties.isGlobal', 'type': 'bool'}, 'field_definitions': {'key': 'properties.fieldDefinitions', 'type': '{FieldDefinition}'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionTypeCreateOrUpdateParameters, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs['name'] <NEW_LINE> self.is_global = kwargs.get('is_global', None) <NEW_LINE> self.field_definitions = kwargs['field_definitions'] | The parameters supplied to the create or update connection type operation.
All required parameters must be populated in order to send to Azure.
:param name: Required. Gets or sets the name of the connection type.
:type name: str
:param is_global: Gets or sets a Boolean value to indicate if the connection type is global.
:type is_global: bool
:param field_definitions: Required. Gets or sets the field definitions of the connection type.
:type field_definitions: dict[str, ~azure.mgmt.automation.models.FieldDefinition] | 6259906cb7558d5895464b4d |
class VirtualMachineExtensionHandlerInstanceView(Model): <NEW_LINE> <INDENT> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'type_handler_version': {'key': 'typeHandlerVersion', 'type': 'str'}, 'status': {'key': 'status', 'type': 'InstanceViewStatus'}, } <NEW_LINE> def __init__(self, type=None, type_handler_version=None, status=None): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.type_handler_version = type_handler_version <NEW_LINE> self.status = status | The instance view of a virtual machine extension handler.
:param type: Full type of the extension handler which includes both
publisher and type.
:type type: str
:param type_handler_version: The type version of the extension handler.
:type type_handler_version: str
:param status: The extension handler status.
:type status: :class:`InstanceViewStatus
<azure.mgmt.compute.compute.v2016_04_30_preview.models.InstanceViewStatus>` | 6259906c7d43ff248742802d |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.