Dataset columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24).
class HDHandler(Resource): <NEW_LINE> <INDENT> def __init__(self, jf, size): <NEW_LINE> <INDENT> Resource.__init__(self, jf, '/HP/HD/' + size) <NEW_LINE> self.name = 'Hit Die' <NEW_LINE> self.value = size <NEW_LINE> self.recharge = 'long' <NEW_LINE> <DEDENT> def use_HD(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> roll = self.use(1) <NEW_LINE> <DEDENT> except LowOnResource: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> conmod = h.modifier(self.record.get('/abilities/Constitution')) <NEW_LINE> return roll + conmod if (roll + conmod > 1) else 1 <NEW_LINE> <DEDENT> def rest(self, what): <NEW_LINE> <INDENT> if what == 'long': <NEW_LINE> <INDENT> if Character.HEALING == 'fast': <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.regain(ceil(self.maxnumber / 2)) <NEW_LINE> <DEDENT> <DEDENT> if what == 'short': <NEW_LINE> <INDENT> if Character.HEALING == 'fast': <NEW_LINE> <INDENT> self.regain(ceil(self.maxnumber / 4))
Handles one set of hit dice for a character. Data: as Resource, but assuming that it recharges on a long rest, its name is 'Hit Die', and its value is its size. Methods: use_HD: returns the result of rolling itself plus the character's Constitution modifier. rest: overrides Resource.rest; it only regains half of its maximum number of HD.
625990673539df3088ecda35
class FileField: <NEW_LINE> <INDENT> def __init__(self, name: str, value: FileTypes) -> None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> fileobj: FileContent <NEW_LINE> if isinstance(value, tuple): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> filename, fileobj, content_type = value <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> filename, fileobj = value <NEW_LINE> content_type = guess_content_type(filename) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> filename = Path(str(getattr(value, "name", "upload"))).name <NEW_LINE> fileobj = value <NEW_LINE> content_type = guess_content_type(filename) <NEW_LINE> <DEDENT> self.filename = filename <NEW_LINE> self.file = fileobj <NEW_LINE> self.content_type = content_type <NEW_LINE> self._consumed = False <NEW_LINE> <DEDENT> def get_length(self) -> int: <NEW_LINE> <INDENT> headers = self.render_headers() <NEW_LINE> if isinstance(self.file, (str, bytes)): <NEW_LINE> <INDENT> return len(headers) + len(self.file) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> file_length = peek_filelike_length(self.file) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> assert not hasattr(self, "_data") <NEW_LINE> self._data = to_bytes(self.file.read()) <NEW_LINE> file_length = len(self._data) <NEW_LINE> <DEDENT> return len(headers) + file_length <NEW_LINE> <DEDENT> def render_headers(self) -> bytes: <NEW_LINE> <INDENT> if not hasattr(self, "_headers"): <NEW_LINE> <INDENT> parts = [ b"Content-Disposition: form-data; ", format_form_param("name", self.name), ] <NEW_LINE> if self.filename: <NEW_LINE> <INDENT> filename = format_form_param("filename", self.filename) <NEW_LINE> parts.extend([b"; ", filename]) <NEW_LINE> <DEDENT> if self.content_type is not None: <NEW_LINE> <INDENT> content_type = self.content_type.encode() <NEW_LINE> parts.extend([b"\r\nContent-Type: ", content_type]) <NEW_LINE> <DEDENT> parts.append(b"\r\n\r\n") <NEW_LINE> self._headers = b"".join(parts) <NEW_LINE> <DEDENT> return self._headers <NEW_LINE> <DEDENT> def render_data(self) -> typing.Iterator[bytes]: <NEW_LINE> <INDENT> if isinstance(self.file, (str, bytes)): <NEW_LINE> <INDENT> yield to_bytes(self.file) <NEW_LINE> return <NEW_LINE> <DEDENT> if hasattr(self, "_data"): <NEW_LINE> <INDENT> yield self._data <NEW_LINE> return <NEW_LINE> <DEDENT> if self._consumed: <NEW_LINE> <INDENT> self.file.seek(0) <NEW_LINE> <DEDENT> self._consumed = True <NEW_LINE> for chunk in self.file: <NEW_LINE> <INDENT> yield to_bytes(chunk) <NEW_LINE> <DEDENT> <DEDENT> def render(self) -> typing.Iterator[bytes]: <NEW_LINE> <INDENT> yield self.render_headers() <NEW_LINE> yield from self.render_data()
A single file field item, within a multipart form field.
6259906732920d7e50bc77dd
class Lattice: <NEW_LINE> <INDENT> pos = {} <NEW_LINE> grid = [] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.init_grid(1,1) <NEW_LINE> <DEDENT> def __init__(self,r,c): <NEW_LINE> <INDENT> self.init_grid(r,c) <NEW_LINE> <DEDENT> def init_grid(self,r,c): <NEW_LINE> <INDENT> global grid,pos <NEW_LINE> pos={"x" : 0, "y" : 0} <NEW_LINE> grid = [] <NEW_LINE> for y in range(0,r,1): <NEW_LINE> <INDENT> row = [] <NEW_LINE> for x in range(0,c,1): <NEW_LINE> <INDENT> row.append(0) <NEW_LINE> <DEDENT> grid.append(row) <NEW_LINE> <DEDENT> <DEDENT> def get_grid(self): <NEW_LINE> <INDENT> return grid <NEW_LINE> <DEDENT> def get_max_x(self): <NEW_LINE> <INDENT> return len(grid[0])-1 <NEW_LINE> <DEDENT> def get_max_y(self): <NEW_LINE> <INDENT> return len(grid)-1 <NEW_LINE> <DEDENT> def print_grid(self): <NEW_LINE> <INDENT> for y in grid: <NEW_LINE> <INDENT> for x in y: <NEW_LINE> <INDENT> sys.stdout.write(str(x)+'\t') <NEW_LINE> <DEDENT> sys.stdout.write('\n') <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> <DEDENT> def get_pos(self): <NEW_LINE> <INDENT> result={"x" : pos["x"], "y" : pos["y"]} <NEW_LINE> return result <NEW_LINE> <DEDENT> def set_pos(self,x,y): <NEW_LINE> <INDENT> pos["x"] = x <NEW_LINE> pos["y"] = y <NEW_LINE> <DEDENT> def set_value(self,n): <NEW_LINE> <INDENT> grid[pos["y"]][pos["x"]] = n <NEW_LINE> <DEDENT> def get_value(self): <NEW_LINE> <INDENT> return grid[pos["y"]][pos["x"]] <NEW_LINE> <DEDENT> def get_neighbors(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> if pos["x"] > 0: <NEW_LINE> <INDENT> result.append({"x" : pos["x"] - 1, "y" : pos["y"]}) <NEW_LINE> <DEDENT> if pos["y"] > 0: <NEW_LINE> <INDENT> result.append({"x" : pos["x"], "y" : pos["y"] - 1}) <NEW_LINE> <DEDENT> if pos["x"] < self.get_max_x(): <NEW_LINE> <INDENT> result.append({"x" : pos["x"] + 1, "y" : pos["y"]}) <NEW_LINE> <DEDENT> if pos["y"] < self.get_max_y(): <NEW_LINE> <INDENT> result.append({"x" : pos["x"], "y" : pos["y"] + 1}) <NEW_LINE> <DEDENT> return result
A simple lattice structure
62599067fff4ab517ebcefb1
class baseDisp: <NEW_LINE> <INDENT> def __init__(self, disp): <NEW_LINE> <INDENT> self.out = disp <NEW_LINE> <DEDENT> def Begin(self, command): <NEW_LINE> <INDENT> self.out.Begin(command) <NEW_LINE> <DEDENT> def data(self, line): <NEW_LINE> <INDENT> self.out.data(line) <NEW_LINE> <DEDENT> def flush(self, prompt, callback): <NEW_LINE> <INDENT> self.out.flush(prompt, callback) <NEW_LINE> <DEDENT> def Answer(self, response): <NEW_LINE> <INDENT> self.out.Answer(response) <NEW_LINE> <DEDENT> def End(self, command): <NEW_LINE> <INDENT> self.out.End(command) <NEW_LINE> <DEDENT> def Process(self): <NEW_LINE> <INDENT> self.out.Process()
Base class for chained parsing classes. (Does little by itself.) Basically, this class just ensures that all the sub-classes support the standard chained display class protocols.
625990673539df3088ecda36
class Writer(CreationInfoWriter, ReviewInfoWriter, FileWriter, PackageWriter, ExternalDocumentRefWriter, AnnotationInfoWriter): <NEW_LINE> <INDENT> def __init__(self, document, out): <NEW_LINE> <INDENT> super(Writer, self).__init__(document, out) <NEW_LINE> <DEDENT> def create_doc(self): <NEW_LINE> <INDENT> doc_node = URIRef('http://www.spdx.org/tools#SPDXRef-DOCUMENT') <NEW_LINE> self.graph.add((doc_node, RDF.type, self.spdx_namespace.SpdxDocument)) <NEW_LINE> vers_literal = Literal(str(self.document.version)) <NEW_LINE> self.graph.add((doc_node, self.spdx_namespace.specVersion, vers_literal)) <NEW_LINE> data_lics = URIRef(self.document.data_license.url) <NEW_LINE> self.graph.add((doc_node, self.spdx_namespace.dataLicense, data_lics)) <NEW_LINE> doc_name = URIRef(self.document.name) <NEW_LINE> self.graph.add((doc_node, self.spdx_namespace.name, doc_name)) <NEW_LINE> return doc_node <NEW_LINE> <DEDENT> def write(self): <NEW_LINE> <INDENT> doc_node = self.create_doc() <NEW_LINE> creation_info_node = self.create_creation_info() <NEW_LINE> ci_triple = (doc_node, self.spdx_namespace.creationInfo, creation_info_node) <NEW_LINE> self.graph.add(ci_triple) <NEW_LINE> review_nodes = self.reviews() <NEW_LINE> for review in review_nodes: <NEW_LINE> <INDENT> self.graph.add((doc_node, self.spdx_namespace.reviewed, review)) <NEW_LINE> <DEDENT> ext_doc_ref_nodes = self.ext_doc_refs() <NEW_LINE> for ext_doc_ref in ext_doc_ref_nodes: <NEW_LINE> <INDENT> ext_doc_ref_triple = (doc_node, self.spdx_namespace.externalDocumentRef, ext_doc_ref) <NEW_LINE> self.graph.add(ext_doc_ref_triple) <NEW_LINE> <DEDENT> licenses = map( self.create_extracted_license, self.document.extracted_licenses) <NEW_LINE> for lic in licenses: <NEW_LINE> <INDENT> self.graph.add((doc_node, self.spdx_namespace.hasExtractedLicensingInfo, lic)) <NEW_LINE> <DEDENT> files = self.files() <NEW_LINE> for file_node in files: <NEW_LINE> <INDENT> self.graph.add((doc_node, self.spdx_namespace.referencesFile, file_node)) <NEW_LINE> <DEDENT> self.add_file_dependencies() <NEW_LINE> package_node = self.packages() <NEW_LINE> package_triple = (doc_node, self.spdx_namespace.describesPackage, package_node) <NEW_LINE> self.graph.add(package_triple) <NEW_LINE> self.graph = to_isomorphic(self.graph) <NEW_LINE> self.graph.serialize(self.out, 'pretty-xml', encoding='utf-8')
Wrapper for other writers to write all fields of spdx.document.Document. Call `write()` to start writing.
625990674428ac0f6e659cc8
class Settings(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 800 <NEW_LINE> self.screen_height = 600 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_limit = 3 <NEW_LINE> self.bullet_width = 3 <NEW_LINE> self.bullet_hight = 15 <NEW_LINE> self.bullet_color = 60, 60, 60 <NEW_LINE> self.bullets_allowed = 10 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.speedup_scale = 1.1 <NEW_LINE> self.score_scale = 1.5 <NEW_LINE> self.initialize_dynamic_settings() <NEW_LINE> <DEDENT> def initialize_dynamic_settings(self): <NEW_LINE> <INDENT> self.ship_speed_factor = 1.5 <NEW_LINE> self.bullet_speed_factor = 3 <NEW_LINE> self.alien_speed_factor = 1 <NEW_LINE> self.fleet_direction = 1 <NEW_LINE> self.alien_points = 50 <NEW_LINE> <DEDENT> def increase_speed(self): <NEW_LINE> <INDENT> self.ship_speed_factor *= self.speedup_scale <NEW_LINE> self.bullet_speed_factor *= self.speedup_scale <NEW_LINE> self.alien_speed_factor *= self.speedup_scale <NEW_LINE> self.alien_points = int(self.alien_points * self.score_scale)
A class that stores all the settings for Alien Invasion.
62599067442bda511e95d924
class TaskExecutionViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = TaskExecution.objects.all().order_by('-date_time') <NEW_LINE> serializer_class = TaskExecutionSerializer
API endpoint that serves the logs of task execution.
625990670c0af96317c5792a
class CommentCreateView(AjaxableResponseMixin, CreateView): <NEW_LINE> <INDENT> form_class = CommentForm <NEW_LINE> model = Comment <NEW_LINE> template_name = 'comments/comment_form.html' <NEW_LINE> success_url = reverse_lazy('comment-create') <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> comment = form.save(commit=False) <NEW_LINE> try: <NEW_LINE> <INDENT> content_type = ContentType.objects.get( app_label=self.request.POST['app_name'], model=self.request.POST['model'].lower()) <NEW_LINE> model_object = content_type.get_object_for_this_type( id=self.request.POST['model_id']) <NEW_LINE> comment.content_object = model_object <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> comment_recipe_created.send(sender=self, recipe=Recipe.objects.filter(id=self.request.POST['model_id']), comment=self.request.POST['comment']) <NEW_LINE> rating_recipe_created.send(sender=self, recipe=Recipe.objects.filter(id=self.request.POST['model_id']), rating=self.request.POST['rating']) <NEW_LINE> print("USER WHO CREATED COMMENT: ", self.request.user) <NEW_LINE> comment.save() <NEW_LINE> target = Recipe.objects.get(id=comment.object_id) <NEW_LINE> recipient = User.objects.get(id=target.author_id) <NEW_LINE> notify.send(self.request.user, actor=self.request.user, recipient=recipient, verb='commented on your recipe', target=target) <NEW_LINE> if comment.rating: <NEW_LINE> <INDENT> notify.send(self.request.user, actor=self.request.user, recipient=recipient, verb='rated your recipe', target=target) <NEW_LINE> <DEDENT> return super(CommentCreateView, self).form_valid(form)
Class that creates an instance of model:comment.Comment
625990677d847024c075db70
class DoorClosedMonitor(BaseDoorMonitor): <NEW_LINE> <INDENT> def __init__(self, Name, Type='Door', Kind='Position', TriggerValue='CLOSED', Description='Monitor when the DOOR is CLOSED'): <NEW_LINE> <INDENT> BaseDoorMonitor.__init__(self, Name, Type, Kind, TriggerValue, Description, 'DOOR CLOSED', 'The Door was Closed')
Defines a monitor that triggers when a door is CLOSED.
62599067f548e778e596cd22
class ComponentAttribute(TicketAttribute): <NEW_LINE> <INDENT> NAME = 'component' <NEW_LINE> DEFAULT_VALUE = 'Other'
This is the component the ticket is related to.
625990674e4d562566373b9e
class TestTimeConversion(unittest.TestCase): <NEW_LINE> <INDENT> def check_time(self, event, g, Ne, key="time"): <NEW_LINE> <INDENT> ll_event = event.get_ll_representation(1, Ne) <NEW_LINE> self.assertEqual(ll_event[key], g / (4 * Ne)) <NEW_LINE> <DEDENT> def test_population_parameter_change(self): <NEW_LINE> <INDENT> g = 8192 <NEW_LINE> Ne = 1024 <NEW_LINE> event = msprime.PopulationParametersChange(time=g, initial_size=1) <NEW_LINE> self.check_time(event, g, Ne) <NEW_LINE> <DEDENT> def test_migration_rate_change(self): <NEW_LINE> <INDENT> g = 512 <NEW_LINE> Ne = 8192 <NEW_LINE> event = msprime.MigrationRateChange(time=g, rate=1) <NEW_LINE> self.check_time(event, g, Ne) <NEW_LINE> <DEDENT> def test_mass_migration(self): <NEW_LINE> <INDENT> g = 100 <NEW_LINE> Ne = 100 <NEW_LINE> event = msprime.MassMigration(time=g, source=0, destination=1) <NEW_LINE> self.check_time(event, g, Ne) <NEW_LINE> <DEDENT> def test_instantaneous_bottleneck(self): <NEW_LINE> <INDENT> g = 100 <NEW_LINE> strength = 1000 <NEW_LINE> Ne = 100 <NEW_LINE> event = msprime.InstantaneousBottleneck(time=g, strength=strength) <NEW_LINE> self.check_time(event, g, Ne) <NEW_LINE> self.check_time(event, strength, Ne, "strength")
Tests the time conversion into scaled units.
6259906716aa5153ce401c71
class ArticleReportingTestCase(ArticleRatingTestCase): <NEW_LINE> <INDENT> def article_report_input(self): <NEW_LINE> <INDENT> self.reporting_url = '/api/articles/' + self.response_article_posted.data['art_slug'] + '/report' <NEW_LINE> self.report_msg = { "report_msg": "This has been plagiarised from my site." } <NEW_LINE> <DEDENT> def test_author_cannot_report_their_own_article(self): <NEW_LINE> <INDENT> self.post_article() <NEW_LINE> self.article_report_input() <NEW_LINE> response_POST = self.client.post(self.reporting_url, self.report_msg, format="json") <NEW_LINE> self.assertEqual(response_POST.status_code, status.HTTP_403_FORBIDDEN) <NEW_LINE> self.assertEqual(response_POST.data['message'], "You cannot report " + "your own article.") <NEW_LINE> <DEDENT> def test_audience_can_report_article(self): <NEW_LINE> <INDENT> self.post_article() <NEW_LINE> self.register_user(self.user_2) <NEW_LINE> self.article_report_input() <NEW_LINE> response_POST = self.client.post(self.reporting_url, self.report_msg, format="json") <NEW_LINE> self.assertEqual(response_POST.status_code, status.HTTP_201_CREATED) <NEW_LINE> self.assertEqual(response_POST.data['message'], "You have reported " + "this article to the admin.") <NEW_LINE> self.assertEqual(mail.outbox[2].subject, "Article:" + self.response_article_posted.data['art_slug'] + " has been reported.") <NEW_LINE> self.assertEqual(response_POST.status_code, status.HTTP_201_CREATED)
This class defines the API test case for reporting articles.
62599067d486a94d0ba2d756
class MySQLBase(sad.declarative_base(), object): <NEW_LINE> <INDENT> __abstract__ = True <NEW_LINE> db = None <NEW_LINE> id = sa.Column(sa.Integer, primary_key=True) <NEW_LINE> updated_at = sa.Column(ArrowType) <NEW_LINE> updated_by = sa.Column(sa.Integer) <NEW_LINE> created_at = sa.Column(ArrowType, default=arrow.now('US/Pacific')) <NEW_LINE> created_by = sa.Column(sa.Integer) <NEW_LINE> is_active = sa.Column(sa.Boolean, default=True) <NEW_LINE> @classmethod <NEW_LINE> def get_or_create(cls, **data): <NEW_LINE> <INDENT> return cls.query().filter_by(**data).one_or_none() or cls(**data).save() <NEW_LINE> <DEDENT> def update(self, **kwargs): <NEW_LINE> <INDENT> for k, v in kwargs.items(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def objects(cls, give_query=False, **data): <NEW_LINE> <INDENT> query = cls.query().filter_by(**data) <NEW_LINE> return query if give_query else query.all() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def query(cls): <NEW_LINE> <INDENT> return cls.db.session.query(cls) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> self.db.session.add(self) <NEW_LINE> self.db.session.commit() <NEW_LINE> return self
MySQL base object
62599067462c4b4f79dbd19f
class DevelopmentConfig(Config): <NEW_LINE> <INDENT> TESTING = True <NEW_LINE> DEBUG = True <NEW_LINE> SQLALCHEMY_ECHO = True
Development configurations
62599067498bea3a75a591cd
class IMDbParserError(IMDbError): <NEW_LINE> <INDENT> pass
Exception raised when an error occurred parsing the data.
6259906756ac1b37e63038af
class SimpleTriangle: <NEW_LINE> <INDENT> def __init__(self, side1, side2, side3): <NEW_LINE> <INDENT> self.side1 = side1 <NEW_LINE> self.side2 = side2 <NEW_LINE> self.side3 = side3 <NEW_LINE> if not(isinstance(self.side1, int) and isinstance(self.side2, int) and isinstance(self.side3, int)): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if ((self.side1 + self.side2) < self.side3) or ((self.side2 + self.side3) < self.side1) or ((self.side1 + self.side3) < self.side2): <NEW_LINE> <INDENT> raise ArithmeticError <NEW_LINE> <DEDENT> if self.side1 <= 0 or self.side2 <= 0 or self.side3 <= 0: <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> <DEDENT> def right_triangle(self): <NEW_LINE> <INDENT> side1, side2, side3 = sorted([self.side1, self.side2, self.side3]) <NEW_LINE> return round((side1 * side1) + (side2 * side2) - (side3 * side3), 2) == 0 <NEW_LINE> <DEDENT> def equilateral(self): <NEW_LINE> <INDENT> return self.side1 == self.side2 and self.side2 == self.side3 <NEW_LINE> <DEDENT> def isosceles(self): <NEW_LINE> <INDENT> return (self.side1 == self.side2 and self.side2 != self.side3) or (self.side2 == self.side3 and self.side2 != self.side1) or (self.side1 == self.side3 and self.side3 != self.side2) <NEW_LINE> <DEDENT> def scalene(self): <NEW_LINE> <INDENT> return self.side1 != self.side2 and self.side2 != self.side3 and self.side1 != self.side3
Triangle class to determine different types of triangles
62599067009cb60464d02cd1
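For illustration, a brief usage sketch of the SimpleTriangle class above (the side lengths are arbitrary examples):

    t = SimpleTriangle(3, 4, 5)
    print(t.right_triangle())  # True: 3*3 + 4*4 - 5*5 == 0
    print(t.scalene())         # True: all three sides differ
    print(t.equilateral())     # False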
class LinkedPATemplatePostSummaryRoot(ModelNormal): <NEW_LINE> <INDENT> allowed_values = { } <NEW_LINE> validations = { } <NEW_LINE> additional_properties_type = None <NEW_LINE> _nullable = False <NEW_LINE> @cached_property <NEW_LINE> def openapi_types(): <NEW_LINE> <INDENT> lazy_import() <NEW_LINE> return { 'data': (LinkedPATemplatePostSummary,), 'meta': (bool, date, datetime, dict, float, int, list, str, none_type,), } <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def discriminator(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> attribute_map = { 'data': 'data', 'meta': 'meta', } <NEW_LINE> _composed_schemas = {} <NEW_LINE> required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) <NEW_LINE> @convert_js_args_to_python_args <NEW_LINE> def __init__(self, data, *args, **kwargs): <NEW_LINE> <INDENT> _check_type = kwargs.pop('_check_type', True) <NEW_LINE> _spec_property_naming = kwargs.pop('_spec_property_naming', False) <NEW_LINE> _path_to_item = kwargs.pop('_path_to_item', ()) <NEW_LINE> _configuration = kwargs.pop('_configuration', None) <NEW_LINE> _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) <NEW_LINE> if args: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> self._data_store = {} <NEW_LINE> self._check_type = _check_type <NEW_LINE> self._spec_property_naming = _spec_property_naming <NEW_LINE> self._path_to_item = _path_to_item <NEW_LINE> self._configuration = _configuration <NEW_LINE> self._visited_composed_classes = _visited_composed_classes + (self.__class__,) <NEW_LINE> self.data = data <NEW_LINE> for var_name, var_value in kwargs.items(): <NEW_LINE> <INDENT> if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> setattr(self, var_name, var_value)
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and, for var_name, this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is the attribute name and the value is the JSON key in the definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and, for var_name, this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values.
625990670a50d4780f70698c
class Client(object): <NEW_LINE> <INDENT> def __init__(self, endpoint, *args, **kwargs): <NEW_LINE> <INDENT> self.http_client = http.HTTPClient(utils.strip_version(endpoint), *args, **kwargs) <NEW_LINE> self.schemas = schemas.Controller(self.http_client) <NEW_LINE> image_model = self._get_image_model() <NEW_LINE> self.images = images.Controller(self.http_client, image_model) <NEW_LINE> self.image_tags = image_tags.Controller(self.http_client, image_model) <NEW_LINE> self.image_members = image_members.Controller(self.http_client, self._get_member_model()) <NEW_LINE> <DEDENT> def _get_image_model(self): <NEW_LINE> <INDENT> schema = self.schemas.get('image') <NEW_LINE> return warlock.model_factory(schema.raw(), schemas.SchemaBasedModel) <NEW_LINE> <DEDENT> def _get_member_model(self): <NEW_LINE> <INDENT> schema = self.schemas.get('member') <NEW_LINE> return warlock.model_factory(schema.raw(), schemas.SchemaBasedModel)
Client for the OpenStack Images v2 API. :param string endpoint: A user-supplied endpoint URL for the glance service. :param string token: Token for authentication. :param integer timeout: Allows customization of the timeout for client http requests. (optional)
6259906732920d7e50bc77de
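A minimal usage sketch for the Client above, assuming the python-glanceclient v2 layout where the images controller exposes a list() method; the endpoint and token below are placeholders, not real values:

    # Placeholder endpoint/token; a reachable Glance service is needed to actually run this.
    client = Client('http://glance.example.com:9292', token='example-token', timeout=30)
    for image in client.images.list():
        print(image['id'], image['name'])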
class Tile: <NEW_LINE> <INDENT> def __init__(self, id): <NEW_LINE> <INDENT> self.type = id <NEW_LINE> self.name = area[id][1] <NEW_LINE> current_area = area[0] <NEW_LINE> <DEDENT> def on_resolve_card(self, card): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def find_next(self): <NEW_LINE> <INDENT> global area <NEW_LINE> global current_area <NEW_LINE> x = 0 <NEW_LINE> for num in range(1, len(area)): <NEW_LINE> <INDENT> del area[x] <NEW_LINE> x = random.randrange(0, len(area), 1) <NEW_LINE> if area[x][1] == "Patio": <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_area = area[x] <NEW_LINE> <DEDENT> return
taken from https://github.com/lokikristianson/zimp-impl/blob/master/tile_doctest line 20-88 >>> tile = sys.modules[__name__] >>> tile.Tile(0).name 'Foyer' >>> tile.Tile(1).name 'Patio' >>> tile.Tile(2).name 'Evil Temple' >>> tile.Tile(3).name 'Storage Room' >>> tile.Tile(4).name 'Kitchen' >>> tile.Tile(5).name 'Dining Room' >>> tile.Tile(6).name 'Family Room' >>> tile.Tile(7).name 'Bedroom' >>> tile.Tile(8).name 'Bathroom' >>> tile.Tile(9).name 'Garden' >>> tile.Tile(10).name 'Graveyard' >>> tile.Tile(11).name 'Garage' >>> tile.Tile(12).name 'Sitting Area' >>> tile.Tile(13).name 'Yard1' >>> tile.Tile(14).name 'Yard2' >>> tile.Tile(15).name 'Yard3' >>> tile.area[0][2] True >>> tile.area[9][2] False >>> tile.area[10][2] False >>> tile.area[0][3] False >>> tile.area[9][3] True >>> tile.area[10][3] True >>> tile.area[0][4] False >>> tile.area[9][4] True >>> tile.area[10][4] True >>> tile.area[0][5] False >>> tile.area[9][5] True >>> tile.area[10][5] True >>> tile.area[0][6] False >>> tile.area[9][6] True >>> tile.area[10][6] False >>> tile.area[0][7] True >>> tile.area[9][7] True >>> tile.area[10][7] False
625990674428ac0f6e659cca
class EventNotificationList(FrozenClass): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TypeId = FourByteNodeId(ObjectIds.EventNotificationList_Encoding_DefaultBinary) <NEW_LINE> self.Encoding = 1 <NEW_LINE> self.BodyLength = 0 <NEW_LINE> self.Events = [] <NEW_LINE> self._freeze() <NEW_LINE> <DEDENT> def to_binary(self): <NEW_LINE> <INDENT> packet = [] <NEW_LINE> body = [] <NEW_LINE> packet.append(self.TypeId.to_binary()) <NEW_LINE> packet.append(pack_uatype('UInt8', self.Encoding)) <NEW_LINE> body.append(struct.pack('<i', len(self.Events))) <NEW_LINE> for fieldname in self.Events: <NEW_LINE> <INDENT> body.append(fieldname.to_binary()) <NEW_LINE> <DEDENT> body = b''.join(body) <NEW_LINE> packet.append(struct.pack('<i', len(body))) <NEW_LINE> packet.append(body) <NEW_LINE> return b''.join(packet) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_binary(data): <NEW_LINE> <INDENT> obj = EventNotificationList() <NEW_LINE> obj.TypeId = NodeId.from_binary(data) <NEW_LINE> obj.Encoding = unpack_uatype('UInt8', data) <NEW_LINE> obj.BodyLength = unpack_uatype('Int32', data) <NEW_LINE> length = struct.unpack('<i', data.read(4))[0] <NEW_LINE> if length != -1: <NEW_LINE> <INDENT> for _ in range(0, length): <NEW_LINE> <INDENT> obj.Events.append(EventFieldList.from_binary(data)) <NEW_LINE> <DEDENT> <DEDENT> return obj <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'EventNotificationList(' + 'TypeId:' + str(self.TypeId) + ', ' + 'Encoding:' + str(self.Encoding) + ', ' + 'BodyLength:' + str(self.BodyLength) + ', ' + 'Events:' + str(self.Events) + ')' <NEW_LINE> <DEDENT> __repr__ = __str__
:ivar TypeId: :vartype TypeId: NodeId :ivar Encoding: :vartype Encoding: UInt8 :ivar BodyLength: :vartype BodyLength: Int32 :ivar Events: :vartype Events: EventFieldList
6259906797e22403b383c6a6
class Semanticizer(object): <NEW_LINE> <INDENT> def __init__(self, fname): <NEW_LINE> <INDENT> commonness = defaultdict(list) <NEW_LINE> self.db = sqlite3.connect(fname) <NEW_LINE> self._cur = self.db.cursor() <NEW_LINE> for target, anchor, count in self._get_senses_counts(): <NEW_LINE> <INDENT> commonness[anchor].append((target, count)) <NEW_LINE> <DEDENT> for anchor, targets in six.iteritems(commonness): <NEW_LINE> <INDENT> total = float(sum(count for _, count in targets)) <NEW_LINE> commonness[anchor] = [(t, count / total) for t, count in targets] <NEW_LINE> <DEDENT> self.commonness = commonness <NEW_LINE> self.N = self._get_ngram_max_length() <NEW_LINE> <DEDENT> def _get_ngram_max_length(self): <NEW_LINE> <INDENT> self._cur.execute("select value " "from parameters " "where key = 'N';") <NEW_LINE> return int(self._cur.fetchone()[0]) <NEW_LINE> <DEDENT> def _get_senses_counts(self): <NEW_LINE> <INDENT> return self._cur.execute('select target, ngram as anchor, count ' 'from linkstats, ngrams ' 'where ngram_id = ngrams.id;') <NEW_LINE> <DEDENT> def all_candidates(self, s): <NEW_LINE> <INDENT> if isinstance(s, six.string_types): <NEW_LINE> <INDENT> s = s.split() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = tosequence(s) <NEW_LINE> <DEDENT> for i, j, s in ngrams_with_pos(s, self.N): <NEW_LINE> <INDENT> if s in self.commonness: <NEW_LINE> <INDENT> for target, prob in self.commonness[s]: <NEW_LINE> <INDENT> yield i, j, target, prob
Entity linker. This is the main class for using Semanticizest. It's a handle on a statistical model that lives on disk. Parameters ---------- fname : string Filename of the stored model from which to load the Wikipedia statistics. Loading is lazy; the underlying file should not be modified while any Semanticizer is using it.
625990674f88993c371f10eb
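An illustrative usage sketch for the Semanticizer above; 'model.sqlite3' stands in for a previously trained model file:

    # 'model.sqlite3' is a placeholder path to a stored model.
    sem = Semanticizer('model.sqlite3')
    # all_candidates yields (start, end, target, commonness) tuples for known anchors.
    for start, end, target, prob in sem.all_candidates('the united states of america'):
        print(start, end, target, prob)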
class Category(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=30) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
This class is a simple foreign key for articles, defining their category.
6259906892d797404e38972a
class ImagesCollection(object): <NEW_LINE> <INDENT> def __init__(self, params: dict): <NEW_LINE> <INDENT> assert params['TYPE'] in ['BBOX_JSON_MARKING', 'IMAGES_DIR', 'GML_FACES_MARKING', 'GML_BBOXES_MARKING'] <NEW_LINE> self._params = params <NEW_LINE> self._samples = None <NEW_LINE> self._max_size = params['MAX_SIZE'] <NEW_LINE> self._scales = params['SCALES'] <NEW_LINE> self._num_backgrounds = None <NEW_LINE> self.imgs_path = None <NEW_LINE> self._name = params.get('NAME', None) <NEW_LINE> self.extract_clusters = params.get('EXTRACT_CLUSTERS', False) <NEW_LINE> if params['TYPE'] in ['BBOX_JSON_MARKING', 'GML_FACES_MARKING', 'GML_BBOXES_MARKING']: <NEW_LINE> <INDENT> json_format = {'BBOX_JSON_MARKING': 'default', 'GML_FACES_MARKING': 'gml_faces', 'GML_BBOXES_MARKING': 'gml_bboxes'}[params['TYPE']] <NEW_LINE> self.imgs_path = osp.join(params['PATH'], 'imgs') <NEW_LINE> if 'MARKING_NAME' in params: <NEW_LINE> <INDENT> self._samples = load_bboxes_dataset_with_json_marking( params['PATH'], params['MARKING_NAME'], self._max_size, self._scales, json_format) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._samples = load_bboxes_dataset_with_json_marking( params['PATH'], 'marking.json', self._max_size, self._scales, json_format) <NEW_LINE> <DEDENT> <DEDENT> elif params['TYPE'] == 'IMAGES_DIR': <NEW_LINE> <INDENT> self.imgs_path = params['PATH'] <NEW_LINE> scan_recurse = params.get('RECURSE', False) <NEW_LINE> self._samples = load_images_from_directory_without_marking( params['PATH'], self._max_size, self._scales, scan_recurse) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def max_size(self) -> int: <NEW_LINE> <INDENT> return self._max_size <NEW_LINE> <DEDENT> @property <NEW_LINE> def scales(self) -> list: <NEW_LINE> <INDENT> return self._scales <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._samples) <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_backgrounds(self): <NEW_LINE> <INDENT> if self._num_backgrounds is not None: <NEW_LINE> <INDENT> return self._num_backgrounds <NEW_LINE> <DEDENT> self._num_backgrounds = 0 <NEW_LINE> for sample in self._samples: <NEW_LINE> <INDENT> obj_count = 0 <NEW_LINE> for object in sample.marking: <NEW_LINE> <INDENT> if object['class'] > 0: <NEW_LINE> <INDENT> obj_count += 1 <NEW_LINE> <DEDENT> <DEDENT> if not obj_count: <NEW_LINE> <INDENT> self._num_backgrounds += 1 <NEW_LINE> <DEDENT> <DEDENT> return self._num_backgrounds <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_classes(self): <NEW_LINE> <INDENT> max_class = 0 <NEW_LINE> for sample in self._samples: <NEW_LINE> <INDENT> for object in sample.marking: <NEW_LINE> <INDENT> max_class = max(object['class'], max_class) <NEW_LINE> <DEDENT> <DEDENT> return max_class + 1 <NEW_LINE> <DEDENT> def __getitem__(self, key: int) -> ImageSample: <NEW_LINE> <INDENT> return self._samples[key]
A collection of images; supports various formats: BBOX_JSON_MARKING - images with JSON markup (see loaders.load_bboxes_dataset_with_json_marking) IMAGES_DIR - a directory of jpg, png, jpeg images without markup
6259906899cbb53fe683267e
class ChooseLev(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self, spriteCho, world): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.choice = world <NEW_LINE> pygame.sprite.Sprite.__init__(self) <NEW_LINE> sprite_sheet = SpriteSheet("Terrain.png") <NEW_LINE> image = sprite_sheet.get_image(spriteCho[0], spriteCho[1], spriteCho[2], spriteCho[3]) <NEW_LINE> self.image = image <NEW_LINE> self.rect = self.image.get_rect()
A portal the user can choose.
62599068ac7a0e7691f73c7f
class WorldAssemblyResignation(Action, WorldAssembly): <NEW_LINE> <INDENT> def __init__(self, text, params): <NEW_LINE> <INDENT> match = re.match( '@@(.+?)@@ resigned from the World Assembly.', text ) <NEW_LINE> if not match: <NEW_LINE> <INDENT> raise _ParseError <NEW_LINE> <DEDENT> self.agent = aionationstates.Nation(match.group(1)) <NEW_LINE> super().__init__(text, params)
A nation resigning from World Assembly.
6259906816aa5153ce401c73
class OnTaskWorkflowEmailError(OnTaskServiceException): <NEW_LINE> <INDENT> pass
Raised when an error appears in store_dataframe.
62599068e1aae11d1e7cf3d9
class Div2kConfig(tfds.core.BuilderConfig): <NEW_LINE> <INDENT> def __init__(self, name, **kwargs): <NEW_LINE> <INDENT> if name not in _DATA_OPTIONS: <NEW_LINE> <INDENT> raise ValueError("data must be one of %s" % _DATA_OPTIONS) <NEW_LINE> <DEDENT> description = kwargs.get("description", "Uses %s data." % name) <NEW_LINE> kwargs["description"] = description <NEW_LINE> super(Div2kConfig, self).__init__(name=name, **kwargs) <NEW_LINE> self.data = name <NEW_LINE> self.download_urls = { "train_lr_url": _DL_URLS["train_" + self.data], "valid_lr_url": _DL_URLS["valid_" + self.data], "train_hr_url": _DL_URLS["train_hr"], "valid_hr_url": _DL_URLS["valid_hr"], }
BuilderConfig for Div2k.
62599068cc0a2c111447c69d
class CompressMiddleware(object): <NEW_LINE> <INDENT> def __init__(self, app, conf): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self.conf = conf <NEW_LINE> self.compress_suffix = conf.get('compress_suffix', '') <NEW_LINE> <DEDENT> def __call__(self, env, start_response): <NEW_LINE> <INDENT> request = Request(env) <NEW_LINE> try: <NEW_LINE> <INDENT> (version, account, container, objname) = split_path(request.path_info, 1, 4, True) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> response = request.get_response(self.app) <NEW_LINE> return response(env, start_response) <NEW_LINE> <DEDENT> if not objname: <NEW_LINE> <INDENT> response = request.get_response(self.app) <NEW_LINE> if container: <NEW_LINE> <INDENT> if not request.params.has_key('compress'): <NEW_LINE> <INDENT> response.body = response.body.replace(self.compress_suffix, '') <NEW_LINE> <DEDENT> <DEDENT> return response(env, start_response) <NEW_LINE> <DEDENT> original_path_info = request.path_info <NEW_LINE> request.path_info += self.compress_suffix <NEW_LINE> if request.method == 'GET': <NEW_LINE> <INDENT> if not request.params.has_key('compress'): <NEW_LINE> <INDENT> response = request.get_response(self.app) <NEW_LINE> if response.status_int == 404: <NEW_LINE> <INDENT> request.path_info = original_path_info <NEW_LINE> response = request.get_response(self.app) <NEW_LINE> return response(env, start_response) <NEW_LINE> <DEDENT> uncompressed_data = create_uncompress(response.body) <NEW_LINE> response.body = uncompressed_data <NEW_LINE> return response(env, start_response) <NEW_LINE> <DEDENT> <DEDENT> if request.method == 'PUT': <NEW_LINE> <INDENT> if hasattr(request, 'body_file'): <NEW_LINE> <INDENT> data = "" <NEW_LINE> while True: <NEW_LINE> <INDENT> chunk = request.body_file.read() <NEW_LINE> if not chunk: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> data += chunk <NEW_LINE> <DEDENT> request.body = data <NEW_LINE> compress_data = create_compress(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> compress_data = create_compress(request.body) <NEW_LINE> <DEDENT> if compress_data: <NEW_LINE> <INDENT> request.body = compress_data <NEW_LINE> <DEDENT> <DEDENT> response = request.get_response(self.app) <NEW_LINE> return response(env, start_response)
Compress middleware used for object compression
62599068a8370b77170f1b60
class pickled_method(object): <NEW_LINE> <INDENT> def __init__(self, file, name): <NEW_LINE> <INDENT> self._file = file <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> def __call__(self, function): <NEW_LINE> <INDENT> return Picklize(function, self._file, self._name)
Pickles the result of the method (ignoring arguments) and uses this if possible on the next call.
625990687047854f46340b4f
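A hypothetical usage sketch for the pickled_method decorator above; it assumes the Picklize wrapper referenced in __call__ caches the method's return value to the given file and reuses it on later calls:

    class Report:
        @pickled_method('totals.pkl', 'totals')   # 'totals.pkl' is an example filename
        def totals(self):
            # Stand-in for an expensive computation whose result is worth caching.
            return sum(i * i for i in range(10_000))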
class Software(object): <NEW_LINE> <INDENT> def __init__(self, software): <NEW_LINE> <INDENT> self.kind = get_string(software.SoftwareKind) <NEW_LINE> self.producer = get_string(software.SoftwareProducer) <NEW_LINE> self.description = get_string(software.Description) <NEW_LINE> self.version = get_string(software.Version) <NEW_LINE> self.filename = get_string(software.FileName) <NEW_LINE> self.file_size = get_uint(software.FileSize) <NEW_LINE> self.file_time = get_datetime(software.FileDateTime) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.__dict__)
Represents a version of a device. This can be used to describe any versionable portion of a device, and not just software. Mandatory fields: :ivar string description: A description of the software (such as the name). :ivar string version: The software version. Optional fields: :ivar string kind: The type of the software. Possible values are *firmware*, *software* or *hardware*. :ivar string producer: The software producer. :ivar string filename: The name of the file. :ivar integer file_size: Size of the file. :ivar datetime file_time: A time associated with the file, for versioning purposes.
625990684a966d76dd5f068e
class ThreadedSSHClient: <NEW_LINE> <INDENT> def __init__(self, host_file): <NEW_LINE> <INDENT> self.host_file = host_file <NEW_LINE> self.lock_obj = threading.Lock() <NEW_LINE> self.__parse_host_file() <NEW_LINE> <DEDENT> def __parse_host_file(self): <NEW_LINE> <INDENT> tree = et.parse(self.host_file) <NEW_LINE> threads = list() <NEW_LINE> for host_tag in tree.getiterator('host'): <NEW_LINE> <INDENT> host_config = list() <NEW_LINE> host_config.extend([host_tag.get('hostname'), int(host_tag.get('port'))]) <NEW_LINE> for child_tag in host_tag: <NEW_LINE> <INDENT> host_config.append(child_tag.text) <NEW_LINE> <DEDENT> host_config.append(self.lock_obj) <NEW_LINE> t = threading.Thread(target=ThreadedSSHHandler, args=host_config) <NEW_LINE> threads.append(t) <NEW_LINE> t.start() <NEW_LINE> <DEDENT> for t in threads: <NEW_LINE> <INDENT> t.join()
Main thread.
6259906845492302aabfdc77
class PlacementZone(CloudResource): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractproperty <NEW_LINE> def region_name(self): <NEW_LINE> <INDENT> pass
Represents a placement zone. A placement zone is contained within a Region.
62599068796e427e5384ff11
class Precision(): <NEW_LINE> <INDENT> def __call__(self, pos_score, neg_score): <NEW_LINE> <INDENT> scores = torch.cat((pos_score[:, 1], neg_score[:, 1]), 0) <NEW_LINE> topk = torch.topk(scores, pos_score.size(0))[1] <NEW_LINE> prec = (topk < pos_score.size(0)).float().sum() / (pos_score.size(0) + 1e-8) <NEW_LINE> return prec.data[0]
The meaning is not entirely clear.
6259906801c39578d7f14302
class ServiceRPC: <NEW_LINE> <INDENT> def __init__(self, core_client): <NEW_LINE> <INDENT> self.__common_client = _CommonClient(core_client) <NEW_LINE> <DEDENT> def add(self, name="", params={}): <NEW_LINE> <INDENT> return self.__common_client._add('add-service-rpc', name, params) <NEW_LINE> <DEDENT> def show(self, name='', uid='', details_level=''): <NEW_LINE> <INDENT> return self.__common_client._show('show-service-rpc', name, uid, details_level) <NEW_LINE> <DEDENT> def set(self, name='', uid='', params={}): <NEW_LINE> <INDENT> return self.__common_client._set('set-service-rpc', name, uid, params) <NEW_LINE> <DEDENT> def delete(self, name='', uid='', params={}): <NEW_LINE> <INDENT> return self.__common_client._delete('delete-service-rpc', name, uid, params) <NEW_LINE> <DEDENT> def show_all(self, limit=50, offset=0, order=[], details_level=''): <NEW_LINE> <INDENT> return self.__common_client._show_all('show-services-rpc', limit=limit, offset=offset, order=order, details_level=details_level)
Manage RPC services.
625990683317a56b869bf110
class BeamModel: <NEW_LINE> <INDENT> def __init__(self, span, n_support_xx=0, n_support_yy=0): <NEW_LINE> <INDENT> self._span = span <NEW_LINE> self._n_support_xx = n_support_xx <NEW_LINE> self._n_support_yy = n_support_yy <NEW_LINE> self.load_case_xx = [] <NEW_LINE> self.load_case_yy = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def span(self): <NEW_LINE> <INDENT> return self._span <NEW_LINE> <DEDENT> @span.setter <NEW_LINE> def span(self, span): <NEW_LINE> <INDENT> self._span = span <NEW_LINE> <DEDENT> def add_load_case(self, l_case, _dir=DIR_AROUND_XX): <NEW_LINE> <INDENT> if _dir == DIR_AROUND_XX: <NEW_LINE> <INDENT> self.load_case_xx.append(l_case) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.load_case_yy.append(l_case) <NEW_LINE> <DEDENT> <DEDENT> def calc_stress(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def output_result(self): <NEW_LINE> <INDENT> pass
Class for a beam model.
62599068627d3e7fe0e08624
class CmdRest(Command): <NEW_LINE> <INDENT> key = "rest" <NEW_LINE> help_category = "combat" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> if is_in_combat(self.caller): <NEW_LINE> <INDENT> self.caller.msg("You can't rest while you're in combat.") <NEW_LINE> return <NEW_LINE> <DEDENT> self.caller.db.hp = self.caller.db.max_hp <NEW_LINE> self.caller.location.msg_contents("%s rests to recover HP." % self.caller)
Recovers damage. Usage: rest Resting recovers your HP to its maximum, but you can only rest if you're not in a fight.
625990688e7ae83300eea829
class Settings(AppSettings): <NEW_LINE> <INDENT> THUMBNAIL_DEBUG = False <NEW_LINE> THUMBNAIL_DEFAULT_STORAGE = ( 'easy_thumbnails.storage.ThumbnailFileSystemStorage') <NEW_LINE> THUMBNAIL_MEDIA_ROOT = '' <NEW_LINE> THUMBNAIL_MEDIA_URL = '' <NEW_LINE> THUMBNAIL_BASEDIR = '' <NEW_LINE> THUMBNAIL_SUBDIR = '' <NEW_LINE> THUMBNAIL_PREFIX = '' <NEW_LINE> THUMBNAIL_QUALITY = 85 <NEW_LINE> THUMBNAIL_EXTENSION = 'jpg' <NEW_LINE> THUMBNAIL_PRESERVE_EXTENSIONS = None <NEW_LINE> THUMBNAIL_TRANSPARENCY_EXTENSION = 'png' <NEW_LINE> THUMBNAIL_PROCESSORS = ( 'easy_thumbnails.processors.colorspace', 'easy_thumbnails.processors.autocrop', 'easy_thumbnails.processors.scale_and_crop', 'easy_thumbnails.processors.filters', ) <NEW_LINE> THUMBNAIL_SOURCE_GENERATORS = ( 'easy_thumbnails.source_generators.pil_image', ) <NEW_LINE> THUMBNAIL_CHECK_CACHE_MISS = False <NEW_LINE> THUMBNAIL_ALIASES = None <NEW_LINE> THUMBNAIL_DEFAULT_OPTIONS = None <NEW_LINE> THUMBNAIL_HIGH_RESOLUTION = False <NEW_LINE> THUMBNAIL_REMOTE_STORAGE = False <NEW_LINE> FILE_REMOTE_STORAGE = False <NEW_LINE> EASY_CACHE_TIMEOUT = 60 * 60 * 24 * 30
These default settings for easy-thumbnails can be specified in your Django project's settings module to alter the behaviour of easy-thumbnails.
6259906899cbb53fe6832680
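For illustration, a Django settings.py override of a few of the names listed above (the values are examples, not recommendations):

    # settings.py -- example overrides for easy-thumbnails
    THUMBNAIL_QUALITY = 90
    THUMBNAIL_EXTENSION = 'png'
    THUMBNAIL_ALIASES = {
        '': {'avatar': {'size': (64, 64), 'crop': True}},
    }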
class Angle(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.type = int <NEW_LINE> self.atom = [] <NEW_LINE> <DEDENT> def read(self, input, index): <NEW_LINE> <INDENT> index = self._read_type(input, index) <NEW_LINE> index = self._read_atom(input, index) <NEW_LINE> if (index != len(input)): <NEW_LINE> <INDENT> raise IOError("input is too long to be an angle") <NEW_LINE> <DEDENT> <DEDENT> def _read_type(self, input, index): <NEW_LINE> <INDENT> self.type = int(input[index]) <NEW_LINE> index += 1 <NEW_LINE> return index <NEW_LINE> <DEDENT> def _read_atom(self, input, index): <NEW_LINE> <INDENT> if (len(self.atom) != 0): self.atom = [] <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> self.atom.append(int(input[index])) <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> return index <NEW_LINE> <DEDENT> def write(self): <NEW_LINE> <INDENT> return "{0} {1}".format(self.type, self._write_atom()) <NEW_LINE> <DEDENT> def _write_atom(self): <NEW_LINE> <INDENT> return " ".join(str(self.atom[i]) for i in range(3))
stores, reads and writes a LAMMPS angle
6259906856b00c62f0fb4069
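A short usage sketch for the Angle class above, assuming the whitespace-tokenized input format that read() expects (angle type followed by three atom ids):

    a = Angle()
    a.read('1 5 6 7'.split(), 0)   # type 1, atoms 5, 6, 7
    print(a.write())               # -> '1 5 6 7'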
class ListingViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Listing.objects.all().order_by('-time') <NEW_LINE> serializer_class = ListingSerializer
API endpoint that allows listings to be viewed or edited.
6259906826068e7796d4e0d5
class MessageCountDetails(Model): <NEW_LINE> <INDENT> _validation = { 'active_message_count': {'readonly': True}, 'dead_letter_message_count': {'readonly': True}, 'scheduled_message_count': {'readonly': True}, 'transfer_message_count': {'readonly': True}, 'transfer_dead_letter_message_count': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'active_message_count': {'key': 'activeMessageCount', 'type': 'long'}, 'dead_letter_message_count': {'key': 'deadLetterMessageCount', 'type': 'long'}, 'scheduled_message_count': {'key': 'scheduledMessageCount', 'type': 'long'}, 'transfer_message_count': {'key': 'transferMessageCount', 'type': 'long'}, 'transfer_dead_letter_message_count': {'key': 'transferDeadLetterMessageCount', 'type': 'long'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(MessageCountDetails, self).__init__(**kwargs) <NEW_LINE> self.active_message_count = None <NEW_LINE> self.dead_letter_message_count = None <NEW_LINE> self.scheduled_message_count = None <NEW_LINE> self.transfer_message_count = None <NEW_LINE> self.transfer_dead_letter_message_count = None
Message Count Details. Variables are only populated by the server, and will be ignored when sending a request. :ivar active_message_count: Number of active messages in the queue, topic, or subscription. :vartype active_message_count: long :ivar dead_letter_message_count: Number of messages that are dead lettered. :vartype dead_letter_message_count: long :ivar scheduled_message_count: Number of scheduled messages. :vartype scheduled_message_count: long :ivar transfer_message_count: Number of messages transferred to another queue, topic, or subscription. :vartype transfer_message_count: long :ivar transfer_dead_letter_message_count: Number of messages transferred into dead letters. :vartype transfer_dead_letter_message_count: long
62599068a8370b77170f1b61
class BCardSite(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('businesscard') <NEW_LINE> verbose_name_plural = _('businesscards') <NEW_LINE> unique_together = ('user', 'name') <NEW_LINE> <DEDENT> user = models.ForeignKey(User) <NEW_LINE> name = models.SlugField() <NEW_LINE> default_page = models.OneToOneField(BCardPage, related_name='+', null=True, blank=True) <NEW_LINE> template = models.OneToOneField(DBTemplate, null=True, blank=True) <NEW_LINE> def save(self, **kwargs): <NEW_LINE> <INDENT> if self.pk is None: <NEW_LINE> <INDENT> super(BCardSite, self).save(**kwargs) <NEW_LINE> path = settings.BCARD_DEFAULT_PATH <NEW_LINE> def_page = BCardPage(bcard=self, path=path) <NEW_LINE> def_page.title = settings.BCARD_DEFAULT_PAGE_TITLE <NEW_LINE> def_page.content = settings.BCARD_DEFAULT_PAGE_CONTENT <NEW_LINE> def_page.save() <NEW_LINE> self.default_page = def_page <NEW_LINE> def_template_content = loader.get_template( settings.BCARD_DEFAULT_TEMPLATE).render(Context()) <NEW_LINE> def_template_name = '{0}/{1}'.format( self, settings.BCARD_DEFAULT_TEMPLATE_NAME) <NEW_LINE> def_template = DBTemplate( name=def_template_name, content=def_template_content) <NEW_LINE> def_template.save() <NEW_LINE> self.template = def_template <NEW_LINE> <DEDENT> super(BCardSite, self).save(**kwargs) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{0}/{1}'.format(self.user.username, self.name)
Represents a single site a user owns.
625990686e29344779b01dee
class BlNode: <NEW_LINE> <INDENT> DEBUG_NODES_IDS = {'SvDebugPrintNode', 'SvStethoscopeNode'} <NEW_LINE> def __init__(self, node): <NEW_LINE> <INDENT> self.data = node <NEW_LINE> <DEDENT> @property <NEW_LINE> def properties(self) -> List[BPYProperty]: <NEW_LINE> <INDENT> node_properties = self.data.bl_rna.__annotations__ if hasattr(self.data.bl_rna, '__annotations__') else [] <NEW_LINE> return [BPYProperty(self.data, prop_name) for prop_name in node_properties] <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_debug_node(self) -> bool: <NEW_LINE> <INDENT> return self.base_idname in self.DEBUG_NODES_IDS <NEW_LINE> <DEDENT> @property <NEW_LINE> def base_idname(self) -> str: <NEW_LINE> <INDENT> id_name, _, version = self.data.bl_idname.partition('MK') <NEW_LINE> try: <NEW_LINE> <INDENT> int(version) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return self.data.bl_idname <NEW_LINE> <DEDENT> return id_name
Wraps an ordinary node to extract some of its information.
62599068be8e80087fbc0826
class TestStreamingRFC2(PartialFitTests, unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.n_samples = 1000 <NEW_LINE> cls.x, cls.y = sklearn.datasets.make_blobs(n_samples=int(2e4), random_state=0, n_features=40, centers=2, cluster_std=100) <NEW_LINE> cls.mod = StreamingRFC(n_estimators_per_chunk=1, max_n_estimators=39) <NEW_LINE> cls.expected_n_estimators = 39 <NEW_LINE> super().setUpClass()
Test SRFC with a single estimator per chunk and "random forest style" max features, i.e., a subset. Total models limited to 39.
625990683d592f4c4edbc67b
class SpeechEventType(enum.IntEnum): <NEW_LINE> <INDENT> SPEECH_EVENT_UNSPECIFIED = 0 <NEW_LINE> END_OF_SINGLE_UTTERANCE = 1
Indicates the type of speech event. Attributes: SPEECH_EVENT_UNSPECIFIED (int): No speech event specified. END_OF_SINGLE_UTTERANCE (int): This event indicates that the server has detected the end of the user's speech utterance and expects no additional speech. Therefore, the server will not process additional audio (although it may subsequently return additional results). The client should stop sending additional audio data, half-close the gRPC connection, and wait for any additional results until the server closes the gRPC connection. This event is only sent if ``single_utterance`` was set to ``true``, and is not used otherwise.
62599068adb09d7d5dc0bd07
class TestIssue119(unittest.TestCase): <NEW_LINE> <INDENT> layer = Issues <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.proj_dir = os.getcwd() <NEW_LINE> self.dirpath = tempfile.mkdtemp() <NEW_LINE> os.chdir(self.dirpath) <NEW_LINE> self.repo = git.Repo.init(self.dirpath) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree(self.dirpath) <NEW_LINE> os.chdir(self.proj_dir) <NEW_LINE> <DEDENT> def test(self): <NEW_LINE> <INDENT> settings_proc = subprocess.Popen( 'git settings list --keys'.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE ) <NEW_LINE> stdout, stderr = [x.decode('utf-8') for x in settings_proc.communicate()] <NEW_LINE> self.assertFalse(stdout) <NEW_LINE> self.assertEqual(stderr.strip(), 'error: argument -k/--keys: not allowed without positional argument section')
Limiting settings to keys should require a section
625990684f88993c371f10ed
class SessionalConfig(models.Model): <NEW_LINE> <INDENT> unit = models.OneToOneField(Unit, null=False, blank=False, on_delete=models.PROTECT) <NEW_LINE> appointment_start = models.DateField() <NEW_LINE> appointment_end = models.DateField() <NEW_LINE> pay_start = models.DateField() <NEW_LINE> pay_end = models.DateField() <NEW_LINE> course_hours_breakdown = models.CharField(null=True, blank=True, max_length=100, help_text="e.g. 1x2HR Lecture, etc. This will show up in the form in " "the column after the course department and number.") <NEW_LINE> def autoslug(self): <NEW_LINE> <INDENT> return make_slug(self.unit.label) <NEW_LINE> <DEDENT> slug = AutoSlugField(populate_from='autoslug', null=False, editable=False, unique=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s - %s" % (self.unit.label, "default configuration for sessional contracts") <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> raise NotImplementedError("This object cannot be deleted")
An object to hold default dates for a given unit. The user can change these whenever the semesters change, and the new contracts will use these as defaults. There should only be one of these per unit, to avoid overwriting someone else's.
625990688da39b475be04988
class signatures(JobProperty): <NEW_LINE> <INDENT> statusOn=True <NEW_LINE> allowedTypes=['list'] <NEW_LINE> StoredValue = []
signatures in MET slice
625990683cc13d1c6d466ee1
class TaskPublisher(Publisher): <NEW_LINE> <INDENT> exchange = default_queue["exchange"] <NEW_LINE> exchange_type = default_queue["exchange_type"] <NEW_LINE> routing_key = conf.DEFAULT_ROUTING_KEY <NEW_LINE> serializer = conf.TASK_SERIALIZER <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(TaskPublisher, self).__init__(*args, **kwargs) <NEW_LINE> global _queues_declared <NEW_LINE> if not _queues_declared: <NEW_LINE> <INDENT> consumers = get_consumer_set(self.connection) <NEW_LINE> consumers.close() <NEW_LINE> _queues_declared = True <NEW_LINE> <DEDENT> <DEDENT> def delay_task(self, task_name, task_args=None, task_kwargs=None, task_id=None, taskset_id=None, **kwargs): <NEW_LINE> <INDENT> task_id = task_id or gen_unique_id() <NEW_LINE> eta = kwargs.get("eta") <NEW_LINE> eta = eta and eta.isoformat() <NEW_LINE> message_data = { "task": task_name, "id": task_id, "args": task_args or [], "kwargs": task_kwargs or {}, "retries": kwargs.get("retries", 0), "eta": eta, } <NEW_LINE> if taskset_id: <NEW_LINE> <INDENT> message_data["taskset"] = taskset_id <NEW_LINE> <DEDENT> self.send(message_data, **extract_msg_options(kwargs)) <NEW_LINE> signals.task_sent.send(sender=task_name, **message_data) <NEW_LINE> return task_id
Publish tasks.
625990687cff6e4e811b71e5
class ExpressRouteCrossConnectionsRoutesTableSummaryListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[ExpressRouteCrossConnectionRoutesTableSummary]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ExpressRouteCrossConnectionRoutesTableSummary"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ExpressRouteCrossConnectionsRoutesTableSummaryListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = None
Response for ListRoutesTable associated with the Express Route Cross Connections. Variables are only populated by the server, and will be ignored when sending a request. :param value: A list of the routes table. :type value: list[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCrossConnectionRoutesTableSummary] :ivar next_link: The URL to get the next set of results. :vartype next_link: str
62599068627d3e7fe0e08626
class OSDiskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> MANAGED = "Managed" <NEW_LINE> EPHEMERAL = "Ephemeral"
OSDiskType represents the type of an OS disk on an agent pool.
6259906845492302aabfdc7a
@pulumi.output_type <NEW_LINE> class GetCloudletsApplicationLoadBalancerMatchRuleResult: <NEW_LINE> <INDENT> def __init__(__self__, id=None, json=None, match_rules=None): <NEW_LINE> <INDENT> if id and not isinstance(id, str): <NEW_LINE> <INDENT> raise TypeError("Expected argument 'id' to be a str") <NEW_LINE> <DEDENT> pulumi.set(__self__, "id", id) <NEW_LINE> if json and not isinstance(json, str): <NEW_LINE> <INDENT> raise TypeError("Expected argument 'json' to be a str") <NEW_LINE> <DEDENT> pulumi.set(__self__, "json", json) <NEW_LINE> if match_rules and not isinstance(match_rules, list): <NEW_LINE> <INDENT> raise TypeError("Expected argument 'match_rules' to be a list") <NEW_LINE> <DEDENT> pulumi.set(__self__, "match_rules", match_rules) <NEW_LINE> <DEDENT> @property <NEW_LINE> @pulumi.getter <NEW_LINE> def id(self) -> str: <NEW_LINE> <INDENT> return pulumi.get(self, "id") <NEW_LINE> <DEDENT> @property <NEW_LINE> @pulumi.getter <NEW_LINE> def json(self) -> str: <NEW_LINE> <INDENT> return pulumi.get(self, "json") <NEW_LINE> <DEDENT> @property <NEW_LINE> @pulumi.getter(name="matchRules") <NEW_LINE> def match_rules(self) -> Optional[Sequence['outputs.GetCloudletsApplicationLoadBalancerMatchRuleMatchRuleResult']]: <NEW_LINE> <INDENT> return pulumi.get(self, "match_rules")
A collection of values returned by getCloudletsApplicationLoadBalancerMatchRule.
62599068435de62698e9d5a7
class LogicEFTBotBase: <NEW_LINE> <INDENT> def has_command(self, cmd: str): <NEW_LINE> <INDENT> return cmd in self.commands <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self.commands = {} <NEW_LINE> log.info(f"Loading commands for bot...") <NEW_LINE> for attr in dir(self): <NEW_LINE> <INDENT> obj = getattr(self, attr) <NEW_LINE> if attr.startswith('__') or not callable(obj): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not hasattr(obj, '_bot_command'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> cmd = getattr(obj, '_bot_command') <NEW_LINE> if cmd in self.commands: <NEW_LINE> <INDENT> raise Error(f"LogicEFTBot has duplicate commands registered for ${cmd}") <NEW_LINE> <DEDENT> self.commands[cmd] = obj <NEW_LINE> log.info(f"Registered command `{cmd}` to fn `{attr}`") <NEW_LINE> <DEDENT> <DEDENT> def exec(self, ctx: CommandContext, command: str, data: Optional[str]) -> str: <NEW_LINE> <INDENT> if not command in self.commands: <NEW_LINE> <INDENT> raise CommandNotFoundException(command) <NEW_LINE> <DEDENT> fn = self.commands[command] <NEW_LINE> return fn(ctx, data)
A base class for implementing the EFT bot. Provides the logic for automatically registering commands.
6259906823849d37ff852853
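A minimal sketch of how the registration mechanism described above is driven from a subclass. The `bot_command` helper below is a hypothetical stand-in for whatever decorator the real project uses to set the `_bot_command` attribute; the command name "price" is likewise illustrative.

def bot_command(name):
    # Hypothetical decorator: tags a method with the attribute LogicEFTBotBase scans for.
    def wrap(fn):
        fn._bot_command = name
        return fn
    return wrap

class PriceBot(LogicEFTBotBase):
    @bot_command("price")
    def cmd_price(self, ctx, data):
        return f"price lookup for {data}"

bot = PriceBot()                                          # registers `price` -> cmd_price
print(bot.has_command("price"))                           # True
print(bot.exec(ctx=None, command="price", data="ammo"))   # price lookup for ammo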
class DummyTag(Tag): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(DummyTag, self).__init__() <NEW_LINE> <DEDENT> def make_output(self, tab=''): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ''
Dummy tag. It produces no output.
625990687d43ff2487427fdf
class PyLayerContext(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.container = None <NEW_LINE> <DEDENT> def save_for_backward(self, *tensors): <NEW_LINE> <INDENT> self.container = tensors <NEW_LINE> <DEDENT> def saved_tensor(self): <NEW_LINE> <INDENT> return self.container
The object of this class is a context that is used in PyLayer to enhance the function. Examples: .. code-block:: python import paddle from paddle.autograd import PyLayer class cus_tanh(PyLayer): @staticmethod def forward(ctx, x): # ctx is a object of PyLayerContext. y = paddle.tanh(x) ctx.save_for_backward(y) return y @staticmethod def backward(ctx, dy): # ctx is a object of PyLayerContext. y, = ctx.saved_tensor() grad = dy * (1 - paddle.square(y)) return grad
625990682c8b7c6e89bd4f83
class matGen3D: <NEW_LINE> <INDENT> def __init__(self, matlParams: tuple, eps, del_eps=0) -> None: <NEW_LINE> <INDENT> self.ymod = matlParams[0] <NEW_LINE> self.Nu = matlParams[1] <NEW_LINE> self.eps = eps <NEW_LINE> self.del_eps = del_eps <NEW_LINE> <DEDENT> def LEIsotropic3D(self): <NEW_LINE> <INDENT> const = self.ymod / ((1+self.Nu) * (1-(2*self.Nu))) <NEW_LINE> a = const * self.Nu <NEW_LINE> b = const * (1-self.Nu) <NEW_LINE> c = (a-b)/2 <NEW_LINE> Cmat = np.array( [ [b, a, a, 0, 0, 0], [a, b, a, 0, 0, 0], [a, a, b, 0, 0, 0], [0, 0, 0, c, 0, 0], [0, 0, 0, 0, c, 0], [0, 0, 0, 0, 0, c] ], dtype=float) <NEW_LINE> stress_el = Cmat @ self.eps <NEW_LINE> return stress_el, Cmat
Defines the material routine for a 3D linear-elastic isotropic material.
6259906821bff66bcd724403
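A short usage sketch of the isotropic routine above; the Young's modulus and Poisson's ratio below are assumed example values, and the strain is given as a 6-component Voigt vector.

import numpy as np

E, nu = 210e3, 0.3                                   # assumed material parameters
eps = np.array([1e-3, 0.0, 0.0, 0.0, 0.0, 0.0])      # uniaxial strain, Voigt notation

mat = matGen3D((E, nu), eps)
stress, Cmat = mat.LEIsotropic3D()
print(Cmat.shape)    # (6, 6) stiffness matrix
print(stress[0])     # axial stress component for this strain state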
class QueryResultPassage(): <NEW_LINE> <INDENT> def __init__(self, *, passage_text=None, start_offset=None, end_offset=None, field=None): <NEW_LINE> <INDENT> self.passage_text = passage_text <NEW_LINE> self.start_offset = start_offset <NEW_LINE> self.end_offset = end_offset <NEW_LINE> self.field = field <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> valid_keys = ['passage_text', 'start_offset', 'end_offset', 'field'] <NEW_LINE> bad_keys = set(_dict.keys()) - set(valid_keys) <NEW_LINE> if bad_keys: <NEW_LINE> <INDENT> raise ValueError( 'Unrecognized keys detected in dictionary for class QueryResultPassage: ' + ', '.join(bad_keys)) <NEW_LINE> <DEDENT> if 'passage_text' in _dict: <NEW_LINE> <INDENT> args['passage_text'] = _dict.get('passage_text') <NEW_LINE> <DEDENT> if 'start_offset' in _dict: <NEW_LINE> <INDENT> args['start_offset'] = _dict.get('start_offset') <NEW_LINE> <DEDENT> if 'end_offset' in _dict: <NEW_LINE> <INDENT> args['end_offset'] = _dict.get('end_offset') <NEW_LINE> <DEDENT> if 'field' in _dict: <NEW_LINE> <INDENT> args['field'] = _dict.get('field') <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'passage_text') and self.passage_text is not None: <NEW_LINE> <INDENT> _dict['passage_text'] = self.passage_text <NEW_LINE> <DEDENT> if hasattr(self, 'start_offset') and self.start_offset is not None: <NEW_LINE> <INDENT> _dict['start_offset'] = self.start_offset <NEW_LINE> <DEDENT> if hasattr(self, 'end_offset') and self.end_offset is not None: <NEW_LINE> <INDENT> _dict['end_offset'] = self.end_offset <NEW_LINE> <DEDENT> if hasattr(self, 'field') and self.field is not None: <NEW_LINE> <INDENT> _dict['field'] = self.field <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
A passage query result. :attr str passage_text: (optional) The content of the extracted passage. :attr int start_offset: (optional) The position of the first character of the extracted passage in the originating field. :attr int end_offset: (optional) The position of the last character of the extracted passage in the originating field. :attr str field: (optional) The label of the field from which the passage has been extracted.
62599068ac7a0e7691f73c83
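A small round-trip sketch with the serialization helpers shown above; the field values are illustrative.

raw = {"passage_text": "an extracted sentence", "start_offset": 10, "end_offset": 32, "field": "text"}
passage = QueryResultPassage._from_dict(raw)
print(passage.field)                 # text
print(passage._to_dict() == raw)     # True
print(passage)                       # pretty-printed JSON via __str__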
class DataProvider: <NEW_LINE> <INDENT> alias = None <NEW_LINE> _connector_cls = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._connector = self._connector_init() <NEW_LINE> <DEDENT> def _get_setting(self, name: str) -> str: <NEW_LINE> <INDENT> return get_setting('%s_%s' % (self.alias.upper(), name)) <NEW_LINE> <DEDENT> def _connector_init(self): <NEW_LINE> <INDENT> connector_cls = self._connector_cls <NEW_LINE> connector = None <NEW_LINE> if connector_cls: <NEW_LINE> <INDENT> connector = connector_cls() <NEW_LINE> <DEDENT> return connector <NEW_LINE> <DEDENT> def get_data(self, at): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for data providers.
6259906866673b3332c31b9a
class ReportView(BrowserView): <NEW_LINE> <INDENT> index = ViewPageTemplateFile("template/report_view.pt") <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> context = self.context <NEW_LINE> request = self.request <NEW_LINE> self.getDB() <NEW_LINE> catalog = context.portal_catalog <NEW_LINE> portal = api.portal.get() <NEW_LINE> upperLimit = context.upperLimit <NEW_LINE> lowerLimit = context.lowerLimit <NEW_LINE> timesNumber = context.relOid.to_object.timesNumber <NEW_LINE> start = int(request.form.get('start', 1)) <NEW_LINE> startDate = date.today() - timedelta(start) <NEW_LINE> Session = sessionmaker() <NEW_LINE> session = Session() <NEW_LINE> queryCond = select([self.snmp_record]) .where(self.snmp_record.c.device_locate == context.relDevice.to_object.device_locate) .where(self.snmp_record.c.oid == context.relOid.to_object.oid) .where(self.snmp_record.c.record_time > startDate) .order_by(self.snmp_record.c.record_time) <NEW_LINE> conn = ENGINE.connect() <NEW_LINE> result = conn.execute(queryCond) <NEW_LINE> self.timeStr = '' <NEW_LINE> self.floatStr = '' <NEW_LINE> self.upperFloatStr = '' <NEW_LINE> self.lowerFloatStr = '' <NEW_LINE> for item in result: <NEW_LINE> <INDENT> self.timeStr += "'%s', " % item['record_time'] <NEW_LINE> self.floatStr += "%s, " % round(int(item['record_int'])*timesNumber,1) <NEW_LINE> self.upperFloatStr += "%s, " % upperLimit <NEW_LINE> self.lowerFloatStr += "%s, " % lowerLimit <NEW_LINE> <DEDENT> self.timeStr = self.timeStr[:-2] <NEW_LINE> self.floatStr = self.floatStr[:-2] <NEW_LINE> self.upperFloatStr = self.upperFloatStr[:-2] <NEW_LINE> self.lowerFloatStr = self.lowerFloatStr[:-2] <NEW_LINE> conn.close() <NEW_LINE> return self.index() <NEW_LINE> <DEDENT> def getDB(self): <NEW_LINE> <INDENT> self.metadata = MetaData(ENGINE) <NEW_LINE> self.snmp_record = Table( 'snmp_record', self.metadata, Column('id', INTEGER, primary_key=True, autoincrement=True), Column('record_time', DateTime), Column('device_locate', String(200)), Column('oid', String(200)), Column('record_str', String(500)), Column('record_int', INTEGER), mysql_engine='InnoDB', mysql_charset='utf8', use_unicode=True, )
Report View
6259906826068e7796d4e0d7
class ArticleSearchClient(BaseSearchClient): <NEW_LINE> <INDENT> SEARCH_TYPE = "articles" <NEW_LINE> SCHEMA = schemas.ArticleSearchSchema <NEW_LINE> def one(self): <NEW_LINE> <INDENT> if len(self.results) > 1: <NEW_LINE> <INDENT> raise exceptions.MultipleResultsFound( "Found %d!" % len(self.results)) <NEW_LINE> <DEDENT> elif len(self.results) < 1: <NEW_LINE> <INDENT> raise exceptions.ResultNotFound( "Search returned zero results") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.results[0] <NEW_LINE> <DEDENT> <DEDENT> def search_by_doi(self, doi, exact=False): <NEW_LINE> <INDENT> match = DOI_RE.match(doi) <NEW_LINE> if not match: <NEW_LINE> <INDENT> raise ValueError("%s is not a valid doi" % doi) <NEW_LINE> <DEDENT> if exact: <NEW_LINE> <INDENT> prefix="doi.exact" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix="doi" <NEW_LINE> <DEDENT> return self.search(match.string, prefix=prefix) <NEW_LINE> <DEDENT> def search_by_publisher(self, publisher, exact=False): <NEW_LINE> <INDENT> if exact: <NEW_LINE> <INDENT> prefix="publisher.exact" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix="publisher" <NEW_LINE> <DEDENT> return self.search(publisher, prefix=prefix) <NEW_LINE> <DEDENT> def search_by_eissn(self, issn): <NEW_LINE> <INDENT> prefix="issn" <NEW_LINE> return self.search(issn, prefix=prefix)
Client for searching articles; supports lookup by DOI, publisher, and eISSN.
62599068a17c0f6771d5d776
class FunctionTests(SimpleGetTest): <NEW_LINE> <INDENT> endpoint = '/rest-api/moron/' <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> Moron.objects.get_or_create(name='bob') <NEW_LINE> Moron.objects.get_or_create(name='paul') <NEW_LINE> super(FunctionTests, self).setUp() <NEW_LINE> <DEDENT> def test_get_list_response(self): <NEW_LINE> <INDENT> response = self.response <NEW_LINE> view = response.renderer_context['view'] <NEW_LINE> queryset = view.get_queryset() <NEW_LINE> list_response = services.get_list_response(view, queryset) <NEW_LINE> list_response.accepted_renderer = response.accepted_renderer <NEW_LINE> list_response.accepted_media_type = response.accepted_media_type <NEW_LINE> list_response.renderer_context = response.renderer_context <NEW_LINE> self.assertContains(list_response, "bob") <NEW_LINE> self.assertContains(list_response, "paul") <NEW_LINE> <DEDENT> def test_append_collection_links(self): <NEW_LINE> <INDENT> response = self.response <NEW_LINE> links = {"morons": self.endpoint} <NEW_LINE> response = services.append_collection_links(response, links) <NEW_LINE> self.assertEqual(response.data['collection_links'], {'morons': self.endpoint}) <NEW_LINE> <DEDENT> def test_append_collection_template(self): <NEW_LINE> <INDENT> response = self.response <NEW_LINE> template_data = {"name": ""} <NEW_LINE> response = services.append_collection_template(response, template_data) <NEW_LINE> self.assertEqual(response.data['template'], {'data': [{'name': 'name', 'value': ''}]}) <NEW_LINE> <DEDENT> def test_append_collection_querylist(self): <NEW_LINE> <INDENT> response = self.response <NEW_LINE> query_urls = [self.endpoint] <NEW_LINE> response = services.append_collection_querylist(response, query_urls) <NEW_LINE> self.assertEqual(response.data['queries'], [{'href': query_urls[0], 'rel': 'search', "data": [{"name": "name", "value": ""}]}])
Test top-level functions in the services module
62599068e1aae11d1e7cf3db
class LeaderboardStanding(CachingMixin, models.Model): <NEW_LINE> <INDENT> ranking = models.PositiveIntegerField() <NEW_LINE> user = models.ForeignKey(User) <NEW_LINE> value = models.PositiveIntegerField(default=0) <NEW_LINE> metric = models.CharField(max_length=255, choices=( ('link_clicks', 'Link Clicks'), ('firefox_downloads', 'Firefox Downloads'), ('firefox_os_referrals', ('Firefox OS Referrals')) )) <NEW_LINE> objects = CachingManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('ranking', 'metric') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'{metric}: {ranking}'.format(metric=self.metric, ranking=self.ranking)
Ranking in a leaderboard for a specific metric.
62599068435de62698e9d5a8
class base_data_reader_with_labels(base_data_reader): <NEW_LINE> <INDENT> def __init__(self,data_path,mode): <NEW_LINE> <INDENT> assert mode == 'TRAIN' or mode == 'PREDICT', 'UNKOW MODE' <NEW_LINE> self.mode = mode <NEW_LINE> if mode == 'TRAIN': <NEW_LINE> <INDENT> self.samples, self.labels = self.load_data_with_label(data_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.samples = self.load_data_without_label(data_path) <NEW_LINE> <DEDENT> <DEDENT> def load_data_with_label(self,data_path): <NEW_LINE> <INDENT> labels = [] <NEW_LINE> samples = [] <NEW_LINE> with open(data_path,'r') as f: <NEW_LINE> <INDENT> for i in f.readlines(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> label = self.process_label(i[0]) <NEW_LINE> sample = self.process_sample(i[2:]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('load data error:{}'.format(i)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> labels.append(label) <NEW_LINE> samples.append(sample) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> assert len(labels) == len(samples) <NEW_LINE> print("get data with label {}".format(len(labels))) <NEW_LINE> print('labels like:{}'.format(labels[:2])) <NEW_LINE> print('samples like:{}'.format(samples[:2])) <NEW_LINE> return samples, labels <NEW_LINE> <DEDENT> def load_data_without_label(self,data_path): <NEW_LINE> <INDENT> with open(data_path, 'r') as f: <NEW_LINE> <INDENT> samples = [self.process_sample(i) for i in f.readlines()] <NEW_LINE> <DEDENT> print("get data without label {}".format(len(samples))) <NEW_LINE> print('samples like:{}'.format(samples[:2])) <NEW_LINE> return samples <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> if self.mode == 'TRAIN': <NEW_LINE> <INDENT> return self.samples, self.labels <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.samples, None <NEW_LINE> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def process_label(self,label): <NEW_LINE> <INDENT> return int(label) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def process_sample(self, sample): <NEW_LINE> <INDENT> return sample
Data file lines should look like: 1,how are you 0,fuck you (there is no title line)
62599068aad79263cf42ff56
class IntentAdmin(GuardedModelAdmin): <NEW_LINE> <INDENT> list_display = ('space', 'user', 'token', 'requested_on') <NEW_LINE> search_fields = ('space', 'user') <NEW_LINE> fieldsets = [ (None, {'fields': ['user', 'space', 'token']}) ]
This is the administrative view to manage requests from users to participate in the spaces.
62599068097d151d1a2c280b
class Jsondata(): <NEW_LINE> <INDENT> def __init__(self, schemaID=None, xml=None, json=None, title=""): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> self.xmldata = db['xmldata'] <NEW_LINE> self.content = OrderedDict() <NEW_LINE> self.content['schema'] = schemaID <NEW_LINE> self.content['title'] = title <NEW_LINE> if (json is not None): <NEW_LINE> <INDENT> self.content['content'] = json <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.content['content'] = xmltodict.parse(xml, postprocessor=postprocessor) <NEW_LINE> <DEDENT> <DEDENT> def save(self): <NEW_LINE> <INDENT> docID = self.xmldata.insert(self.content) <NEW_LINE> return docID <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def objects(): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> xmldata = db['xmldata'] <NEW_LINE> cursor = xmldata.find(as_class = OrderedDict) <NEW_LINE> results = [] <NEW_LINE> for result in cursor: <NEW_LINE> <INDENT> results.append(result) <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def find(params): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> xmldata = db['xmldata'] <NEW_LINE> cursor = xmldata.find(params, as_class = OrderedDict) <NEW_LINE> results = [] <NEW_LINE> for result in cursor: <NEW_LINE> <INDENT> results.append(result) <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def executeQuery(query): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> xmldata = db['xmldata'] <NEW_LINE> cursor = xmldata.find(query,as_class = OrderedDict) <NEW_LINE> queryResults = [] <NEW_LINE> for result in cursor: <NEW_LINE> <INDENT> queryResults.append(result['content']) <NEW_LINE> <DEDENT> return queryResults <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def executeQueryFullResult(query): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> xmldata = db['xmldata'] <NEW_LINE> cursor = xmldata.find(query,as_class = OrderedDict) <NEW_LINE> results = [] <NEW_LINE> for result in cursor: <NEW_LINE> <INDENT> results.append(result) <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get(postID): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> xmldata = db['xmldata'] <NEW_LINE> return xmldata.find_one({'_id': ObjectId(postID)}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def delete(postID): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> xmldata = db['xmldata'] <NEW_LINE> xmldata.remove({'_id': ObjectId(postID)}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def update(postID, json): <NEW_LINE> <INDENT> client = MongoClient(MONGODB_URI) <NEW_LINE> db = client['mgi'] <NEW_LINE> xmldata = db['xmldata'] <NEW_LINE> if '_id' in json: <NEW_LINE> <INDENT> del json['_id'] <NEW_LINE> <DEDENT> xmldata.update({'_id': ObjectId(postID)}, {"$set":json}, upsert=False)
Wrapper to manage JSON Documents, like mongoengine would have managed them (but with ordered data)
62599068adb09d7d5dc0bd09
class StopRecording(base_classes.Baserequests): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> base_classes.Baserequests.__init__(self) <NEW_LINE> self.name = "StopRecording"
Stop recording. Will return an `error` if recording is not active.
625990688e71fb1e983bd265
class BaseFileLock: <NEW_LINE> <INDENT> def __init__(self, lock_file, timeout = -1): <NEW_LINE> <INDENT> self._lock_file = lock_file <NEW_LINE> self._lock_file_fd = None <NEW_LINE> self.timeout = timeout <NEW_LINE> self._thread_lock = threading.Lock() <NEW_LINE> self._lock_counter = 0 <NEW_LINE> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def lock_file(self): <NEW_LINE> <INDENT> return self._lock_file <NEW_LINE> <DEDENT> @property <NEW_LINE> def timeout(self): <NEW_LINE> <INDENT> return self._timeout <NEW_LINE> <DEDENT> @timeout.setter <NEW_LINE> def timeout(self, value): <NEW_LINE> <INDENT> self._timeout = float(value) <NEW_LINE> return None <NEW_LINE> <DEDENT> def _acquire(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def _release(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_locked(self): <NEW_LINE> <INDENT> return self._lock_file_fd is not None <NEW_LINE> <DEDENT> def acquire(self, timeout=None, poll_intervall=0.05): <NEW_LINE> <INDENT> if timeout is None: <NEW_LINE> <INDENT> timeout = self.timeout <NEW_LINE> <DEDENT> with self._thread_lock: <NEW_LINE> <INDENT> self._lock_counter += 1 <NEW_LINE> <DEDENT> lock_id = id(self) <NEW_LINE> lock_filename = self._lock_file <NEW_LINE> start_time = time.time() <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> with self._thread_lock: <NEW_LINE> <INDENT> if not self.is_locked: <NEW_LINE> <INDENT> logger().debug('Attempting to acquire lock %s on %s', lock_id, lock_filename) <NEW_LINE> self._acquire() <NEW_LINE> <DEDENT> <DEDENT> if self.is_locked: <NEW_LINE> <INDENT> logger().debug('Lock %s acquired on %s', lock_id, lock_filename) <NEW_LINE> break <NEW_LINE> <DEDENT> elif timeout >= 0 and time.time() - start_time > timeout: <NEW_LINE> <INDENT> logger().debug('Timeout on acquiring lock %s on %s', lock_id, lock_filename) <NEW_LINE> raise Timeout(self._lock_file) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger().debug( 'Lock %s not acquired on %s, waiting %s seconds ...', lock_id, lock_filename, poll_intervall ) <NEW_LINE> time.sleep(poll_intervall) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> with self._thread_lock: <NEW_LINE> <INDENT> self._lock_counter = max(0, self._lock_counter - 1) <NEW_LINE> <DEDENT> raise <NEW_LINE> <DEDENT> return _Acquire_ReturnProxy(lock = self) <NEW_LINE> <DEDENT> def release(self, force = False): <NEW_LINE> <INDENT> with self._thread_lock: <NEW_LINE> <INDENT> if self.is_locked: <NEW_LINE> <INDENT> self._lock_counter -= 1 <NEW_LINE> if self._lock_counter == 0 or force: <NEW_LINE> <INDENT> lock_id = id(self) <NEW_LINE> lock_filename = self._lock_file <NEW_LINE> logger().debug('Attempting to release lock %s on %s', lock_id, lock_filename) <NEW_LINE> self._release() <NEW_LINE> self._lock_counter = 0 <NEW_LINE> logger().debug('Lock %s released on %s', lock_id, lock_filename) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.acquire() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> self.release() <NEW_LINE> return None <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.release(force = True) <NEW_LINE> return None
Implements the base class of a file lock.
6259906891f36d47f2231a5e
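BaseFileLock leaves `_acquire` and `_release` abstract, so a sketch of a POSIX-style subclass is shown below; using `fcntl.flock` here is an assumption for illustration rather than the library's actual platform-specific implementation.

import fcntl, os

class SimpleUnixFileLock(BaseFileLock):
    def _acquire(self):
        fd = os.open(self._lock_file, os.O_RDWR | os.O_CREAT, 0o644)
        try:
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError:
            os.close(fd)              # lock held elsewhere; acquire() will keep polling
        else:
            self._lock_file_fd = fd

    def _release(self):
        fd = self._lock_file_fd
        self._lock_file_fd = None
        fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)

with SimpleUnixFileLock("/tmp/example.lock", timeout=5):
    pass                              # critical section; released on exit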
class Column(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, 'type': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(Column, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs['name'] <NEW_LINE> self.type = kwargs['type']
Query result column descriptor. All required parameters must be populated in order to send to Azure. :param name: Required. Column name. :type name: str :param type: Required. Column data type. Possible values include: "string", "integer", "number", "boolean", "object". :type type: str or ~azure.mgmt.resourcegraph.models.ColumnDataType
625990688da39b475be0498a
class NotificationsDB(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TABLE = "Notifications" <NEW_LINE> self.UID = "user_id" <NEW_LINE> self.NTF = "notification" <NEW_LINE> self.DATE = "date" <NEW_LINE> self.sql = MySQL() <NEW_LINE> <DEDENT> def send_notification(self, notification): <NEW_LINE> <INDENT> args = (notification[self.UID], notification[self.NTF], notification[self.DATE]) <NEW_LINE> string_format = "%s" <NEW_LINE> for i in xrange(len(args)-1): <NEW_LINE> <INDENT> string_format += ",%s" <NEW_LINE> <DEDENT> self.sql.insert_into(self.TABLE, string_format, args)
Handles the Notifications request database: +-----------+----------------+---------+ | user_id | notification | date | +-----------+----------------+---------+
62599068b7558d5895464aff
class TestIoK8sApiCoreV1ResourceQuota(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testIoK8sApiCoreV1ResourceQuota(self): <NEW_LINE> <INDENT> pass
IoK8sApiCoreV1ResourceQuota unit test stubs
625990688e7ae83300eea82d
class ParameterInt(Parameter): <NEW_LINE> <INDENT> def __init__(self, **kwds): <NEW_LINE> <INDENT> super().__init__(**kwds) <NEW_LINE> self.ptype = "int" <NEW_LINE> <DEDENT> def toType(self, new_value): <NEW_LINE> <INDENT> return int(new_value)
Integer parameter.
62599068f7d966606f74948a
class Debit(Transaction): <NEW_LINE> <INDENT> type = 'debits' <NEW_LINE> uri_gen = wac.URIGen('/debits', '{debit}') <NEW_LINE> def refund(self, **kwargs): <NEW_LINE> <INDENT> return Refund( href=self.refunds.href, **kwargs ).save()
A Debit represents a transfer of funds from a FundingInstrument to your Marketplace's escrow account. A Debit may be created directly, or it will be created as a side-effect of capturing a CardHold. If you create a Debit directly it will implicitly create the associated CardHold if the FundingInstrument supports this.
62599068d486a94d0ba2d75d
class Solution: <NEW_LINE> <INDENT> def getMinimumStringArray(self, tagList, allTags): <NEW_LINE> <INDENT> e = {} <NEW_LINE> ec = 0 <NEW_LINE> need = set(tagList) <NEW_LINE> e = {tag: 0 for tag in need} <NEW_LINE> for tag in tagList: <NEW_LINE> <INDENT> e[tag] += 1 <NEW_LINE> ec += 1 <NEW_LINE> <DEDENT> left, right = -1, 0 <NEW_LINE> m = {tag: 0 for tag in need} <NEW_LINE> ans = sys.maxint <NEW_LINE> rc = 0 <NEW_LINE> while right < len(allTags): <NEW_LINE> <INDENT> tag = allTags[right] <NEW_LINE> if tag in need: <NEW_LINE> <INDENT> m[tag] += 1 <NEW_LINE> if m[tag] <= e[tag]: <NEW_LINE> <INDENT> rc += 1 <NEW_LINE> <DEDENT> while rc == ec: <NEW_LINE> <INDENT> l = right - left <NEW_LINE> ans = min(ans, l) <NEW_LINE> left += 1 <NEW_LINE> rw = allTags[left] <NEW_LINE> if rw in need: <NEW_LINE> <INDENT> m[rw] -= 1 <NEW_LINE> if m[rw] < e[rw]: <NEW_LINE> <INDENT> rc -= 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> right += 1 <NEW_LINE> <DEDENT> return -1 if ans == sys.maxint else ans
@param tagList: The tag list. @param allTags: All the tags. @return: The length of the shortest contiguous window of allTags that contains every tag in tagList (with multiplicity), or -1 if no such window exists.
625990682ae34c7f260ac887
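A quick sanity check of the sliding-window routine above (note that `sys.maxint` makes it Python 2 code):

sol = Solution()
# the shortest window of allTags covering both "a" and "b" is ["b", "a"], length 2
print(sol.getMinimumStringArray(["a", "b"], ["a", "c", "b", "a", "d"]))   # 2
# "x" never appears in allTags, so no valid window exists
print(sol.getMinimumStringArray(["x"], ["a", "b"]))                       # -1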
class TestFilterIdGroupIdUpdatedAtArray(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testFilterIdGroupIdUpdatedAtArray(self): <NEW_LINE> <INDENT> pass
FilterIdGroupIdUpdatedAtArray unit test stubs
6259906823849d37ff852855
class A: <NEW_LINE> <INDENT> def __init__(self, value, max_valuse=None, step=1): <NEW_LINE> <INDENT> self.step = step <NEW_LINE> if max_valuse is None: <NEW_LINE> <INDENT> self.value = -self.step <NEW_LINE> self.max_value = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.value = value - self.step <NEW_LINE> self.max_value = max_valuse <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.step > 0 and (self.value + self.step) < self.max_value: <NEW_LINE> <INDENT> self.value += self.step <NEW_LINE> return self.value <NEW_LINE> <DEDENT> elif self.step < 0 and (self.value + self.step) > self.max_value: <NEW_LINE> <INDENT> self.value += self.step <NEW_LINE> return self.value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise StopIteration()
Implements the range function.
625990684e4d562566373ba6
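A quick check of the range-like iterator above, covering the one-argument, start/stop/step, and negative-step forms:

print(list(A(5)))            # [0, 1, 2, 3, 4]
print(list(A(2, 10, 2)))     # [2, 4, 6, 8]
print(list(A(10, 2, -2)))    # [10, 8, 6, 4]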
class PivotSuggestions(Model): <NEW_LINE> <INDENT> _validation = { 'pivot': {'required': True}, 'suggestions': {'required': True}, } <NEW_LINE> _attribute_map = { 'pivot': {'key': 'pivot', 'type': 'str'}, 'suggestions': {'key': 'suggestions', 'type': '[Query]'}, } <NEW_LINE> def __init__(self, *, pivot: str, suggestions, **kwargs) -> None: <NEW_LINE> <INDENT> super(PivotSuggestions, self).__init__(**kwargs) <NEW_LINE> self.pivot = pivot <NEW_LINE> self.suggestions = suggestions
Defines the pivot segment. All required parameters must be populated in order to send to Azure. :param pivot: Required. The segment from the original query to pivot on. :type pivot: str :param suggestions: Required. A list of suggested queries for the pivot. :type suggestions: list[~azure.cognitiveservices.search.imagesearch.models.Query]
6259906821bff66bcd724405
class M4(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "https://www.gnu.org/software/m4/m4.html" <NEW_LINE> url = "https://ftp.gnu.org/gnu/m4/m4-1.4.18.tar.gz" <NEW_LINE> version('1.4.18', 'a077779db287adf4e12a035029002d28') <NEW_LINE> version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76') <NEW_LINE> patch('gnulib-pgi.patch', when='@1.4.18') <NEW_LINE> patch('pgi.patch', when='@1.4.17') <NEW_LINE> patch('secure_snprintf.patch', when='platform_os = highsierra') <NEW_LINE> variant('sigsegv', default=True, description="Build the libsigsegv dependency") <NEW_LINE> depends_on('libsigsegv', when='+sigsegv') <NEW_LINE> build_directory = 'spack-build' <NEW_LINE> def configure_args(self): <NEW_LINE> <INDENT> spec = self.spec <NEW_LINE> args = ['--enable-c++'] <NEW_LINE> if spec.satisfies('%clang') and not spec.satisfies('platform=darwin'): <NEW_LINE> <INDENT> args.append('CFLAGS=-rtlib=compiler-rt') <NEW_LINE> <DEDENT> if spec.satisfies('%intel'): <NEW_LINE> <INDENT> args.append('CFLAGS=-no-gcc') <NEW_LINE> <DEDENT> if '+sigsegv' in spec: <NEW_LINE> <INDENT> args.append('--with-libsigsegv-prefix={0}'.format( spec['libsigsegv'].prefix)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args.append('--without-libsigsegv-prefix') <NEW_LINE> <DEDENT> arch = spec.architecture <NEW_LINE> if (arch.platform == 'darwin' and arch.platform_os == 'sierra' and '%gcc' in spec): <NEW_LINE> <INDENT> args.append('ac_cv_type_struct_sched_param=yes') <NEW_LINE> <DEDENT> return args
GNU M4 is an implementation of the traditional Unix macro processor.
625990682c8b7c6e89bd4f85
class colors: <NEW_LINE> <INDENT> reset='\033[0m' <NEW_LINE> bold='\033[01m' <NEW_LINE> red='\033[31m' <NEW_LINE> cyan='\033[36m' <NEW_LINE> yellow='\033[93m'
pretty terminal colors
625990687c178a314d78e7bb
class Hsts_preloading(Hsts_base): <NEW_LINE> <INDENT> stix = Bundled(mitigation_object=load_mitigation("HSTS_NOT_PRELOADED")) <NEW_LINE> def _get_logger(self): <NEW_LINE> <INDENT> return Logger("Hsts Not Preloaded") <NEW_LINE> <DEDENT> def _set_arguments(self): <NEW_LINE> <INDENT> self._arguments = self._instance.HSTSPRELOAD <NEW_LINE> <DEDENT> def _set_mitigations(self, result: dict, key: str, condition: bool) -> dict: <NEW_LINE> <INDENT> if condition: <NEW_LINE> <INDENT> result["mitigation"] = load_mitigation( "HSTS_NOT_PRELOADED", raise_error=False ) <NEW_LINE> <DEDENT> return result if condition else {} <NEW_LINE> <DEDENT> def _worker(self, results): <NEW_LINE> <INDENT> return self._obtain_results(results)
Analysis of the HSTS Preloading status
6259906892d797404e38972d
class XpathUtil(): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def getRoot(html): <NEW_LINE> <INDENT> return etree.HTML(html) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getNodes(node,xpath): <NEW_LINE> <INDENT> return node.xpath(xpath) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getNode(node,xpath): <NEW_LINE> <INDENT> nodes=node.xpath(xpath) <NEW_LINE> if nodes is not None and len(nodes)>=1: <NEW_LINE> <INDENT> return nodes[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def getAttribs(node, xpath): <NEW_LINE> <INDENT> return node.xpath(xpath) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getAttrib(node, xpath, trim=True, default=None): <NEW_LINE> <INDENT> result=default <NEW_LINE> attrs = node.xpath(xpath) <NEW_LINE> if attrs is not None and len(attrs) >= 1: <NEW_LINE> <INDENT> if trim: <NEW_LINE> <INDENT> result=XpathUtil.htmltrim(str(attrs[0])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result=str(attrs[0]) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getNodeAttrib(node, attrib, trim=True, default=None): <NEW_LINE> <INDENT> result=default <NEW_LINE> if attrib in node.attrib: <NEW_LINE> <INDENT> if trim: <NEW_LINE> <INDENT> result = XpathUtil.htmltrim(str(node.attrib[attrib])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = str(node.attrib[attrib]) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getTexts(node, xpath): <NEW_LINE> <INDENT> return node.xpath(xpath) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getText(node, xpath, trim=True, default=None): <NEW_LINE> <INDENT> result = default <NEW_LINE> texts = node.xpath(xpath) <NEW_LINE> if texts is not None and len(texts) >= 1: <NEW_LINE> <INDENT> if trim: <NEW_LINE> <INDENT> result = XpathUtil.htmltrim(str(texts[0])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = str(texts[0]) <NEW_LINE> <DEDENT> <DEDENT> if result is None: <NEW_LINE> <INDENT> result=default; <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getJoinText(node, xpath, split='',trim=True, default=None): <NEW_LINE> <INDENT> result=default; <NEW_LINE> texts=__class__.getTexts(node,xpath) <NEW_LINE> result=split.join(texts) <NEW_LINE> if trim: <NEW_LINE> <INDENT> result=__class__.htmltrim(result) <NEW_LINE> <DEDENT> return result; <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getNodeAllText(node, trim=True, default=None): <NEW_LINE> <INDENT> result=default; <NEW_LINE> result=node.xpath('string(.)') <NEW_LINE> if trim: <NEW_LINE> <INDENT> result=__class__.htmltrim(result) <NEW_LINE> <DEDENT> return result; <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def htmltrim(str): <NEW_LINE> <INDENT> return str.replace('\xc2\xa0', ' ').strip() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def mergeBlank(str): <NEW_LINE> <INDENT> return ' '.join(str.split())
XPath utility class.
6259906826068e7796d4e0d9
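A minimal usage sketch of the static helpers above against a small HTML snippet (lxml's `etree` is the backend the class relies on):

html = '<div class="item"><a href="/a">first</a><a href="/b"> second </a></div>'
root = XpathUtil.getRoot(html)
links = XpathUtil.getNodes(root, '//a')

print(len(links))                                            # 2
print(XpathUtil.getText(root, '//a/text()'))                 # first
print(XpathUtil.getAttrib(root, '//a/@href'))                # /a
print(XpathUtil.getNodeAttrib(links[1], 'href'))             # /b
print(XpathUtil.getJoinText(root, '//a/text()', split='|'))  # first| second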
class StreamTagger(CopyStreamResult): <NEW_LINE> <INDENT> def __init__(self, targets, add=None, discard=None): <NEW_LINE> <INDENT> super(StreamTagger, self).__init__(targets) <NEW_LINE> self.add = frozenset(add or ()) <NEW_LINE> self.discard = frozenset(discard or ()) <NEW_LINE> <DEDENT> def status(self, *args, **kwargs): <NEW_LINE> <INDENT> test_tags = kwargs.get('test_tags') or set() <NEW_LINE> test_tags.update(self.add) <NEW_LINE> test_tags.difference_update(self.discard) <NEW_LINE> kwargs['test_tags'] = test_tags or None <NEW_LINE> super(StreamTagger, self).status(*args, **kwargs)
Adds or discards tags from StreamResult events.
625990688a43f66fc4bf3931
class AdaptiveConcatPool2d(nn.Module): <NEW_LINE> <INDENT> def __init__(self, output_size=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.output_size = output_size or 1 <NEW_LINE> self.ap = nn.AdaptiveAvgPool2d(self.output_size) <NEW_LINE> self.mp = nn.AdaptiveMaxPool2d(self.output_size) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return torch.cat([self.mp(x), self.ap(x)], 1)
Layer that concatenates the outputs of `AdaptiveAvgPool2d` and `AdaptiveMaxPool2d`.
62599068a8370b77170f1b65
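A shape check for the concat-pooling layer above: the channel dimension doubles because the max- and average-pooled maps are concatenated.

import torch

pool = AdaptiveConcatPool2d(1)
x = torch.randn(4, 64, 7, 7)
print(pool(x).shape)    # torch.Size([4, 128, 1, 1])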
class restrictionMiddleware(MiddlewareMixin): <NEW_LINE> <INDENT> def process_view(self,request,view_func,view_args,view_kwargs): <NEW_LINE> <INDENT> modulename=view_func.__module__ <NEW_LINE> user=request.user <NEW_LINE> if user.is_authenticated: <NEW_LINE> <INDENT> if user.user_type == "1": <NEW_LINE> <INDENT> if modulename=="account.instructorviews": <NEW_LINE> <INDENT> return redirect('dashboard') <NEW_LINE> <DEDENT> elif modulename=="account.studentviews": <NEW_LINE> <INDENT> return redirect('dashboard') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> elif user.user_type == "2": <NEW_LINE> <INDENT> if modulename == "account.instructorviews": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif modulename == "account.views": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return redirect('insdashboard') <NEW_LINE> <DEDENT> <DEDENT> elif user.user_type == "3": <NEW_LINE> <INDENT> if modulename == "account.studentviews": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif modulename == "account.views": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return redirect('studashboard') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return redirect('loginpage') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if request.path == reverse("loginpage"): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif modulename=="django.contrib.admin.sites": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse("loginpage"))
For restricting the user to view only pages they are supposed to
62599068a17c0f6771d5d777
class AbstractJointFacultyMembership(Model): <NEW_LINE> <INDENT> _limits = models.Q( app_label=swapper.get_model_name('kernel', 'Department').split('.')[0], model='department', ) | models.Q( app_label=swapper.get_model_name('kernel', 'Centre').split('.')[0], model='centre', ) <NEW_LINE> entity_content_type = models.ForeignKey( to=contenttypes_models.ContentType, on_delete=models.CASCADE, limit_choices_to=_limits, ) <NEW_LINE> entity_object_id = models.BigIntegerField() <NEW_LINE> content_object = contenttypes_fields.GenericForeignKey( ct_field='entity_content_type', fk_field='entity_object_id', ) <NEW_LINE> designation = models.CharField( max_length=63, ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> constraints = [ models.UniqueConstraint( fields=[ 'entity_content_type', 'entity_object_id', 'designation', ], name='unique_designation', ), ] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> designation = self.designation <NEW_LINE> department = self.department <NEW_LINE> return f'{designation}, {department}' <NEW_LINE> <DEDENT> @property <NEW_LINE> def department(self): <NEW_LINE> <INDENT> if self.entity_content_type.name == 'centre': <NEW_LINE> <INDENT> Class = swapper.load_model('shell', 'Centre') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Class = swapper.load_model('shell', 'Department') <NEW_LINE> <DEDENT> department = Class.objects.get(id=self.entity_object_id) <NEW_LINE> return department
This model holds information pretaining to department/centre and designation of joint faculty members.
6259906899cbb53fe6832685
class DeploymentTargetupdate(object): <NEW_LINE> <INDENT> def __init__(self, hosts=None, type=None, name=None): <NEW_LINE> <INDENT> self.swagger_types = { 'hosts': 'list[DeploymentTargetHostsupdate]', 'type': 'str', 'name': 'str' } <NEW_LINE> self.attribute_map = { 'hosts': 'hosts', 'type': 'type', 'name': 'name' } <NEW_LINE> self._hosts = hosts <NEW_LINE> self._type = type <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def hosts(self): <NEW_LINE> <INDENT> return self._hosts <NEW_LINE> <DEDENT> @hosts.setter <NEW_LINE> def hosts(self, hosts): <NEW_LINE> <INDENT> if hosts is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `hosts`, must not be `None`") <NEW_LINE> <DEDENT> self._hosts = hosts <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> if type is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `type`, must not be `None`") <NEW_LINE> <DEDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> if name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `name`, must not be `None`") <NEW_LINE> <DEDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DeploymentTargetupdate): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990687b180e01f3e49c34
class ConjugacyClassGAP(ConjugacyClass): <NEW_LINE> <INDENT> def __init__(self, group, element): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._gap_group = group._gap_() <NEW_LINE> self._gap_representative = element._gap_() <NEW_LINE> <DEDENT> except (AttributeError, TypeError): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._gap_group = group.gap() <NEW_LINE> self._gap_representative = element.gap() <NEW_LINE> <DEDENT> except (AttributeError, TypeError): <NEW_LINE> <INDENT> raise TypeError("The group %s cannot be defined as a GAP group"%group) <NEW_LINE> <DEDENT> <DEDENT> self._gap_conjugacy_class = self._gap_group.ConjugacyClass(self._gap_representative) <NEW_LINE> ConjugacyClass.__init__(self, group, element) <NEW_LINE> <DEDENT> def _gap_(self): <NEW_LINE> <INDENT> return self._gap_conjugacy_class <NEW_LINE> <DEDENT> @cached_method <NEW_LINE> def set(self): <NEW_LINE> <INDENT> from sage.sets.set import Set <NEW_LINE> try: <NEW_LINE> <INDENT> cc = self._gap_conjugacy_class.AsList().sage() <NEW_LINE> return Set([self._parent(x) for x in cc]) <NEW_LINE> <DEDENT> except NotImplementedError: <NEW_LINE> <INDENT> return ConjugacyClass.set.f(self)
Class for a conjugacy class for groups defined over GAP. Intended for wrapping GAP methods on conjugacy classes. INPUT: - ``group`` -- the group in which the conjugacy class is taken - ``element`` -- the element generating the conjugacy class EXAMPLES:: sage: G = SymmetricGroup(4) sage: g = G((1,2,3,4)) sage: ConjugacyClassGAP(G,g) Conjugacy class of (1,2,3,4) in Symmetric group of order 4! as a permutation group
62599068d268445f2663a72d
class RpcHandler(tornado.web.RequestHandler): <NEW_LINE> <INDENT> @gen.coroutine <NEW_LINE> def post(self): <NEW_LINE> <INDENT> tega_id = self.get_argument('tega_id') <NEW_LINE> path = self.get_argument('path') <NEW_LINE> args = kwargs = None <NEW_LINE> if self.request.body: <NEW_LINE> <INDENT> body = tornado.escape.json_decode(self.request.body) <NEW_LINE> args, kwargs = parse_rpc_body(body) <NEW_LINE> <DEDENT> result = yield tega.idb.rpc2(path, args, kwargs, tega_id) <NEW_LINE> if result: <NEW_LINE> <INDENT> self.write(json.dumps({'result': result})) <NEW_LINE> self.set_header('Content-Type', 'application/json')
RPC (Remote Procedure Call).
625990683eb6a72ae038be01
class SubnetGetAllRsp(ResponsePacket): <NEW_LINE> <INDENT> def __init__(self, raw_data): <NEW_LINE> <INDENT> __data = {} <NEW_LINE> __data["subnet_key_index"] = raw_data[:94] <NEW_LINE> raw_data = raw_data[94:] <NEW_LINE> assert(len(raw_data) == 0) <NEW_LINE> super(SubnetGetAllRsp, self).__init__("SubnetGetAll", 0x95, __data)
Response to a SubnetGetAll command.
62599068796e427e5384ff17
class TextValue(CellValue): <NEW_LINE> <INDENT> def __init__(self, value: str): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> if not (isinstance(value, str) and len(value) > 0 and value[0] == "'"): <NEW_LINE> <INDENT> raise ValueError(f'Значение "{value}" не является текстом!') <NEW_LINE> <DEDENT> self._value: str = value[1:]
Текст. Начинается с символа '
62599068baa26c4b54d50a46
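A small sketch of the leading-quote convention described above; `CellValue` is assumed to be defined alongside this class.

cell = TextValue("'hello")     # the leading ' marks the cell as text
print(cell._value)             # hello  (the quote itself is stripped)

try:
    TextValue("hello")         # no leading ' -> rejected
except ValueError as exc:
    print(exc)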
@admin.register(models.OrderUpdate) <NEW_LINE> class OrderUpdateAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> fields = ("started_at", "status", "completed_at") <NEW_LINE> readonly_fields = ("started_at",) <NEW_LINE> list_display = ("__str__", "status", "started_at", "completed_at") <NEW_LINE> list_editable = ("status",) <NEW_LINE> list_filter = ("status",)
Admin for the OrderUpdate models.
6259906891f36d47f2231a5f
class CollatedLabelDefinitions(object): <NEW_LINE> <INDENT> openapi_types = { 'worksheet': 'LabelDefinitions', 'user': 'LabelDefinitions', 'tenant': 'LabelDefinitions' } <NEW_LINE> attribute_map = { 'worksheet': 'worksheet', 'user': 'user', 'tenant': 'tenant' } <NEW_LINE> def __init__(self, worksheet=None, user=None, tenant=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._worksheet = None <NEW_LINE> self._user = None <NEW_LINE> self._tenant = None <NEW_LINE> self.discriminator = None <NEW_LINE> if worksheet is not None: <NEW_LINE> <INDENT> self.worksheet = worksheet <NEW_LINE> <DEDENT> if user is not None: <NEW_LINE> <INDENT> self.user = user <NEW_LINE> <DEDENT> if tenant is not None: <NEW_LINE> <INDENT> self.tenant = tenant <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def worksheet(self): <NEW_LINE> <INDENT> return self._worksheet <NEW_LINE> <DEDENT> @worksheet.setter <NEW_LINE> def worksheet(self, worksheet): <NEW_LINE> <INDENT> self._worksheet = worksheet <NEW_LINE> <DEDENT> @property <NEW_LINE> def user(self): <NEW_LINE> <INDENT> return self._user <NEW_LINE> <DEDENT> @user.setter <NEW_LINE> def user(self, user): <NEW_LINE> <INDENT> self._user = user <NEW_LINE> <DEDENT> @property <NEW_LINE> def tenant(self): <NEW_LINE> <INDENT> return self._tenant <NEW_LINE> <DEDENT> @tenant.setter <NEW_LINE> def tenant(self, tenant): <NEW_LINE> <INDENT> self._tenant = tenant <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CollatedLabelDefinitions): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CollatedLabelDefinitions): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62599068fff4ab517ebcefbc
class MmsInstanceInfoList(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Total = None <NEW_LINE> self.List = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Total = params.get("Total") <NEW_LINE> if params.get("List") is not None: <NEW_LINE> <INDENT> self.List = [] <NEW_LINE> for item in params.get("List"): <NEW_LINE> <INDENT> obj = MmsInstanceInfo() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.List.append(obj) <NEW_LINE> <DEDENT> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
彩信实例状态列表
625990687d847024c075db7a
class DetectedTerms(Model): <NEW_LINE> <INDENT> _attribute_map = { 'index': {'key': 'Index', 'type': 'int'}, 'original_index': {'key': 'OriginalIndex', 'type': 'int'}, 'list_id': {'key': 'ListId', 'type': 'int'}, 'term': {'key': 'Term', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, index: int=None, original_index: int=None, list_id: int=None, term: str=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(DetectedTerms, self).__init__(**kwargs) <NEW_LINE> self.index = index <NEW_LINE> self.original_index = original_index <NEW_LINE> self.list_id = list_id <NEW_LINE> self.term = term
Detected Terms details. :param index: Index(Location) of the detected profanity term in the input text content. :type index: int :param original_index: Original Index(Location) of the detected profanity term in the input text content. :type original_index: int :param list_id: Matched Terms list Id. :type list_id: int :param term: Detected profanity term. :type term: str
6259906838b623060ffaa422
class SBEnvironment(object): <NEW_LINE> <INDENT> thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> _lldb.SBEnvironment_swiginit(self, _lldb.new_SBEnvironment(*args)) <NEW_LINE> <DEDENT> __swig_destroy__ = _lldb.delete_SBEnvironment <NEW_LINE> def GetNumValues(self) -> int: <NEW_LINE> <INDENT> return _lldb.SBEnvironment_GetNumValues(self) <NEW_LINE> <DEDENT> def Get(self, name: str) -> str: <NEW_LINE> <INDENT> return _lldb.SBEnvironment_Get(self, name) <NEW_LINE> <DEDENT> def GetNameAtIndex(self, index: int) -> str: <NEW_LINE> <INDENT> return _lldb.SBEnvironment_GetNameAtIndex(self, index) <NEW_LINE> <DEDENT> def GetValueAtIndex(self, index: int) -> str: <NEW_LINE> <INDENT> return _lldb.SBEnvironment_GetValueAtIndex(self, index) <NEW_LINE> <DEDENT> def GetEntries(self) -> 'SBStringList': <NEW_LINE> <INDENT> return _lldb.SBEnvironment_GetEntries(self) <NEW_LINE> <DEDENT> def PutEntry(self, name_and_value: str): <NEW_LINE> <INDENT> return _lldb.SBEnvironment_PutEntry(self, name_and_value) <NEW_LINE> <DEDENT> def SetEntries(self, entries: "SBStringList", append: bool): <NEW_LINE> <INDENT> return _lldb.SBEnvironment_SetEntries(self, entries, append) <NEW_LINE> <DEDENT> def Set(self, name: str, value: str, overwrite: bool) -> bool: <NEW_LINE> <INDENT> return _lldb.SBEnvironment_Set(self, name, value, overwrite) <NEW_LINE> <DEDENT> def Unset(self, name: str) -> bool: <NEW_LINE> <INDENT> return _lldb.SBEnvironment_Unset(self, name) <NEW_LINE> <DEDENT> def Clear(self): <NEW_LINE> <INDENT> return _lldb.SBEnvironment_Clear(self)
Represents the environment of a certain process. Example: for entry in lldb.debugger.GetSelectedTarget().GetEnvironment().GetEntries(): print(entry)
62599068d6c5a102081e38c8
class SchemaValidation: <NEW_LINE> <INDENT> def __init__(self, spec_path): <NEW_LINE> <INDENT> self._spec_dict = load(open(spec_path, mode="r"), Loader=FullLoader) <NEW_LINE> self._schemas = dict() <NEW_LINE> for key, value in self._spec_dict["definitions"].items(): <NEW_LINE> <INDENT> self._schemas[key] = compile(value) <NEW_LINE> <DEDENT> <DEDENT> def validate(self, schema_name): <NEW_LINE> <INDENT> def _decorator(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def _wrapper(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> validator = self._schemas[schema_name] <NEW_LINE> body = request.get_json() <NEW_LINE> validator(body) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise SchemaNotFoundError(schema_name) <NEW_LINE> <DEDENT> except JsonSchemaException as e: <NEW_LINE> <INDENT> raise UnprocessableEntity(e.message) <NEW_LINE> <DEDENT> kwargs["body"] = body <NEW_LINE> return func(*args, **kwargs) <NEW_LINE> <DEDENT> return _wrapper <NEW_LINE> <DEDENT> return _decorator
A Swagger validator for Flask request bodies This validator does not follow references in the schema. To be able to use it with schemas including $ref keys, schemas should be bundled by dereferencing all keys. https://www.npmjs.com/package/swagger-cli can be used for this purpose. Ex: swagger-cli bundle -r -o swagger-flat.yml -t yaml swagger.yml
62599068d486a94d0ba2d75f
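A usage sketch of the decorator above on a Flask route; the spec path and the "Pet" schema name are assumptions for illustration.

from flask import Flask, jsonify

app = Flask(__name__)
validator = SchemaValidation("swagger-flat.yml")    # bundled spec with a definitions: section

@app.route("/pets", methods=["POST"])
@validator.validate("Pet")                          # "Pet" must exist under definitions
def create_pet(body):                               # the validated JSON body is injected as a kwarg
    return jsonify(body), 201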
class DevJiraConfig(JiraConfig): <NEW_LINE> <INDENT> FLASK_ENV = 'development' <NEW_LINE> LOGGING_LEVEL = 'DEBUG' <NEW_LINE> DEBUG = True <NEW_LINE> TESTING = True <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._jira_url = None <NEW_LINE> self._jira_access_token = None <NEW_LINE> self._jira_access_token_secret = None <NEW_LINE> self._jira_consumer_key = None <NEW_LINE> self._jira_key_cert = None <NEW_LINE> self._jira_project = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def JIRA_URL(self): <NEW_LINE> <INDENT> if self._jira_url is None: <NEW_LINE> <INDENT> secret = secrets.EnvironmentVariableSecret('JIRA_URL') <NEW_LINE> self._jira_url = secret.get_secret_value() <NEW_LINE> <DEDENT> return self._jira_url <NEW_LINE> <DEDENT> @property <NEW_LINE> def JIRA_ACCESS_TOKEN(self): <NEW_LINE> <INDENT> if self._jira_access_token is None: <NEW_LINE> <INDENT> secret = secrets.EnvironmentVariableSecret('JIRA_ACCESS_TOKEN') <NEW_LINE> self._jira_access_token = secret.get_secret_value() <NEW_LINE> <DEDENT> return self._jira_access_token <NEW_LINE> <DEDENT> @property <NEW_LINE> def JIRA_ACCESS_TOKEN_SECRET(self): <NEW_LINE> <INDENT> if self._jira_access_token_secret is None: <NEW_LINE> <INDENT> secret = secrets.EnvironmentVariableSecret('JIRA_ACCESS_TOKEN_SECRET') <NEW_LINE> self._jira_access_token_secret = secret.get_secret_value() <NEW_LINE> <DEDENT> return self._jira_access_token_secret <NEW_LINE> <DEDENT> @property <NEW_LINE> def JIRA_CONSUMER_KEY(self): <NEW_LINE> <INDENT> if self._jira_consumer_key is None: <NEW_LINE> <INDENT> secret = secrets.EnvironmentVariableSecret('JIRA_CONSUMER_KEY') <NEW_LINE> self._jira_consumer_key = secret.get_secret_value() <NEW_LINE> <DEDENT> return self._jira_consumer_key <NEW_LINE> <DEDENT> @property <NEW_LINE> def JIRA_KEY_CERT(self): <NEW_LINE> <INDENT> if self._jira_key_cert is None: <NEW_LINE> <INDENT> secret = secrets.EnvironmentVariableSecret('JIRA_KEY_CERT') <NEW_LINE> self._jira_key_cert = secret.get_secret_value() <NEW_LINE> <DEDENT> return self._jira_key_cert <NEW_LINE> <DEDENT> @property <NEW_LINE> def JIRA_PROJECT(self): <NEW_LINE> <INDENT> if self._jira_project is None: <NEW_LINE> <INDENT> secret = secrets.EnvironmentVariableSecret('JIRA_PROJECT') <NEW_LINE> self._jira_project = secret.get_secret_value() <NEW_LINE> <DEDENT> return self._jira_project
Development Jira config.
625990687d43ff2487427fe1
class TestJoinClusterParameters(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testJoinClusterParameters(self): <NEW_LINE> <INDENT> pass
JoinClusterParameters unit test stubs
625990687c178a314d78e7bc
class Admin(User): <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, username, sex, age, privileges): <NEW_LINE> <INDENT> super().__init__(first_name, last_name, username, sex, age) <NEW_LINE> self.privileges = privileges <NEW_LINE> <DEDENT> def show_privileges(self): <NEW_LINE> <INDENT> for privilege in self.privileges: <NEW_LINE> <INDENT> print(f"- {privilege}")
Model of a user that has extra privileges over regular users.
6259906844b2445a339b7530
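A short usage sketch; the `User` base class is assumed to accept the positional arguments forwarded by `super().__init__` above.

privileges = ["can add user", "can delete posts", "can ban user"]
admin = Admin("Ada", "Lovelace", "ada_l", "female", 36, privileges)
admin.show_privileges()
# - can add user
# - can delete posts
# - can ban user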
class PollingThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, q_visibility, logging): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.logging = logging <NEW_LINE> self.timestamp = None <NEW_LINE> self.gpio_value = None <NEW_LINE> self.q_visibility = q_visibility <NEW_LINE> GPIO.setmode(GPIO.BOARD) <NEW_LINE> GPIO.setup(PIN_ONOFF, GPIO.IN, pull_up_down=GPIO.PUD_UP) <NEW_LINE> GPIO.setup(LED_ONLINE, GPIO.OUT) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> self.gpio_value = GPIO.input(PIN_ONOFF) <NEW_LINE> if self.gpio_value: <NEW_LINE> <INDENT> self.q_visibility.put('HIDE') <NEW_LINE> GPIO.output(LED_ONLINE, GPIO.LOW) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.q_visibility.put('SHOW') <NEW_LINE> GPIO.output(LED_ONLINE, GPIO.HIGH) <NEW_LINE> <DEDENT> self.timestamp = time.time() <NEW_LINE> self.logging.debug("GPIO Status [" + str(self.gpio_value) + '] @ ' + str(round(self.timestamp)))
This thread will keep checking the On/Off status of the main switch. If - On: The GUI of the launcher will be shown. - Off: The GUI of the launcher will be hidden.
62599068cc40096d6161adb1
class LSTMCombinerNetwork(nn.Module): <NEW_LINE> <INDENT> def __init__(self, embedding_dim, num_layers, dropout): <NEW_LINE> <INDENT> super(LSTMCombinerNetwork, self).__init__() <NEW_LINE> self.embedding_dim = embedding_dim <NEW_LINE> self.num_layers = num_layers <NEW_LINE> self.use_cuda = False <NEW_LINE> self.mlplstm = nn.LSTM(2*self.embedding_dim, self.embedding_dim, num_layers=self.num_layers,dropout=dropout) <NEW_LINE> self.hidden = self.init_hidden() <NEW_LINE> <DEDENT> def init_hidden(self): <NEW_LINE> <INDENT> if self.use_cuda: <NEW_LINE> <INDENT> return (ag.Variable(cuda.FloatTensor(self.num_layers, 1, self.embedding_dim).zero_()), ag.Variable(cuda.FloatTensor(self.num_layers, 1, self.embedding_dim).zero_())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (ag.Variable(torch.FloatTensor(self.num_layers, 1, self.embedding_dim).zero_()), ag.Variable(torch.FloatTensor(self.num_layers, 1, self.embedding_dim).zero_())) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, head_embed, modifier_embed): <NEW_LINE> <INDENT> x = utils.concat_and_flatten([head_embed, modifier_embed]).view(1, 1,-1) <NEW_LINE> y= self.mlplstm(x,self.hidden) <NEW_LINE> out,self.hidden=y <NEW_LINE> return self.hidden[0][0] <NEW_LINE> <DEDENT> def clear_hidden_state(self): <NEW_LINE> <INDENT> self.hidden = self.init_hidden()
A combiner network that does a sequence model over states, rather than just some simple encoder like above. Input: 2 embeddings, the head embedding and modifier embedding Output: Concatenate the 2 embeddings together and do one timestep of the LSTM, returning the hidden state, which will be placed on the stack.
6259906826068e7796d4e0db
class _Coordinates: <NEW_LINE> <INDENT> def __init__(self, beg=0, end=0, sep=0): <NEW_LINE> <INDENT> self.beg = beg <NEW_LINE> self.end = end <NEW_LINE> self.sep = sep
Define the coordinates of a table's columns. The coordinates specify a location on the screen and are referred to as screen coordinates. They define the range occupied by one column of a table (table column refers to a column in a table in a database). Instance variables: beg: The screen column at which the table column begins. end: The screen column at which the table column ends. sep: The screen column that separates adjacent table columns. This is where the character used to separate columns is drawn.
625990683617ad0b5ee078f4