code | docstring | _id
---|---|---|
```python
class Vars:
    key_change = 'keyChange'
    scope = 'scope'
    scope_mode = 'scopeMode'
    scope_change = 'scopeChange'
    function_mode = 'functionMode'
    function_code = 'functionCode'
    function_url = 'functionURL'
    function_send = 'functionSend'
    dialog_style_setting = 'hotkey_dialog_style'
```
Stub for these hotkey strings. | 6259906797e22403b383c68c |

```python
class CombinedPASCALDatasetsGeneratorFactory:

    def __init__(self, voc_config, hariharan_config, size_factor, categories_count):
        self.voc_config = voc_config
        self.hariharan_config = hariharan_config
        self.size_factor = size_factor
        self.indices_to_colors_map, self.void_color = get_colors_info(categories_count)
        self.combined_datasets_filenames = self._get_combined_datasets_filenames()

    def get_generator(self):
        local_combined_datasets_filenames = copy.deepcopy(self.combined_datasets_filenames)
        sample_getters_map = {
            "voc": self._get_voc_sample,
            "hariharan": self._get_hariharan_sample,
        }
        while True:
            random.shuffle(local_combined_datasets_filenames)
            for dataset, filename in local_combined_datasets_filenames:
                image, segmentation = sample_getters_map[dataset](filename)
                target_size = get_target_image_size(image.shape[:2], self.size_factor)
                target_size = target_size[1], target_size[0]
                image = cv2.resize(image, target_size, interpolation=cv2.INTER_CUBIC)
                segmentation = cv2.resize(segmentation, target_size,
                                          interpolation=cv2.INTER_NEAREST)
                yield image, segmentation

    def get_size(self):
        return len(self.combined_datasets_filenames)

    def _get_combined_datasets_filenames(self):
        voc_filenames_list = get_dataset_filenames(
            self.voc_config["data_directory"], self.voc_config["data_set_path"])
        hariharan_filenames_list = get_dataset_filenames(
            self.hariharan_config["data_directory"], self.hariharan_config["data_set_path"])
        unique_hariharan_filenames_list = list(
            set(hariharan_filenames_list).difference(voc_filenames_list))
        combined_datasets_filenames = \
            [("voc", filename) for filename in voc_filenames_list] + \
            [("hariharan", filename) for filename in unique_hariharan_filenames_list]
        return combined_datasets_filenames

    def _get_voc_sample(self, filename):
        image_path = os.path.join(self.voc_config["data_directory"],
                                  "JPEGImages/{}.jpg".format(filename))
        segmentation_path = os.path.join(self.voc_config["data_directory"],
                                         "SegmentationClass/{}.png".format(filename))
        return cv2.imread(image_path), cv2.imread(segmentation_path)

    def _get_hariharan_sample(self, filename):
        image_path = os.path.join(self.hariharan_config["data_directory"],
                                  "dataset/img", filename + ".jpg")
        image = cv2.imread(image_path)
        segmentation_path = os.path.join(self.hariharan_config["data_directory"],
                                         "dataset/cls", filename + ".mat")
        segmentation_data = scipy.io.loadmat(segmentation_path)
        segmentation_matrix = segmentation_data["GTcls"][0][0][1]
        segmentation = self.void_color * np.ones(shape=image.shape, dtype=np.uint8)
        for category_index in set(segmentation_matrix.reshape(-1)):
            segmentation[segmentation_matrix == category_index] = \
                self.indices_to_colors_map[category_index]
        return image, segmentation
```
Factory class that merges VOC 2012 and Hariharan's PASCAL datasets.
Builds a generator that returns (image, segmentation) tuples. | 625990677b25080760ed88a1 |

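A minimal usage sketch for the factory above. The config keys mirror what the constructor reads; the directory paths, `size_factor`, and `categories_count` values are assumptions for illustration, and the helper functions (`get_colors_info`, `get_dataset_filenames`, `get_target_image_size`) are expected to come from the surrounding module.

```python
# Hypothetical configs: keys match what the factory reads, paths are made up.
voc_config = {"data_directory": "VOC2012",
              "data_set_path": "ImageSets/Segmentation/train.txt"}
hariharan_config = {"data_directory": "benchmark_RELEASE",
                    "data_set_path": "dataset/train.txt"}

factory = CombinedPASCALDatasetsGeneratorFactory(
    voc_config, hariharan_config, size_factor=32, categories_count=21)
print(factory.get_size())                             # number of combined samples
image, segmentation = next(factory.get_generator())   # endless shuffled stream
```
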
```python
class RestrictionSerializer(object):

    @staticmethod
    def get_serializable_restriction(restriction):
        serializable_restriction = {}
        serializable_restriction["index"] = restriction.index
        serializable_restriction["name"] = restriction.name
        serializable_restriction["objective_func"] = restriction.objective_func
        serializable_restriction["normalization"] = restriction.normalization
        serializable_restriction["default_low"] = restriction.default_low
        serializable_restriction["default_upp"] = restriction.default_upp
        serializable_restriction["dec_pt"] = restriction.dec_pt
        return serializable_restriction

    @staticmethod
    def serialize(restriction):
        return json.dumps(
            RestrictionSerializer.get_serializable_restriction(restriction), indent=4)

    @staticmethod
    def serialize_dict(restriction_dict):
        serializable_dict = {}
        for index, restriction in restriction_dict.items():
            serializable_dict[index] = \
                RestrictionSerializer.get_serializable_restriction(restriction)
        return serializable_dict

    @staticmethod
    def get_restriction(serialized_restriction):
        restriction = Restriction(serialized_restriction["index"],
                                  serialized_restriction["name"],
                                  serialized_restriction["objective_func"],
                                  serialized_restriction["normalization"],
                                  serialized_restriction["default_low"],
                                  serialized_restriction["default_upp"],
                                  dec_pt=serialized_restriction["dec_pt"])
        return restriction

    @staticmethod
    def deserialize(json_str):
        serialized_restriction_dict = json.loads(json_str)
        return RestrictionSerializer.get_restriction(serialized_restriction_dict)

    @staticmethod
    def deserialize_dict(serialized_restriction_dict):
        restriction_dict = {}
        for i, serialized_restriction in serialized_restriction_dict.items():
            restriction_dict[i] = RestrictionSerializer.get_restriction(serialized_restriction)
        return restriction_dict
```
A class to support serializing/deserializing of a single restriction and dictionaries of restrictions. Needs improvement | 6259906701c39578d7f142f4 |

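A hedged round-trip sketch: the `Restriction` constructor signature is inferred from `get_restriction` above, and the field values are invented for illustration.

```python
# Hypothetical Restriction instance; argument order mirrors get_restriction().
r = Restriction(0, 'width', 'minimize', 'linear', 0.0, 10.0, dec_pt=2)

json_str = RestrictionSerializer.serialize(r)           # Restriction -> JSON text
restored = RestrictionSerializer.deserialize(json_str)  # JSON text -> Restriction
assert restored.name == 'width'
```
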
```python
class ColorFactory(object):

    _colors = weakref.WeakValueDictionary()

    @classmethod
    def get_color(cls, name):
        value = cls._colors.get(name)
        if value is None:
            value = Color(name)
            cls._colors[name] = value
        return value
```
Flyweight factory | 625990677047854f46340b33 |

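The flyweight contract is that repeated lookups share one instance. A sketch, assuming a `Color` class that takes a name exists in the factory's module:

```python
red_a = ColorFactory.get_color('red')
red_b = ColorFactory.get_color('red')
assert red_a is red_b  # one shared Color per name while references are held
# Values are weakly referenced, so unused colors can be garbage-collected.
```
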
```python
class SocksProxy(registry.String):

    def setValue(self, v):
        if ':' not in v:
            self.error()
        try:
            int(v.rsplit(':', 1)[1])
        except ValueError:
            self.error()
        super(SocksProxy, self).setValue(v)
```
Value must be a valid hostname:port string. | 6259906723849d37ff852835 |

```python
class ChatImageList(_ChatFileList):
    serializer_class = ChatImageSerializer
    queryset = ChatImage.objects.all()
    ActionType = ChatImageMessageAction
    field = IMAGE_MESSAGE_FIELD
```
post:
requires chat-type from url (private-chat, group-chat, encrypted-private-chat) and its id.
Creates an image in the related chat.
get:
requires chat-type from url (private-chat, group-chat, encrypted-private-chat) and its id.
Gets the list of images in the related chat. | 6259906792d797404e38971d |

```python
class GeoDataMixin:

    @need_extension('GeoData')
    def loadcoordinfo(self, page):
        title = page.title(with_section=False)
        query = self._generator(api.PropertyGenerator,
                                type_arg='coordinates',
                                titles=title.encode(self.encoding()),
                                coprop=['type', 'name', 'dim', 'country',
                                        'region', 'globe'],
                                coprimary='all')
        self._update_page(page, query)
```
APISite mixin for GeoData extension. | 625990672ae34c7f260ac867 |

```python
class TermRange(RangeMixin, terms.MultiTerm):

    def __init__(self, fieldname, start, end, startexcl=False, endexcl=False,
                 boost=1.0, constantscore=True):
        self.fieldname = fieldname
        self.start = start
        self.end = end
        self.startexcl = startexcl
        self.endexcl = endexcl
        self.boost = boost
        self.constantscore = constantscore

    def normalize(self):
        if self.start in ('', None) and self.end in (u('\uffff'), None):
            from whoosh.query import Every
            return Every(self.fieldname, boost=self.boost)
        elif self.start == self.end:
            if self.startexcl or self.endexcl:
                return qcore.NullQuery
            return terms.Term(self.fieldname, self.start, boost=self.boost)
        else:
            return TermRange(self.fieldname, self.start, self.end,
                             self.startexcl, self.endexcl, boost=self.boost)

    def _btexts(self, ixreader):
        fieldname = self.fieldname
        field = ixreader.schema[fieldname]
        startexcl = self.startexcl
        endexcl = self.endexcl
        if self.start is None:
            start = b("")
        else:
            try:
                start = field.to_bytes(self.start)
            except ValueError:
                return
        if self.end is None:
            end = b("\xFF\xFF\xFF\xFF")
        else:
            try:
                end = field.to_bytes(self.end)
            except ValueError:
                return
        for fname, t in ixreader.terms_from(fieldname, start):
            if fname != fieldname:
                break
            if t == start and startexcl:
                continue
            if t == end and endexcl:
                break
            if t > end:
                break
            yield t
```
Matches documents containing any terms in a given range.

>>> # Match documents where the indexed "id" field is greater than or equal
>>> # to 'apple' and less than or equal to 'pear'.
>>> TermRange("id", u"apple", u"pear") | 62599067d486a94d0ba2d73e |

```python
class RestaurantViewSet(viewsets.ModelViewSet):
    permission_classes = [permissions.IsAuthenticated]
    queryset = restaurant_models.Restaurant.objects.all()
    serializer_class = restaurant_serializers.RestaurantSerializer
```
Handles CRUD operations on restaurants | 62599067f7d966606f74947a |

```python
class AuthError(Exception):
    pass
```
Generic authentication/authorization errors. | 62599067d6c5a102081e38a6 |

```python
class ZWaveDeviceEntity(ZWaveBaseEntity):

    def __init__(self, values, domain):
        super().__init__()
        from openzwave.network import ZWaveNetwork
        from pydispatch import dispatcher
        self.values = values
        self.node = values.primary.node
        self.values.primary.set_change_verified(False)
        self._name = _value_name(self.values.primary)
        self._unique_id = "ZWAVE-{}-{}".format(self.node.node_id,
                                               self.values.primary.object_id)
        self._update_attributes()
        dispatcher.connect(
            self.network_value_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED)

    def network_value_changed(self, value):
        if value.value_id in [v.value_id for v in self.values if v]:
            return self.value_changed()

    def value_added(self):
        pass

    def value_changed(self):
        self._update_attributes()
        self.update_properties()
        self.maybe_schedule_update()

    @asyncio.coroutine
    def async_added_to_hass(self):
        async_dispatcher_connect(
            self.hass, SIGNAL_REFRESH_ENTITY_FORMAT.format(self.entity_id),
            self.refresh_from_network)

    def _update_attributes(self):
        self.node_id = self.node.node_id
        if self.values.power:
            self.power_consumption = round(
                self.values.power.data, self.values.power.precision)
        else:
            self.power_consumption = None

    def update_properties(self):
        pass

    @property
    def should_poll(self):
        return False

    @property
    def unique_id(self):
        return self._unique_id

    @property
    def name(self):
        return self._name

    @property
    def device_state_attributes(self):
        attrs = {
            const.ATTR_NODE_ID: self.node_id,
            const.ATTR_VALUE_INDEX: self.values.primary.index,
            const.ATTR_VALUE_INSTANCE: self.values.primary.instance,
            'old_entity_id': self.old_entity_id,
            'new_entity_id': self.new_entity_id,
        }
        if self.power_consumption is not None:
            attrs[ATTR_POWER] = self.power_consumption
        return attrs

    def refresh_from_network(self):
        for value in self.values:
            if value is not None:
                self.node.refresh_value(value.value_id)
```
Representation of a Z-Wave node entity. | 62599067baa26c4b54d50a26 |

```python
class AspectBaseTest:

    def test_class(self):
        assert not isinstance(aspectbase, aspectclass)

    def test_init_needs_aspectclass(self):
        error_str = 'object has no attribute'
        with pytest.raises(AttributeError) as exc:
            aspectbase('py')
        exc.match(error_str)
```
aspectbase is just a mixin base class for new aspectclasses
and is therefore only usable with a derived aspectclass;
it doesn't use the aspectclass meta for itself | 62599067cc0a2c111447c690 |

```python
@dataclass(init=True)
class DuplicateWavelengthStates:
    transmission: StateCalculateTransmission
    normalize: StateNormalizeToMonitor
    wavelength: StateWavelength
    pixel: StateWavelengthAndPixelAdjustment

    def iterate_fields(self):
        return [self.transmission, self.normalize, self.wavelength, self.pixel]
```
These four classes contain duplicated attributes, so this POD
class ties them together for subsequent setters | 625990670c0af96317c5791f |

```python
class Script:

    def __init__(self, script):
        self.script = script
        self.serialized = self.parse()
        self.size = len(self.serialized)

    def parse(self):
        instructions = self.script.split(' ')
        serialized = b''
        for i in instructions:
            if i in opcodes.OPCODE_NAMES:
                op = opcodes.OPCODE_NAMES.index(i)
                serialized += op.to_bytes(sizeof(op), 'big')
            else:
                try:
                    value = int(i, 16)
                    length = sizeof(value)
                    serialized += length.to_bytes(sizeof(length), 'big') + \
                        value.to_bytes(sizeof(value), 'big')
                except Exception:
                    raise Exception('Unexpected instruction in script : {}'.format(i))
        if len(serialized) > 10000:
            raise Exception('Serialized script should be less than 10,000 bytes long')
        return serialized
```
This class represents a Bitcoin script. | 62599067fff4ab517ebcef9b |

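A hypothetical usage sketch; the opcode mnemonics depend entirely on the accompanying `opcodes.OPCODE_NAMES` list, so the names below are assumptions:

```python
# 'OP_DUP' etc. are assumed to be present in opcodes.OPCODE_NAMES.
script = Script('OP_DUP OP_HASH160 89abcdef12345678 OP_EQUALVERIFY OP_CHECKSIG')
print(script.size)              # length of the serialized script in bytes
print(script.serialized.hex())  # raw serialization as hex
```
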
```python
class PersonalRelationship(neomodel.StructuredRel):
    on_date = neomodel.DateProperty(default_now=True)
```
A very simple relationship between two basePersons that simply records
the date at which an acquaintance was established.
This relationship should be carried over to anything that inherits from
basePerson without any further effort. | 62599067a8370b77170f1b50 |

```python
class TestLink(unittest.TestCase):

    def _get_target_class(self):
        from randopony.models import Link
        return Link

    def _make_one(self, *args, **kwargs):
        return self._get_target_class()(*args, **kwargs)

    def test_repr(self):
        email = self._make_one(
            key='club_site', url='http://randonneurs.bc.ca')
        self.assertEqual(
            repr(email), '<Link(club_site=http://randonneurs.bc.ca)>')
```
Unit tests for Link data model. | 625990674f88993c371f10df |

```python
class _ConfigLoader(FileLoader, SourceLoader):

    def get_code(self, fullname: str) -> CodeType:
        source = self.get_source(fullname)
        path = self.get_filename(fullname)
        parsed = ast.parse(source)
        return compile(parsed, path, 'exec', dont_inherit=True)

    def module_repr(self, module: ModuleType):
        return f'<config module {module.__name__} at {module.__file__}>'
```
Config module loader class. | 625990678e71fb1e983bd246 |

```python
class LayerModule(_Layer):

    def __init__(self, layers: Sequence[_Layer]):
        super().__init__()
        self.layers = layers

    def apply(self, inputs: _OneOrMore(tf.Tensor), is_training: tf.Tensor) -> tf.Tensor:
        output = inputs
        for layer in self.layers:
            output = layer.apply(output, is_training)
        return output
```
A set of layers that can be applied as a group (useful if you want to use them in multiple places). | 62599067be8e80087fbc080a |

```python
@attr.s(auto_attribs=True)
class CompactArrayData(BaseDlmsData):
    TAG = 19
    LENGTH = VARIABLE_LENGTH
```
Contains a type description and array content in the form of an octet string
content_description -> type description, tag = 0
array_content -> octet string, tag = 1 | 62599067a17c0f6771d5d767 |

```python
class RedisConsistentHashRouter(BaseRedisRouter):

    def __init__(self, hosts):
        BaseRedisRouter.__init__(self, hosts)
        self._hashring = HashRing(hosts.values())

    def get_host_for_key(self, key):
        node = self._hashring.get_node(key)
        if node is None:
            raise RuntimeError('Can not find a host using consistent hash')
        return node.host_name
```
Use ketama for hash partitioning. | 625990674527f215b58eb563 |

```python
class CartAddSerializer(serializers.Serializer):
    product_id = serializers.IntegerField()

    class Meta:
        fields = ["product_id"]
```
Serializer to represent in docs | 625990676e29344779b01dd2 |

```python
class JSONField(JSONFieldBase, models.TextField):

    form_class = JSONFormField

    def dumps_for_display(self, value):
        kwargs = {"indent": 2}
        kwargs.update(self.dump_kwargs)
        return json.dumps(value, ensure_ascii=False, **kwargs)
```
JSONField is a generic textfield that serializes/deserializes JSON objects | 62599067d486a94d0ba2d73f |

```python
class CIFAR10RandomLabels(datasets.CIFAR10):

    def __init__(self, corrupt_prob=0.0, num_classes=10, **kwargs):
        super(CIFAR10RandomLabels, self).__init__(**kwargs)
        self.n_classes = num_classes
        if corrupt_prob > 0:
            self.corrupt_labels(corrupt_prob)

    def corrupt_labels(self, corrupt_prob):
        labels = np.array(self.train_labels if self.train else self.test_labels)
        np.random.seed(12345)
        mask = np.random.rand(len(labels)) <= corrupt_prob
        rnd_labels = np.random.choice(self.n_classes, mask.sum())
        labels[mask] = rnd_labels
        labels = [int(x) for x in labels]
        if self.train:
            self.train_labels = labels
        else:
            self.test_labels = labels
```
CIFAR10 dataset, with support for randomly corrupted labels.

Params
------
corrupt_prob: float
    Default 0.0. The probability of a label being replaced with a
    random label.
num_classes: int
    Default 10. The number of classes in the dataset. | 6259906771ff763f4b5e8f27 |

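A usage sketch, assuming the usual torchvision constructor kwargs are forwarded through `**kwargs`; note that `train_labels`/`test_labels`, which `corrupt_labels()` relies on, only exist in older torchvision releases:

```python
# Half of the training labels are replaced by uniformly random classes.
train_set = CIFAR10RandomLabels(corrupt_prob=0.5, num_classes=10,
                                root='./data', train=True, download=True)
```
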
```python
class EntityType:

    def __init__(self, *components, layer, name, tmp_rendering=None):
        self.layer = layer
        self.name = name
        self.tmp_rendering = tmp_rendering
        self.components = {}
        self.component_data = {}
        for component in components:
            iface = component.interface
            if iface in self.components:
                raise TypeError(
                    "Got two components for the same interface "
                    "({!r}): {!r} and {!r}"
                    .format(iface, self.components[iface], component))
            self.components[iface] = component.component
            component.init_entity_type(self)

    def __repr__(self):
        return "<{}: {}>".format(type(self).__qualname__, self.name)

    def __call__(self, *args, **kwargs):
        return Entity(self, *args, **kwargs)

    def __getitem__(self, key):
        return self.component_data[key]

    def __setitem__(self, key, value):
        self.component_data[key] = value
```
A class of entity, except deliberately not implemented as a class.
Consists primarily of some number of components, each implementing a
different interface. | 625990673317a56b869bf103 |

```python
class DeflateBuffer:

    def __init__(self, out, encoding):
        self.out = out
        self.size = 0
        self.encoding = encoding
        self._started_decoding = False
        zlib_mode = (16 + zlib.MAX_WBITS if encoding == 'gzip'
                     else -zlib.MAX_WBITS)
        self.zlib = zlib.decompressobj(wbits=zlib_mode)

    def set_exception(self, exc):
        self.out.set_exception(exc)

    def feed_data(self, chunk, size):
        self.size += size
        try:
            chunk = self.zlib.decompress(chunk)
        except Exception:
            # Some servers send plain deflate without the zlib wrapper;
            # retry once with the default (zlib-wrapped) decompressor.
            if not self._started_decoding and self.encoding == 'deflate':
                self.zlib = zlib.decompressobj()
                try:
                    chunk = self.zlib.decompress(chunk)
                except Exception:
                    raise ContentEncodingError(
                        'Can not decode content-encoding: %s' % self.encoding)
            else:
                raise ContentEncodingError(
                    'Can not decode content-encoding: %s' % self.encoding)
        if chunk:
            self._started_decoding = True
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self):
        chunk = self.zlib.flush()
        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            if not self.zlib.eof:
                raise ContentEncodingError('deflate')
        self.out.feed_eof()
```
DeflateStream decompress stream and feed data into specified stream. | 6259906799fddb7c1ca63990 |

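To exercise the buffer without the rest of the streaming stack, a minimal sink implementing the `feed_data`/`feed_eof`/`set_exception` interface is enough; the sink class below is a stand-in, not part of the source:

```python
import gzip

class ListSink:
    """Stand-in for the downstream stream object."""
    def __init__(self):
        self.chunks = []
    def feed_data(self, chunk, size):
        self.chunks.append(chunk)
    def feed_eof(self):
        pass
    def set_exception(self, exc):
        raise exc

sink = ListSink()
buf = DeflateBuffer(sink, 'gzip')
payload = gzip.compress(b'hello world')
buf.feed_data(payload, len(payload))
buf.feed_eof()
assert b''.join(sink.chunks) == b'hello world'
```
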
```python
class BlobbableBinary(object):

    implements(IBlobbable)
    adapts(Binary)

    def __init__(self, context):
        self.context = context

    def feed(self, blob):
        blobfile = blob.open('w')
        blobfile.writelines(self.context.data)
        blobfile.close()

    def filename(self):
        return getattr(self.context, 'filename', None)

    def mimetype(self):
        return guessMimetype(self.context.data, self.filename())
```
Adapter for an xmlrpclib Binary instance to work with blobs | 62599067d486a94d0ba2d740 |

```python
class Describe(base.DescribeCommand):

    @staticmethod
    def Args(parser):
        user_utils.AddUserArgument(parser, 'describe')

    def Run(self, args):
        compute_holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        holder = base_classes.ComputeUserAccountsApiHolder(self.ReleaseTrack())
        client = holder.client
        user = args.name
        if not user:
            user = gaia.GetDefaultAccountName(
                compute_holder.client.apitools_client.http)
        user_ref = holder.resources.Parse(
            user,
            params={'project': properties.VALUES.core.project.GetOrFail},
            collection='clouduseraccounts.users')
        request = client.MESSAGES_MODULE.ClouduseraccountsUsersGetRequest(
            project=user_ref.project, user=user_ref.Name())
        return compute_holder.client.MakeRequests([(client.users, 'Get', request)])
```
Describe a Google Compute Engine user.

*{command}* displays all data associated with a Google Compute
Engine user in a project.

## EXAMPLES

To describe a user, run:

  $ {command} example-user

To describe the default user mapped from the currently authenticated
Google account email, run:

  $ {command} | 6259906744b2445a339b7521 |

```python
class Parameter(Variable):

    def __new__(cls, data=None, requires_grad=True):
        return super(Parameter, cls).__new__(cls, data, requires_grad=requires_grad)

    def __repr__(self):
        return 'Parameter containing:' + self.data.__repr__()
```
A kind of Variable that is to be considered a module parameter.

Parameters are :class:`~torch.autograd.Variable` subclasses that have a
very special property when used with :class:`Module` s - when they're
assigned as Module attributes they are automatically added to the list of
its parameters, and will appear e.g. in the :meth:`~Module.parameters` iterator.
Assigning a Variable doesn't have such an effect. This is because one might
want to cache some temporary state, like the last hidden state of the RNN, in
the model. If there were no such class as :class:`Parameter`, these
temporaries would get registered too.

Arguments:
    data (Tensor): parameter tensor.
    requires_grad (bool, optional): if the parameter requires gradient. See
        :ref:`excluding-subgraphs` for more details. | 625990674a966d76dd5f0676 |

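The registration behaviour the docstring describes is easy to see against the modern `torch.nn` API, where `Parameter` plays the same role:

```python
import torch
import torch.nn as nn

class Affine(nn.Module):
    def __init__(self, n):
        super().__init__()
        # Parameters assigned as attributes are registered automatically.
        self.weight = nn.Parameter(torch.ones(n))
        self.bias = nn.Parameter(torch.zeros(n))
        # A plain tensor attribute is NOT registered (useful for cached state).
        self.last_hidden = torch.zeros(n)

m = Affine(3)
print([name for name, _ in m.named_parameters()])  # ['weight', 'bias']
```
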
```python
class Aperture(object):

    def __init__(self, E0):
        self.E0 = E0

    def p(self, q):
        raise NotImplementedError('abstract method')

    def qmin(self):
        raise NotImplementedError('abstract method')

    def qmax(self):
        raise NotImplementedError('abstract method')

    def weight_q(self, q, E):
        qE = conv.Qmin(E, self.E0) * conv.bohr
        return 1. / (q**2 + qE**2) * self.p(q)

    def moment(self, n, E):
        def f(q):
            return q**n * self.weight_q(q, E)
        F, err = quad(f, self.qmin(), self.qmax())
        assert abs(err) < 1e-5
        return F

    def mean(self, E):
        return self.moment(1, E) / self.moment(0, E)

    def var(self, E):
        norm = self.moment(0, E)
        return self.moment(2, E) / norm - (self.moment(1, E) / norm)**2

    def get_eels(self, Iepsi, dq, qmax=None):
        qmax = self.qmax() if qmax is None else qmax
        q = np.arange(self.qmin(), qmax, dq)
        E = Iepsi.get_E()
        eels = Iepsi.get_eels(q)
        assert len(q) == eels.shape[0]
        assert len(E) == eels.shape[1]
        wq = np.asarray([self.weight_q(q[iq], E) for iq in range(len(q))])
        APC = dq * np.sum(wq, axis=0)
        eels = dq * np.sum(wq * eels, axis=0)
        return eels / APC

    def get_APC(self, E):
        return self.moment(0, E)
```
Abstract base class for all special apertures
methods used: p(q), qmin(), qmax() | 625990679c8ee82313040d49 |

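A concrete subclass only has to supply the acceptance profile and the integration bounds; the top-hat aperture below is a hypothetical example, not from the source:

```python
class TopHatAperture(Aperture):
    """Hypothetical aperture: uniform acceptance on [q1, q2]."""

    def __init__(self, E0, q1, q2):
        super(TopHatAperture, self).__init__(E0)
        self.q1 = q1
        self.q2 = q2

    def p(self, q):
        return 1.0   # flat profile inside the aperture

    def qmin(self):
        return self.q1

    def qmax(self):
        return self.q2
```
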
```python
class UserForm(messages.Message):
    name = messages.StringField(1, required=True)
    email = messages.StringField(2, required=True)
```
UserForm -- User outbound form message | 62599067baa26c4b54d50a28 |

```python
class Base(db.Model):
    __abstract__ = True
    created_at = db.Column(db.DateTime, default=datetime.utcnow)
    updated_at = db.Column(db.DateTime, default=datetime.utcnow,
                           onupdate=datetime.utcnow)
```
A base class for all models; adds created/updated timestamps by default | 625990678e7ae83300eea811 |

```python
class ImaginaryUnit(AtomicExpr, metaclass=Singleton):

    is_commutative = True
    is_imaginary = True
    is_finite = True
    is_number = True
    is_algebraic = True
    is_transcendental = False
    kind = NumberKind

    __slots__ = ()

    def _latex(self, printer):
        return printer._settings['imaginary_unit_latex']

    @staticmethod
    def __abs__():
        return S.One

    def _eval_evalf(self, prec):
        return self

    def _eval_conjugate(self):
        return -S.ImaginaryUnit

    def _eval_power(self, expt):
        if isinstance(expt, Integer):
            expt = expt % 4
            if expt == 0:
                return S.One
            elif expt == 1:
                return S.ImaginaryUnit
            elif expt == 2:
                return S.NegativeOne
            elif expt == 3:
                return -S.ImaginaryUnit
        if isinstance(expt, Rational):
            i, r = divmod(expt, 2)
            rv = Pow(S.ImaginaryUnit, r, evaluate=False)
            if i % 2:
                return Mul(S.NegativeOne, rv, evaluate=False)
            return rv

    def as_base_exp(self):
        return S.NegativeOne, S.Half

    @property
    def _mpc_(self):
        return (Float(0)._mpf_, Float(1)._mpf_)
```
The imaginary unit, `i = \sqrt{-1}`.

I is a singleton, and can be accessed by ``S.I``, or can be
imported as ``I``.

Examples
========

>>> from sympy import I, sqrt
>>> sqrt(-1)
I
>>> I*I
-1
>>> 1/I
-I

References
==========

.. [1] https://en.wikipedia.org/wiki/Imaginary_unit | 6259906755399d3f05627ca3 |

```python
class itkNumericTraitsFAUI1(object):

    thisown = _swig_property(lambda x: x.this.own(),
                             lambda x, v: x.this.own(v),
                             doc='The membership flag')
    __repr__ = _swig_repr
    __swig_destroy__ = _itkNumericTraitsPython.delete_itkNumericTraitsFAUI1

    def __init__(self, *args):
        _itkNumericTraitsPython.itkNumericTraitsFAUI1_swiginit(
            self, _itkNumericTraitsPython.new_itkNumericTraitsFAUI1(*args))

    def max():
        return _itkNumericTraitsPython.itkNumericTraitsFAUI1_max()
    max = staticmethod(max)

    def min():
        return _itkNumericTraitsPython.itkNumericTraitsFAUI1_min()
    min = staticmethod(min)

    def NonpositiveMin():
        return _itkNumericTraitsPython.itkNumericTraitsFAUI1_NonpositiveMin()
    NonpositiveMin = staticmethod(NonpositiveMin)

    def ZeroValue():
        return _itkNumericTraitsPython.itkNumericTraitsFAUI1_ZeroValue()
    ZeroValue = staticmethod(ZeroValue)

    def OneValue():
        return _itkNumericTraitsPython.itkNumericTraitsFAUI1_OneValue()
    OneValue = staticmethod(OneValue)
```
Proxy of C++ itkNumericTraitsFAUI1 class | 625990677c178a314d78e7ad |

```python
class MySocket:

    def __init__(self, sock=None):
        if sock is None:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        else:
            self.sock = sock

    def connect(self, host, port):
        self.sock.connect((host, port))

    def mysend(self, msg):
        totalsent = 0
        msg = msg.encode()
        # Measure after encoding so the loop counts bytes, not characters.
        self.msglen = len(msg)
        while totalsent < self.msglen:
            sent = self.sock.send(msg[totalsent:])
            if sent == 0:
                raise RuntimeError("socket connection broken")
            totalsent = totalsent + sent

    def myreceive(self):
        chunks = []
        bytes_recd = 0
        while True:
            chunk = self.sock.recv(128)
            if chunk == b'':
                raise RuntimeError("socket connection broken")
            chunks.append(chunk)
            bytes_recd = bytes_recd + len(chunk)
            print(chunk)
            if "\n" in chunk.decode():
                break
        return b''.join(chunks)
```
demonstration class only
- coded for clarity, not efficiency | 6259906776e4537e8c3f0d05 |

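A usage sketch; it assumes some line-oriented service that terminates replies with a newline is listening on the given (hypothetical) port:

```python
s = MySocket()
s.connect('localhost', 7777)  # hypothetical line-oriented echo service
s.mysend('hello\n')
print(s.myreceive())          # blocks until a chunk containing '\n' arrives
```
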
```python
class Grid(object):

    def __init__(self, zs, nplanes=100, cosmo=[0.25, 0.75, 0.73]):
        distance = Distance()
        self.zmax = zs * 1.0
        self.zs = zs * 1.0
        self.nplanes = nplanes
        self.cosmo = cosmo
        self.redshifts, self.dz = linspace(0.0, self.zmax, self.nplanes,
                                           endpoint=True, retstep=True)
        self.redshifts += (self.dz / 2.)
        self.nz = len(self.redshifts)
        self.Da_s = distance.Da(zs)
        self.plane = {}
        self.Da_p = zeros(self.nz)
        self.rho_crit = zeros(self.nz)
        self.Da_ps = zeros(self.nz)
        self.Da_pl = zeros(self.nz)
        self.sigma_crit = zeros(self.nz)
        for i in range(self.nz):
            z = self.redshifts[i]
            self.Da_p[i] = distance.Da(0, z)
            self.rho_crit[i] = distance.rho_crit_univ(z)
            self.Da_ps[i] = distance.Da(z, zs)
            zl = 0
            self.Da_pl[i] = distance.Da(z, zl)
            self.sigma_crit[i] = (1.663 * 10**18) * \
                (self.Da_s / (self.Da_p[i] * self.Da_ps[i]))

    def snap(self, z):
        snapped_p = digitize(z, self.redshifts - self.dz / 2.0) - 1
        snapped_p[snapped_p < 0] = 0
        snapped_z = self.redshifts[snapped_p]
        return snapped_p
```
TODO | 625990673617ad0b5ee078d4 |

```python
class ExpiredTest(django.test.TestCase):

    fixtures = ['core.json']

    def setUp(self):
        pass

    def test_expired(self):
        table = [
            ('530d166a-ace9-46d5-b817-9168ca5946b1',
             '495bb3be-e327-4840-accf-afefcd411e06',
             ['bf6893b6-7963-4b61-9149-5a0a67ecd78e']),
            ('7d3fe736-5902-44d5-a34c-86f877190523',
             '495bb3be-e327-4840-accf-afefcd411e06',
             ['ae443952-7990-4cee-9913-41dfd0092dc1']),
        ]
        expired = productstatus.core.expired.get_expired_datainstances()
        self.assertEqual(len(expired), 2)
        for i, item in enumerate(expired):
            product, servicebackend, instances = item
            ids = [str(instance.id) for instance in instances]
            self.assertEqual(str(product.id), table[i][0])
            self.assertEqual(str(servicebackend.id), table[i][1])
            self.assertEqual(ids, table[i][2])
```
Tests for expired datainstance lookups. | 625990678a43f66fc4bf3913 |

```python
class Reader():

    def __init__(self, lastz_file, long_format=False):
        self.file = open(lastz_file, 'rU')
        self.long_format = long_format

    def __del__(self):
        self.file.close()

    def __iter__(self):
        while True:
            yield self.next()

    def next(self):
        lastz_result = self.file.readline()
        if not lastz_result:
            raise StopIteration
        if not self.long_format:
            Lastz = namedtuple(
                'Lastz',
                'score,name1,strand1,zstart1,end1,length1,name2,'
                'strand2,zstart2,end2,length2,diff,cigar,identity,percent_identity,'
                'continuity,percent_continuity')
        else:
            Lastz = namedtuple(
                'Lastz',
                'score,name1,strand1,zstart1,end1,length1,name2,'
                'strand2,zstart2,end2,length2,diff,cigar,identity,percent_identity,'
                'continuity,percent_continuity,coverage,percent_coverage')
        aligns = defaultdict(lambda: defaultdict(list))
        lastz_result_split = lastz_result.strip('\n').split('\t')
        for k, v in enumerate(lastz_result_split):
            if k in [3, 4, 5, 8, 9, 10]:
                lastz_result_split[k] = int(v)
            elif '%' in v:
                lastz_result_split[k] = float(v.strip('%'))
        lastz_result_split[1] = lastz_result_split[1].lstrip('>')
        lastz_result_split[6] = lastz_result_split[6].lstrip('>')
        return Lastz._make(lastz_result_split)
```
read a lastz file and return an iterator over that file | 62599067009cb60464d02cbc |

```python
class Indents(BaseTestCase):

    sample = '\n'.join([
        'line=1',
        ' line=2',
        '',
        'line=4',
    ])
    sample_update = sample.replace('line=', 'line+=')

    def test_compare(self):
        self.assertEqual(indents.indent0, self.sample)
        self.assertEqual(indents.indent4, self.sample)
        self.check_updated_files()

    def test_update(self):
        self.assertNotEqual(indents.indent0, self.sample_update)
        self.assertNotEqual(indents.indent4, self.sample_update)
        self.check_updated_files({indents: [('line=', 'line+=')]})
```
Test that indented lines in baselined text compare and update correctly. | 625990677047854f46340b36 |

```python
class EOSAbiProcessingError(Exception):
    pass
```
Raised when the abi action cannot be processed | 625990671b99ca40022900f7 |

```python
class XdrsKeystoneContext(wsgi.Middleware):

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        user_id = req.headers.get('X_USER')
        user_id = req.headers.get('X_USER_ID', user_id)
        if user_id is None:
            LOG.debug("Neither X_USER_ID nor X_USER found in request")
            return webob.exc.HTTPUnauthorized()
        roles = self._get_roles(req)
        if 'X_TENANT_ID' in req.headers:
            project_id = req.headers['X_TENANT_ID']
        else:
            project_id = req.headers['X_TENANT']
        project_name = req.headers.get('X_TENANT_NAME')
        user_name = req.headers.get('X_USER_NAME')
        auth_token = req.headers.get('X_AUTH_TOKEN',
                                     req.headers.get('X_STORAGE_TOKEN'))
        remote_address = req.remote_addr
        if CONF.use_forwarded_for:
            remote_address = req.headers.get('X-Forwarded-For', remote_address)
        service_catalog = None
        if req.headers.get('X_SERVICE_CATALOG') is not None:
            try:
                catalog_header = req.headers.get('X_SERVICE_CATALOG')
                service_catalog = jsonutils.loads(catalog_header)
            except ValueError:
                raise webob.exc.HTTPInternalServerError(
                    _('Invalid service catalog json.'))
        ctx = context.RequestContext(user_id, project_id,
                                     user_name=user_name,
                                     project_name=project_name,
                                     roles=roles,
                                     auth_token=auth_token,
                                     remote_address=remote_address,
                                     service_catalog=service_catalog)
        req.environ['xdrs.context'] = ctx
        return self.application

    def _get_roles(self, req):
        if 'X_ROLES' in req.headers:
            roles = req.headers.get('X_ROLES', '')
        else:
            roles = req.headers.get('X_ROLE', '')
            if roles:
                LOG.warn(_("Sourcing roles from deprecated X-Role HTTP "
                           "header"))
        return [r.strip() for r in roles.split(',')]
```
Make a request context from keystone headers. | 625990677d43ff2487427fd2 |

```python
class Builder(object):

    def __init__(self, resolver):
        self.resolver = resolver

    def build(self, name):
        if isinstance(name, six.string_types):
            type = self.resolver.find(name)
            if type is None:
                raise TypeNotFound(name)
        else:
            type = name
        cls = type.name
        if type.mixed():
            data = Factory.property(cls)
        else:
            data = Factory.object(cls)
        resolved = type.resolve()
        md = data.__metadata__
        md.sxtype = resolved
        md.ordering = self.ordering(resolved)
        history = []
        self.add_attributes(data, resolved)
        for child, ancestry in type.children():
            if self.skip_child(child, ancestry):
                continue
            self.process(data, child, history[:])
        return data

    def process(self, data, type, history):
        if type in history:
            return
        if type.enum():
            return
        history.append(type)
        resolved = type.resolve()
        value = None
        if type.unbounded():
            value = []
        else:
            if len(resolved) > 0:
                if resolved.mixed():
                    value = Factory.property(resolved.name)
                    md = value.__metadata__
                    md.sxtype = resolved
                else:
                    value = Factory.object(resolved.name)
                    md = value.__metadata__
                    md.sxtype = resolved
                    md.ordering = self.ordering(resolved)
        setattr(data, type.name, value)
        if value is not None:
            data = value
        if not isinstance(data, list):
            self.add_attributes(data, resolved)
            for child, ancestry in resolved.children():
                if self.skip_child(child, ancestry):
                    continue
                self.process(data, child, history[:])

    def add_attributes(self, data, type):
        for attr, ancestry in type.attributes():
            name = '_%s' % attr.name
            value = attr.get_default()
            setattr(data, name, value)

    def skip_child(self, child, ancestry):
        if child.any():
            return True
        for x in ancestry:
            if x.choice():
                return True
        return False

    def ordering(self, type):
        result = []
        for child, ancestry in type.resolve():
            name = child.name
            if child.name is None:
                continue
            if child.isattr():
                name = '_%s' % child.name
            result.append(name)
        return result
```
Builder used to construct an object for types defined in the schema | 6259906771ff763f4b5e8f29 |

```python
class NodeLookup(object):

    def __init__(self, label_lookup_path=None, uid_lookup_path=None):
        if not label_lookup_path:
            label_lookup_path = './imagenet/imagenet_2012_challenge_label_map_proto.pbtxt'
        if not uid_lookup_path:
            uid_lookup_path = './imagenet/imagenet_synset_to_human_label_map.txt'
        self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

    def load(self, label_lookup_path, uid_lookup_path):
        if not tf.gfile.Exists(uid_lookup_path):
            tf.logging.fatal('File does not exist %s', uid_lookup_path)
        if not tf.gfile.Exists(label_lookup_path):
            tf.logging.fatal('File does not exist %s', label_lookup_path)
        # Map synset UIDs to human-readable strings.
        proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
        uid_to_human = {}
        p = re.compile(r'[n\d]*[ \S,]*')
        for line in proto_as_ascii_lines:
            parsed_items = p.findall(line)
            uid = parsed_items[0]
            human_string = parsed_items[2]
            uid_to_human[uid] = human_string
        # Map integer node IDs to synset UIDs.
        node_id_to_uid = {}
        proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
        for line in proto_as_ascii:
            if line.startswith('  target_class:'):
                target_class = int(line.split(': ')[1])
            if line.startswith('  target_class_string:'):
                target_class_string = line.split(': ')[1]
                node_id_to_uid[target_class] = target_class_string[1:-2]
        # Compose the two maps: integer node ID -> human-readable label.
        node_id_to_name = {}
        for key, val in node_id_to_uid.items():
            if val not in uid_to_human:
                tf.logging.fatal('Failed to locate: %s', val)
            name = uid_to_human[val]
            node_id_to_name[key] = name
        return node_id_to_name

    def id_to_string(self, node_id):
        if node_id not in self.node_lookup:
            return ''
        return self.node_lookup[node_id]
```
Converts integer node ID's to human readable labels. | 625990672ae34c7f260ac86b |

```python
class ReferendumComments(APIView):

    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)

    def get_referendum(self, pk):
        try:
            return Referendum.objects.get(pk=pk)
        except Referendum.DoesNotExist:
            raise Http404

    def create_root_comment(self, referendum):
        root_comment = Comment.add_root(content='referendum',
                                        user_id=referendum.user.id)
        referendum.comment = root_comment
        referendum.save()
        return root_comment

    def update_information_from_tree(self, tree):
        for element in tree:
            user_id = element['data']['user']
            comment_id = element['id']
            user = User.objects.get(pk=user_id)
            element['data']['user'] = user.get_data_dictionary()
            if self.request.user.is_authenticated():
                current_user_id = self.request.user.id
                try:
                    vote = CommentUserVote.objects.get(comment_id=comment_id,
                                                       user_id=current_user_id)
                    vote_value = vote.value
                except CommentUserVote.DoesNotExist:
                    vote_value = 0
                element['data']['current_user_vote'] = vote_value
            if 'children' in element:
                self.update_information_from_tree(element['children'])
        return tree

    def get(self, request, pk, format=None):
        try:
            referendum = self.get_referendum(pk)
        except Http404:
            return Response({'message': 'Referendum not found'},
                            status=status.HTTP_404_NOT_FOUND)
        if referendum.comment:
            root_comment = referendum.comment
        else:
            root_comment = self.create_root_comment(referendum)
            referendum.comment = root_comment
            referendum.save()
        data = Comment.dump_bulk(parent=root_comment)
        data = self.update_information_from_tree(data)
        return Response(data)

    def post(self, request, pk, format=None):
        from ekratia.threads.serializers import CommentThreadSerializer
        try:
            referendum = self.get_referendum(pk)
        except Http404:
            return Response({'message': 'Referendum not found'},
                            status=status.HTTP_404_NOT_FOUND)
        serializer = CommentThreadSerializer(data=request.data)
        if serializer.is_valid():
            parent_id = serializer.data['parent']
            if parent_id:
                node = Comment.objects.get(pk=parent_id)
            else:
                node = referendum.comment
            node.add_child(content=serializer.data['content'],
                           user_id=request.user.id)
            notify_comment_node(request, node, 'referendum')
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
List Comments of the referendum in a Tree | 62599067b7558d5895464af1 |

```python
class PDFTemplateResponseMixin(TemplateResponseMixin):

    pdf_filename = None
    pdf_kwargs = None

    def get_pdf_filename(self):
        return self.pdf_filename

    def get_pdf_kwargs(self):
        if self.pdf_kwargs is None:
            return {}
        return copy.copy(self.pdf_kwargs)

    def get_pdf_response(self, context, **response_kwargs):
        return render_to_pdf_response(
            request=self.request,
            template=self.get_template_names(),
            context=context,
            using=self.template_engine,
            filename=self.get_pdf_filename(),
            **self.get_pdf_kwargs())

    def render_to_response(self, context, **response_kwargs):
        return self.get_pdf_response(context, **response_kwargs)
```
A mixin class that implements PDF rendering and Django response construction. | 625990674428ac0f6e659cb6 |

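A hypothetical Django view wiring the mixin into a `DetailView`; `Invoice`, the template name, and the `pdf_kwargs` value are stand-ins, and `render_to_pdf_response` is assumed to ship with the same package as the mixin:

```python
from django.views.generic import DetailView

class InvoicePDFView(PDFTemplateResponseMixin, DetailView):
    model = Invoice                 # hypothetical model
    template_name = 'invoice.html'  # rendered to HTML, then converted to PDF
    pdf_filename = 'invoice.pdf'
```
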
```python
class CommandMarkdownGenerator(MarkdownGenerator):

    def __init__(self, command):
        self._command = command
        command.LoadAllSubElements()
        self._root_command = command._TopCLIElement()
        self._subcommands = command.GetSubCommandHelps()
        self._subgroups = command.GetSubGroupHelps()
        super(CommandMarkdownGenerator, self).__init__(
            command.GetPath(), command.ReleaseTrack(), command.IsHidden())
        self._capsule = self._command.short_help
        self._docstring = self._command.long_help
        self._ExtractSectionsFromDocstring(self._docstring)
        self._sections.update(getattr(self._command, 'detailed_help', {}))
        self._subcommands = command.GetSubCommandHelps()
        self._subgroups = command.GetSubGroupHelps()

    def _SetSectionHelp(self, name, lines):
        while lines and not lines[0]:
            lines = lines[1:]
        while lines and not lines[-1]:
            lines = lines[:-1]
        if lines:
            self._sections[name] = '\n'.join(lines)

    def _ExtractSectionsFromDocstring(self, docstring):
        name = 'DESCRIPTION'
        lines = []
        for line in textwrap.dedent(docstring).strip().splitlines():
            if len(line) >= 4 and line.startswith('## '):
                self._SetSectionHelp(name, lines)
                name = line[3:]
                lines = []
            else:
                lines.append(line)
        self._SetSectionHelp(name, lines)

    @staticmethod
    def FlagGroupSortKey(flags):
        return [len(flags) > 1] + sorted([flag.option_strings for flag in flags])

    @staticmethod
    def IsHidden(arg):
        return arg.help == argparse.SUPPRESS

    def IsValidSubPath(self, sub_command_path):
        return self._root_command.IsValidSubPath(sub_command_path)

    def GetPositionalArgs(self):
        return self._command.ai.positional_args

    def GetFlagGroups(self):
        has_global_flags = False
        groups = {}
        for flag in (self._command.ai.flag_args +
                     self._command.ai.ancestor_flag_args):
            if flag.is_global and not self._is_root:
                has_global_flags = True
            else:
                group_id = self._command.ai.mutex_groups.get(
                    flag.dest,
                    self._command.ai.argument_groups.get(flag.dest, flag.dest))
                if group_id not in groups:
                    groups[group_id] = []
                groups[group_id].append(flag)
        return groups, self._command.ai.group_attr, has_global_flags
```
Command help markdown document generator.

Attributes:
    _command: The CommandCommon instance for command.
    _root_command: The root CLI command instance.
    _subcommands: The dict of subcommand help indexed by subcommand name.
    _subgroups: The dict of subgroup help indexed by subcommand name. | 62599067460517430c432c17 |

```python
class WinningEntriesView(ListView, JingoTemplateMixin):

    template_name = 'challenges/winning.html'
    context_object_name = 'entries'

    def get_context_data(self, **kwargs):
        context = super(WinningEntriesView, self).get_context_data(**kwargs)
        context.update(project=self.project, challenge=self.challenge)
        return context

    def get_queryset(self):
        self.project = get_object_or_404(Project, slug=self.kwargs['project'])
        self.challenge = get_object_or_404(self.project.challenge_set,
                                           slug=self.kwargs['slug'])
        submissions = (Submission.objects.visible(self.request.user)
                       .filter(phase__challenge=self.challenge)
                       .filter(is_winner=True))
        return submissions
```
Show entries that have been marked as winners. | 62599067baa26c4b54d50a2a |

```python
class Stow(AutotoolsPackage, GNUMirrorPackage):

    homepage = "https://www.gnu.org/software/stow/"
    gnu_mirror_path = "stow/stow-2.2.2.tar.bz2"

    version('2.3.1', sha256='26a6cfdfdaca0eea742db5487798c15fcd01889dc86bc5aa62614ec9415a422f')
    version('2.2.2', sha256='a0022034960e47a8d23dffb822689f061f7a2d9101c9835cf11bf251597aa6fd')
    version('2.2.0', sha256='86bc30fe1d322a5c80ff3bd7580c2758149aad7c3bbfa18b48a9d95c25d66b05')
    version('2.1.3', sha256='2dff605c801fee9fb7d0fef6988bbb8a0511fad469129b20cae60e0544ba1443')
    version('2.1.2', sha256='dda4231dab409d906c5de7f6a706a765e6532768ebbffe34e1823e3371f891f9')
    version('2.1.1', sha256='8bdd21bb2ef6edf5812bf671e64cdd584d92d547d932406cef179646ea6d1998')
    version('2.1.0', sha256='f0e909034fd072b1f5289abb771133d5c4e88d82d4da84195891c53d9b0de5ca')

    depends_on('[email protected]:')
```
GNU Stow: a symlink farm manager

GNU Stow is a symlink farm manager which takes distinct packages of
software and/or data located in separate directories on the filesystem,
and makes them appear to be installed in the same place. | 62599067cc0a2c111447c692 |

```python
class IRCLineModel(BaseModel):

    buffer = p.ForeignKeyField(IRCBufferModel, related_name='lines',
                               on_delete='CASCADE')
    timestamp = p.DateTimeField(default=datetime.datetime.utcnow)
    user = p.ForeignKeyField(IRCUserModel, null=True, on_delete='CASCADE')
    nick = p.TextField(null=True)
    kind = p.CharField(max_length=20, default='message', choices=LINE_TYPES)
    content = p.TextField()

    def to_dict(self):
        d = shortcuts.model_to_dict(self, recurse=False)
        d['timestamp'] = d['timestamp'].replace(
            tzinfo=datetime.timezone.utc).timestamp()
        return d
```
Models anything that might be displayed in a buffer.
Typically this will be messages, notices, CTCP ACTIONs, joins, quits, mode changes, topic changes, etc. | 62599067435de62698e9d58e |

class Streams(Collection): <NEW_LINE> <INDENT> def __init__(self, profile): <NEW_LINE> <INDENT> super(Streams, self).__init__(profile) <NEW_LINE> self._meta_data['allowed_lazy_attributes'] = [Stream] <NEW_LINE> self._meta_data['attribute_registry'] = {'tm:ltm:profile:stream:streamstate': Stream} | BIG-IP® Stream profile collection. | 62599067a8370b77170f1b52 |
class LevelLogger(BaseLogger): <NEW_LINE> <INDENT> default_level = 0 <NEW_LINE> default_file = "normal.log" <NEW_LINE> def __init__(self, *, level=None, file=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.level = pick(level, self.default_level) <NEW_LINE> self.file = pick(file, self.default_file) <NEW_LINE> <DEDENT> @check_bypass <NEW_LINE> def logger(self, *output, file=None, level=None, display=None, write=None, sep=None, split=None, use_utc=None, ts_format=None, print_ts=None, encoding=None, errors=None, **kwargs): <NEW_LINE> <INDENT> sep = pick(sep, self.separator) <NEW_LINE> encoding = pick(encoding, self.encoding) <NEW_LINE> errors = pick(errors, self.errors) <NEW_LINE> split = self.bypassed.get("splitter", pick(split, self.split)) <NEW_LINE> display = self.bypassed.get("display", pick(display, self.display)) <NEW_LINE> write = self.bypassed.get("write", pick(write, self.write)) <NEW_LINE> timestamp = self._get_timestamp(use_utc, ts_format) <NEW_LINE> logall = self.bypassed.get("logall") <NEW_LINE> if display: <NEW_LINE> <INDENT> self._print(*output, sep=sep, use_utc=use_utc, split=split, ts_format=ts_format, print_ts=print_ts, errors=errors) <NEW_LINE> <DEDENT> if write: <NEW_LINE> <INDENT> output = sep.join(str(x) for x in output).splitlines() <NEW_LINE> alines = [x for x in self.logfiles if x in self.bypassers("all")[0]] <NEW_LINE> getter = [file] <NEW_LINE> if logall: <NEW_LINE> <INDENT> getter.append(logall) <NEW_LINE> <DEDENT> for log in getter: <NEW_LINE> <INDENT> if (log == logall and type not in alines) or log is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> atypes = "type.{0} - ".format(type) if log == logall else "" <NEW_LINE> with open(log, "a", encoding=encoding, errors=errors) as f: <NEW_LINE> <INDENT> for writer in output: <NEW_LINE> <INDENT> f.write(timestamp + atypes + writer + "\n") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def logger_new(self, *output, level=None, **kwargs): <NEW_LINE> <INDENT> level = self.bypassed.get("level", level) <NEW_LINE> if level is not None and level >= self.level: <NEW_LINE> <INDENT> super().logger(*output, **kwargs) | Implement levelled logging.
"level":
Number specifying the default level at which lines
will be logged.
Default: 0
Bypassers arguments:
"level":
Bypasser to override the "level" parameter given to
the logger method. The resulting value must be a
number or None. | 6259906791f36d47f2231a51 |
class convGroup(object): <NEW_LINE> <INDENT> def __init__(self,rng,input,filter_shapes,finalpoolsize=(2,2)): <NEW_LINE> <INDENT> self.sublayer0 = LeNetConvPoolLayer( rng, input=input, filter_shape=filter_shapes[0], poolsize=(1,1) ) <NEW_LINE> self.sublayer1 = LeNetConvPoolLayer( rng, input=self.sublayer0.output, filter_shape=filter_shapes[1], poolsize=finalpoolsize ) <NEW_LINE> self.params = self.sublayer0.params + self.sublayer1.params <NEW_LINE> self.output = self.sublayer1.output | Group of convolutional layers with a max pool at the end | 62599067fff4ab517ebcef9f |
class UpdatePwdView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def post(self,request): <NEW_LINE> <INDENT> modify_form = ResetForm(request.POST) <NEW_LINE> if modify_form.is_valid(): <NEW_LINE> <INDENT> pwd1 = request.POST.get('password1','') <NEW_LINE> pwd2 = request.POST.get('password2','') <NEW_LINE> if pwd1!=pwd2: <NEW_LINE> <INDENT> return HttpResponse('{"status":"fail","msg":"两次密码不一致"}', content_type='application/json') <NEW_LINE> <DEDENT> user = request.user <NEW_LINE> user.password = make_password(pwd1) <NEW_LINE> user.save() <NEW_LINE> return HttpResponse('{"status":"success","msg":"修改成功"}', content_type='application/json') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponse(json.dumps(modify_form.errors), content_type='application/json') | Change the password from the personal center | 625990677c178a314d78e7ae
class ActionItemProcessorBase(object): <NEW_LINE> <INDENT> @transaction.commit_on_success <NEW_LINE> def perform_action(self, action_item): <NEW_LINE> <INDENT> method_name = action_item.action.replace('-', '_').lower() <NEW_LINE> method_instance = getattr(self, method_name, None) <NEW_LINE> if callable(method_instance): <NEW_LINE> <INDENT> result = method_instance(action_item.target) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = False <NEW_LINE> <DEDENT> return result | Each ActionItem processor extends this class.
A Processor class defines methods named after the action they handle for
a given ActionItem instance (i.e. `ActionItem.action`.) The method name is
simply the lowercased ActionItem.action name with dashes replaced by
underscores. The method must return `True` for a successful completion of the
task and `False` otherwise.
For example, CampaignProcessor.generate_tickets is responsible for handling
the ActionItem whose category is 'campaign' and action is 'generate-tickets'. | 625990673539df3088ecda23 |
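Following the naming convention the docstring spells out, a concrete processor only defines one method per action; perform_action lowercases the action name and swaps dashes for underscores before dispatching. A minimal sketch (CampaignProcessor and the create_tickets_for helper are hypothetical):

class CampaignProcessor(ActionItemProcessorBase):
    def generate_tickets(self, campaign):
        # Handles ActionItem(category='campaign', action='generate-tickets');
        # perform_action resolves 'generate-tickets' -> 'generate_tickets'
        # and passes action_item.target as the argument.
        tickets = create_tickets_for(campaign)  # hypothetical helper
        return tickets > 0  # True signals the task completed successfully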
class User(Person): <NEW_LINE> <INDENT> def __init__(self, name="", language="", domain="", service="", duration=0): <NEW_LINE> <INDENT> super().__init__(name, language, domain) <NEW_LINE> self._service = service <NEW_LINE> self._duration = duration <NEW_LINE> <DEDENT> def setService(self, service): <NEW_LINE> <INDENT> self._service = service <NEW_LINE> <DEDENT> def getService(self): <NEW_LINE> <INDENT> return self._service <NEW_LINE> <DEDENT> def setDuration(self, duration): <NEW_LINE> <INDENT> self._duration = duration <NEW_LINE> <DEDENT> def getDuration(self): <NEW_LINE> <INDENT> return str(self._duration) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return super().__str__() + str((self.getService(), self.getDuration())) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return super().__eq__(other) and (self.getService().lower(), int(self.getDuration())) == (other.getService().lower(), int(other.getDuration())) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.getService() < other.getService() | User class is responsible for defining a single User that derives from Person | 62599067dd821e528d6da543 |
class Discriminator(nn.Module): <NEW_LINE> <INDENT> def __init__(self, image_shape): <NEW_LINE> <INDENT> super(Discriminator, self).__init__() <NEW_LINE> self.model = nn.Sequential( nn.Linear(int(np.prod(image_shape)), 512), nn.LeakyReLU(0.2, inplace=True), nn.Linear(512, 256), nn.LeakyReLU(0.2, inplace=True), nn.Linear(256, 1), nn.Sigmoid() ) <NEW_LINE> <DEDENT> def forward(self, img): <NEW_LINE> <INDENT> flat = img.view(img.size(0), -1) <NEW_LINE> validity = self.model(flat) <NEW_LINE> return validity | Discriminator network of the generative adversarial network. | 625990671f037a2d8b9e542d |
class TwilioDispatcher: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.client = TwilioRestClient( TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN ) <NEW_LINE> <DEDENT> def send_to_number(self, to_number, text): <NEW_LINE> <INDENT> number = number_repo.get_by_number(to_number) <NEW_LINE> if not number: <NEW_LINE> <INDENT> raise NotFoundError('Number {num} not found'.format(num=to_number)) <NEW_LINE> <DEDENT> sender = sender_repo.get_by_id(number.sender_id) <NEW_LINE> try: <NEW_LINE> <INDENT> message = self.client.messages.create( body=text, to=number.number, from_=sender.number ) <NEW_LINE> <DEDENT> except TwilioRestException as e: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> def send_to_subscription(self, message): <NEW_LINE> <INDENT> senders = sender_repo.get_all() <NEW_LINE> failed = {} <NEW_LINE> for sender in senders: <NEW_LINE> <INDENT> numbers = number_repo.get_many_by_kwargs( subscription_id=message.subscription_id, sender_id=sender.id ) <NEW_LINE> for number in numbers: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.send_to_number(number.number, message.text) <NEW_LINE> <DEDENT> except (TwilioRestException, NotFoundError) as e: <NEW_LINE> <INDENT> failed[number.number] = str(e) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return failed | Sends text messages using Twilio API. | 625990677047854f46340b38 |
class MemberBinaryOpExprNode(MemberOperatorExprNode): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(MemberBinaryOpExprNode, self).__init__() | Node class representing a binary operator expression | 625990674527f215b58eb565 |
class RouteFilterRuleListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[RouteFilterRule]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(RouteFilterRuleListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None) | Response for the ListRouteFilterRules API service call.
:param value: Gets a list of RouteFilterRules in a resource group.
:type value: list[~azure.mgmt.network.v2018_04_01.models.RouteFilterRule]
:param next_link: The URL to get the next set of results.
:type next_link: str | 625990675fdd1c0f98e5f70a |
class PropGroup: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.list_prop = [] <NEW_LINE> <DEDENT> def apply_props(self, painter): <NEW_LINE> <INDENT> pass | Abstract class for properties | 62599067d486a94d0ba2d743
class DataReader(): <NEW_LINE> <INDENT> def __init__(self, filename, filetype=None, instrument=None): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.filetype = filetype <NEW_LINE> self.instrument = instrument <NEW_LINE> self.data = self.read(filename, filetype, instrument) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def read(self, filename, filetype, instrument): <NEW_LINE> <INDENT> if instrument == 'Element': <NEW_LINE> <INDENT> skipfooter = 4 <NEW_LINE> header = 1 <NEW_LINE> drop = 9 <NEW_LINE> <DEDENT> elif instrument == 'Agilent': <NEW_LINE> <INDENT> skipfooter = 4 <NEW_LINE> header = 3 <NEW_LINE> drop = 3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> skipfooter = 0 <NEW_LINE> header = 0 <NEW_LINE> drop = 0 <NEW_LINE> <DEDENT> if filetype == 'xlsx': <NEW_LINE> <INDENT> imported = pd.ExcelFile(filename) <NEW_LINE> data = imported.parse( 0, index_col=0, skipfooter=skipfooter, header=header) <NEW_LINE> data = data.drop(data.index[:drop], axis=0) <NEW_LINE> <DEDENT> elif filetype == 'csv': <NEW_LINE> <INDENT> data = pd.read_csv(filename, sep=',', index_col=0, skipfooter=skipfooter, header=header, engine='python') <NEW_LINE> <DEDENT> elif filetype == 'asc': <NEW_LINE> <INDENT> data = pd.read_csv(filename, sep='\t', index_col=0, skipfooter=skipfooter, header=header, engine='python') <NEW_LINE> data = data.drop(data.index[:drop], axis=0) <NEW_LINE> data.dropna(axis=1, how='all', inplace=True) <NEW_LINE> data = data.apply(pd.to_numeric, errors='coerce') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warnings.warn('File type not supported.') <NEW_LINE> <DEDENT> return data | Reads data from a file into a pandas DataFrame.
Parameters
----------
filename: str
Path to file to read.
filetype : str
Type of the file to read. If not specified, csv is used.
Possible options are csv, xlsx and asc.
instrument : str
Type of the instrument used for measurement. If not specified, raw data is expected.
Possible options are Agilent and Element. | 62599067e5267d203ee6cf80
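A usage sketch, assuming an Element export in tab-separated .asc form (the path is hypothetical):

reader = DataReader('run01.asc', filetype='asc', instrument='Element')
data = reader()     # __call__ returns the parsed DataFrame
print(data.head())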
class ParserConfig(object): <NEW_LINE> <INDENT> def __init__(self, reference_resolver, duplicates, duplicate_of, tree, index, reverse_index, base_dir, code_url_prefix): <NEW_LINE> <INDENT> self.reference_resolver = reference_resolver <NEW_LINE> self.duplicates = duplicates <NEW_LINE> self.duplicate_of = duplicate_of <NEW_LINE> self.tree = tree <NEW_LINE> self.reverse_index = reverse_index <NEW_LINE> self.index = index <NEW_LINE> self.base_dir = base_dir <NEW_LINE> self.code_url_prefix = code_url_prefix <NEW_LINE> <DEDENT> def py_name_to_object(self, full_name): <NEW_LINE> <INDENT> return self.index[full_name] | Stores all indexes required to parse the docs. | 625990673317a56b869bf105 |
class TestCreateCloudJobResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCreateCloudJobResponse(self): <NEW_LINE> <INDENT> pass | CreateCloudJobResponse unit test stubs | 6259906799cbb53fe683266a |
class LocalNode(object): <NEW_LINE> <INDENT> implements(resources.INode) <NEW_LINE> def __init__(self, nodeName, slurmd, port, reactor): <NEW_LINE> <INDENT> self.nodeName = nodeName <NEW_LINE> self.port = port <NEW_LINE> self.hostname = 'localhost' <NEW_LINE> self.log = logging.Logger(__name__, system=self.nodeName) <NEW_LINE> self.slurmd = slurmd <NEW_LINE> self.reactor = reactor <NEW_LINE> self.started = defer.Deferred() <NEW_LINE> self.stopped = defer.Deferred() <NEW_LINE> self.process = LocalNode.SlurmdProtocol(self) <NEW_LINE> <DEDENT> def isRunning(self): <NEW_LINE> <INDENT> return self.process.status == LocalNode.SlurmdProtocol.STARTED <NEW_LINE> <DEDENT> def terminate(self): <NEW_LINE> <INDENT> if self.process.status != LocalNode.SlurmdProtocol.STARTED: <NEW_LINE> <INDENT> raise RuntimeError('Can only terminate a node in the RUNNING ' 'status') <NEW_LINE> <DEDENT> self.process.status = LocalNode.SlurmdProtocol.TERMINATING <NEW_LINE> self.process.transport.signalProcess('KILL') <NEW_LINE> return self.stopped <NEW_LINE> <DEDENT> def spawn(self): <NEW_LINE> <INDENT> if self.process.status != LocalNode.SlurmdProtocol.WAITING: <NEW_LINE> <INDENT> raise RuntimeError('Can only spawn a node in the WAITING status') <NEW_LINE> <DEDENT> self.log.debug('Spawning new slurmd process') <NEW_LINE> self.process.status = LocalNode.SlurmdProtocol.STARTED <NEW_LINE> formatArgs = { 'nodeName': self.nodeName, 'hostname': self.hostname, 'port': self.port, } <NEW_LINE> args = ['sh', '-c', self.slurmd.format(**formatArgs)] <NEW_LINE> self.reactor.spawnProcess(self.process, 'sh', args) <NEW_LINE> return self.started <NEW_LINE> <DEDENT> def getConfigEntry(self): <NEW_LINE> <INDENT> return 'NodeName={self.nodeName} NodeHostname={self.hostname} ' 'Port={self.port}'.format(self=self) <NEW_LINE> <DEDENT> def release(self): <NEW_LINE> <INDENT> if self.isRunning(): <NEW_LINE> <INDENT> return self.terminate() <NEW_LINE> <DEDENT> return defer.succeed(self) <NEW_LINE> <DEDENT> class SlurmdProtocol(protocol.ProcessProtocol): <NEW_LINE> <INDENT> WAITING, STARTED, TERMINATING, STOPPED = range(4) <NEW_LINE> def __init__(self, node): <NEW_LINE> <INDENT> self.node = node <NEW_LINE> self.status = LocalNode.SlurmdProtocol.WAITING <NEW_LINE> <DEDENT> def connectionMade(self): <NEW_LINE> <INDENT> self.transport.closeStdin() <NEW_LINE> self.node.log.info('New slurmd process started with PID {0}', self.transport.pid) <NEW_LINE> self.node.started.callback(self.node) <NEW_LINE> <DEDENT> def outReceived(self, data): <NEW_LINE> <INDENT> self.node.log.debug(data.rstrip()) <NEW_LINE> <DEDENT> def errReceived(self, data): <NEW_LINE> <INDENT> self.node.log.debug(data.rstrip()) <NEW_LINE> <DEDENT> def processEnded(self, reason): <NEW_LINE> <INDENT> if self.status == LocalNode.SlurmdProtocol.TERMINATING: <NEW_LINE> <INDENT> self.node.log.debug('Process exited normally ({0!r})', reason) <NEW_LINE> self.status = LocalNode.SlurmdProtocol.STOPPED <NEW_LINE> self.node.stopped.callback(self.node) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.node.log.warn('Process quit unexpectedly ({0!r})', reason) <NEW_LINE> self.status = LocalNode.SlurmdProtocol.STOPPED <NEW_LINE> self.node.stopped.errback(reason) | A class implementing the ``INode`` interface which runs a ``slurmd``
process locally using SLURM's multiple daemons support. | 62599067b7558d5895464af2 |
class EclipseIDETests(LargeFrameworkTests): <NEW_LINE> <INDENT> TIMEOUT_INSTALL_PROGRESS = 120 <NEW_LINE> TIMEOUT_START = 60 <NEW_LINE> TIMEOUT_STOP = 60 <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.installed_path = os.path.expanduser("~/tools/ide/eclipse") <NEW_LINE> self.desktop_filename = "eclipse.desktop" <NEW_LINE> <DEDENT> @property <NEW_LINE> def arch_option(self): <NEW_LINE> <INDENT> return platform.machine() <NEW_LINE> <DEDENT> def test_default_eclipse_ide_install(self): <NEW_LINE> <INDENT> self.child = pexpect.spawnu(self.command('{} ide eclipse'.format(UMAKE))) <NEW_LINE> self.expect_and_no_warn("Choose installation path: {}".format(self.installed_path)) <NEW_LINE> self.child.sendline("") <NEW_LINE> self.expect_and_no_warn("Installation done", timeout=self.TIMEOUT_INSTALL_PROGRESS) <NEW_LINE> self.wait_and_no_warn() <NEW_LINE> self.assertTrue(self.launcher_exists_and_is_pinned(self.desktop_filename)) <NEW_LINE> self.assert_exec_exists() <NEW_LINE> self.assert_icon_exists() <NEW_LINE> proc = subprocess.Popen(self.command_as_list(self.exec_path), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) <NEW_LINE> if self.arch_option == "x86_64": <NEW_LINE> <INDENT> self.check_and_kill_process(["java", self.arch_option, self.installed_path], wait_before=self.TIMEOUT_START, send_sigkill=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.check_and_kill_process([self.exec_path], wait_before=self.TIMEOUT_START, send_sigkill=True) <NEW_LINE> <DEDENT> proc.wait(self.TIMEOUT_STOP) <NEW_LINE> self.child = pexpect.spawnu(self.command('{} ide eclipse'.format(UMAKE))) <NEW_LINE> self.expect_and_no_warn("Eclipse is already installed.*\[.*\] ") <NEW_LINE> self.child.sendline() <NEW_LINE> self.wait_and_no_warn() | The Eclipse distribution from the IDE collection. | 62599067d486a94d0ba2d744 |
class PerformanceTest(object): <NEW_LINE> <INDENT> def __init__(self, model, data, split_data=False): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.model = model <NEW_LINE> if split_data: <NEW_LINE> <INDENT> self.train_set, self.test_set = model.split_data(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.train_set, self.test_set = data, None <NEW_LINE> <DEDENT> <DEDENT> def run(self, filter_results_fun=None): <NEW_LINE> <INDENT> self.model.train(self.train_set) <NEW_LINE> return self <NEW_LINE> <DEDENT> def get_results(self, filter_results_fun=None): <NEW_LINE> <INDENT> if self.test_set is not None: <NEW_LINE> <INDENT> return get_test_result(self.test_set, self.model, filter_fun=filter_results_fun) <NEW_LINE> <DEDENT> return get_train_result(self.train_set, self.model, filter_fun=filter_results_fun) | Represents model performance test.
:param model: Instance of the model to test.
:type model: :class:`models.Model`
:param data: Data to use for the test.
:type data: :class:`pandas.DataFrame`
:param split_data: Whether to split the data between
test set and train set. Default is :obj:`False`.
:type split_data: bool | 625990678da39b475be04970
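A usage sketch, assuming a concrete models.Model subclass and a prepared DataFrame:

test = PerformanceTest(model, data, split_data=True)  # holds out a test set
test.run()                     # trains the model on the train set
results = test.get_results()   # scored on the test set when one was split off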
class AttributeIdError(NMLException): <NEW_LINE> <INDENT> pass | Attribute `identifier` must be a persistent globally unique URI. | 625990674e4d562566373b8d |
class ConnectionError(TransportError): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return 'ConnectionError(%s) caused by: %s(%s)' % ( self.error, self.info.__class__.__name__, self.info) | Connection error.
Error raised when there was an exception while talking to
ES. Original exception from the underlying Connection
implementation is available as .info. | 62599067009cb60464d02cbf |
class AuthBackend(object): <NEW_LINE> <INDENT> hasher = bcrypt <NEW_LINE> def get_user(self, username, password): <NEW_LINE> <INDENT> record = self.read_user(username) <NEW_LINE> if not record: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not self.hasher.verify(password, record[self.password_field]): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> del record[self.password_field] <NEW_LINE> return record <NEW_LINE> <DEDENT> def create_user(self, username, password, data={}): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def read_user(self, username): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def update_user(self, user, data): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def delete_user(self, user): <NEW_LINE> <INDENT> raise NotImplementedError() | Basic auth backend.
Use one of its subclasses or make your own. | 625990670a50d4780f706983 |
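A minimal in-memory subclass sketch: only read_user is strictly needed for get_user to work. It assumes password_field names the hashed-password key in a stored record and that hasher is passlib's bcrypt handler, whose hash()/verify() pair matches the verify call above:

class DictAuthBackend(AuthBackend):
    password_field = 'password'  # assumed name of the hash key

    def __init__(self):
        self._users = {}

    def create_user(self, username, password, data={}):
        record = dict(data, username=username)
        record[self.password_field] = self.hasher.hash(password)
        self._users[username] = record
        return record

    def read_user(self, username):
        # Return a copy so get_user() can safely del the password field.
        record = self._users.get(username)
        return dict(record) if record else None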
class AccountUpdate(TagUpdate): <NEW_LINE> <INDENT> _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, 'location': {'key': 'location', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, tags: Optional[Dict[str, str]] = None, identity: Optional["ManagedServiceIdentity"] = None, location: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(AccountUpdate, self).__init__(tags=tags, **kwargs) <NEW_LINE> self.identity = identity <NEW_LINE> self.location = location | Request payload used to update an existing Account.
:param tags: A set of tags. List of key value pairs that describe the resource. This will
overwrite the existing tags.
:type tags: dict[str, str]
:param identity: The type of identity used for the resource.
:type identity: ~device_update.models.ManagedServiceIdentity
:param location: The geo-location where the resource lives.
:type location: str | 62599067d7e4931a7ef3d749 |
class CSVExtract(Node): <NEW_LINE> <INDENT> def run( self, f, compression=None, open_flags="r", chunksize=None, nrows=None, reader=csv.DictReader, **kwargs ): <NEW_LINE> <INDENT> if "b" in open_flags: <NEW_LINE> <INDENT> raise Exception("Can not use binary open mode with CSVExtract") <NEW_LINE> <DEDENT> f, _, close = open_filepath_or_buffer( f, open_flags=open_flags, compression=compression ) <NEW_LINE> try: <NEW_LINE> <INDENT> reader = reader(f, **kwargs) <NEW_LINE> if chunksize: <NEW_LINE> <INDENT> for chunk in read_chunks(reader, chunksize, limit=nrows): <NEW_LINE> <INDENT> self.push(chunk) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> rows = [] <NEW_LINE> for i, row in enumerate(reader): <NEW_LINE> <INDENT> if nrows and i >= nrows: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> rows.append(row) <NEW_LINE> <DEDENT> self.push(rows) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if close: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f.close() <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass | Extract data from a CSV | 625990677b180e01f3e49c27 |
class _FunctionLikeDelimitedShorthand(basic.DelimitedShorthand, metaclass=AutoEscapingCacher): <NEW_LINE> <INDENT> separator = '|' <NEW_LINE> assignment_operator = '=' <NEW_LINE> _python_identifier = r'[^\d\W]\w*' <NEW_LINE> @classmethod <NEW_LINE> def swallow(cls, function): <NEW_LINE> <INDENT> return cls(function) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def register(cls, function): <NEW_LINE> <INDENT> cls.swallow(function) <NEW_LINE> return function | Base class for further conveniences. | 625990677b25080760ed88a5 |
class SerializedStore(Store): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def _get(self, key: bytes) -> Optional[bytes]: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _set(self, key: bytes, value: Optional[bytes]) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _del(self, key: bytes) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _iterkeys(self) -> Iterator[bytes]: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _itervalues(self) -> Iterator[bytes]: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _iteritems(self) -> Iterator[Tuple[bytes, bytes]]: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _size(self) -> int: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _contains(self, key: bytes) -> bool: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _clear(self) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> def apply_changelog_batch(self, batch: Iterable[EventT], to_key: Callable[[Any], Any], to_value: Callable[[Any], Any]) -> None: <NEW_LINE> <INDENT> for event in batch: <NEW_LINE> <INDENT> key = event.message.key <NEW_LINE> if key is None: <NEW_LINE> <INDENT> raise TypeError( f'Changelog entry is missing key: {event.message}') <NEW_LINE> <DEDENT> value = event.message.value <NEW_LINE> if value is None: <NEW_LINE> <INDENT> self._del(key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._set(key, value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getitem__(self, key: Any) -> Any: <NEW_LINE> <INDENT> value = self._get(self._encode_key(key)) <NEW_LINE> if value is None: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> return self._decode_value(value) <NEW_LINE> <DEDENT> def __setitem__(self, key: Any, value: Any) -> None: <NEW_LINE> <INDENT> return self._set(self._encode_key(key), self._encode_value(value)) <NEW_LINE> <DEDENT> def __delitem__(self, key: Any) -> None: <NEW_LINE> <INDENT> return self._del(self._encode_key(key)) <NEW_LINE> <DEDENT> def __iter__(self) -> Iterator: <NEW_LINE> <INDENT> yield from self._keys_decoded() <NEW_LINE> <DEDENT> def __len__(self) -> int: <NEW_LINE> <INDENT> return self._size() <NEW_LINE> <DEDENT> def __contains__(self, key: Any) -> bool: <NEW_LINE> <INDENT> return self._contains(self._encode_key(key)) <NEW_LINE> <DEDENT> def keys(self) -> KeysView: <NEW_LINE> <INDENT> return _SerializedStoreKeysView(self) <NEW_LINE> <DEDENT> def _keys_decoded(self) -> Iterator: <NEW_LINE> <INDENT> for key in self._iterkeys(): <NEW_LINE> <INDENT> yield self._decode_key(key) <NEW_LINE> <DEDENT> <DEDENT> def values(self) -> ValuesView: <NEW_LINE> <INDENT> return _SerializedStoreValuesView(self) <NEW_LINE> <DEDENT> def _values_decoded(self) -> Iterator: <NEW_LINE> <INDENT> for value in self._itervalues(): <NEW_LINE> <INDENT> yield self._decode_value(value) <NEW_LINE> <DEDENT> <DEDENT> def items(self) -> ItemsView: <NEW_LINE> <INDENT> return _SerializedStoreItemsView(self) <NEW_LINE> <DEDENT> def _items_decoded(self) -> Iterator[Tuple[Any, Any]]: <NEW_LINE> <INDENT> for key, value in self._iteritems(): <NEW_LINE> <INDENT> yield self._decode_key(key), self._decode_value(value) <NEW_LINE> <DEDENT> <DEDENT> def clear(self) -> None: <NEW_LINE> <INDENT> self._clear() | Base class for table storage drivers requiring serialization. | 625990674e4d562566373b8e |
class Constraint: <NEW_LINE> <INDENT> type_var = 0 <NEW_LINE> op = 0 <NEW_LINE> target = Undefined(Type) <NEW_LINE> def __repr__(self) -> str: <NEW_LINE> <INDENT> op_str = '<:' <NEW_LINE> if self.op == SUPERTYPE_OF: <NEW_LINE> <INDENT> op_str = ':>' <NEW_LINE> <DEDENT> return '{} {} {}'.format(self.type_var, op_str, self.target) <NEW_LINE> <DEDENT> def __init__(self, type_var: int, op: int, target: Type) -> None: <NEW_LINE> <INDENT> self.type_var = type_var <NEW_LINE> self.op = op <NEW_LINE> self.target = target | A representation of a type constraint.
It can be either T <: type or T :> type (T is a type variable). | 625990677cff6e4e811b71cf |
class CommentViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Comment.objects.all() <NEW_LINE> serializer_class = CommentSerializer <NEW_LINE> lookup_field = 'comment_id' <NEW_LINE> permission_classes = [permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly] <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(owner=self.request.user) | This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions. | 62599067d486a94d0ba2d746 |
class VitaReader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._streams = {} <NEW_LINE> <DEDENT> def fromstream(self, stream_file): <NEW_LINE> <INDENT> for stream_id, vita_packet_data in _packetize_vita_stream(stream_file): <NEW_LINE> <INDENT> if stream_id not in self._streams: <NEW_LINE> <INDENT> self._streams[stream_id] = StringIO() <NEW_LINE> <DEDENT> self._streams[stream_id].write(vita_packet_data) <NEW_LINE> <DEDENT> <DEDENT> def asarrays(self): <NEW_LINE> <INDENT> return { hex(stream_id): np.fromstring(sio.getvalue(), dtype="<i2") for stream_id, sio in self._streams.iteritems() } | A utility class that reads from a file-like object into one
or more NumPy arrays containing samples. | 62599067f548e778e596cd12 |
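A usage sketch, assuming a captured VITA stream on disk (the filename is hypothetical):

reader = VitaReader()
with open('capture.vita', 'rb') as stream:
    reader.fromstream(stream)
arrays = reader.asarrays()
# e.g. {'0x10': np.ndarray of little-endian int16 samples}, keyed by hex stream id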
class CgInstance: <NEW_LINE> <INDENT> def __init__(self, mod, iname, cname): <NEW_LINE> <INDENT> self.mod = mod <NEW_LINE> self.iname = iname <NEW_LINE> self.cname = cname <NEW_LINE> self.wireports = [] <NEW_LINE> self.genmap = dict() <NEW_LINE> <DEDENT> def add_port_to(self, pname, eqval): <NEW_LINE> <INDENT> at = Attrib(names.give('ow_{0}_{1}'.format(self.iname, pname)), None) <NEW_LINE> if eqval: <NEW_LINE> <INDENT> at.set_eqdef(eqval) <NEW_LINE> <DEDENT> self.mod.add_eqs([at]) <NEW_LINE> at.get_core().mark_as_necessary() <NEW_LINE> wp = ModWirePort(ModWirePort.IN, pname, NId(at.get_core())) <NEW_LINE> self.wireports.append(wp) <NEW_LINE> return at.get_core() <NEW_LINE> <DEDENT> def add_port_from(self, pname, dtype): <NEW_LINE> <INDENT> at = Attrib(names.give('iw_{0}_{1}'.format(self.iname, pname)), dtype) <NEW_LINE> at.get_core().mark_as_necessary() <NEW_LINE> at.get_core().set_allow_subst(False) <NEW_LINE> wp = ModWirePort(ModWirePort.OUT, pname, NId(at.get_core())) <NEW_LINE> self.wireports.append(wp) <NEW_LINE> return at.get_core() <NEW_LINE> <DEDENT> def add_genmap(self, gm): <NEW_LINE> <INDENT> self.genmap.update(gm) <NEW_LINE> <DEDENT> def vhdl_print_instantiation(self): <NEW_LINE> <INDENT> sl = ['{0} : {1}'.format(self.iname, self.cname)] <NEW_LINE> if len(self.genmap) > 0: <NEW_LINE> <INDENT> sl.append('generic map (') <NEW_LINE> j = 0 <NEW_LINE> for k in self.genmap.keys(): <NEW_LINE> <INDENT> j += 1 <NEW_LINE> if j < len(self.genmap): <NEW_LINE> <INDENT> comma = ',' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> comma = '' <NEW_LINE> <DEDENT> sl.append((' '*4)+'{0} => {1}{2}'.format(k, self.genmap[k], comma)) <NEW_LINE> <DEDENT> sl.append(')') <NEW_LINE> <DEDENT> sl.append('port map (') <NEW_LINE> for wp in self.wireports: <NEW_LINE> <INDENT> sl.extend(wp.vhdl_print_instance_port()) <NEW_LINE> <DEDENT> sl.append((' '*4)+'clk => clk, rst => rst') <NEW_LINE> sl.append(');') <NEW_LINE> sl.append('') <NEW_LINE> return sl | Codegen: component instance | 625990673cc13d1c6d466ecc |
class NeedTransferRet: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.I64, 'amt', None, None, ), (2, TType.I64, 'count', None, None, ), ) <NEW_LINE> def __init__(self, amt=None, count=None,): <NEW_LINE> <INDENT> self.amt = amt <NEW_LINE> self.count = count <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I64: <NEW_LINE> <INDENT> self.amt = iprot.readI64(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I64: <NEW_LINE> <INDENT> self.count = iprot.readI64(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('NeedTransferRet') <NEW_LINE> if self.amt is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('amt', TType.I64, 1) <NEW_LINE> oprot.writeI64(self.amt) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.count is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('count', TType.I64, 2) <NEW_LINE> oprot.writeI64(self.count) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.amt is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field amt is unset!') <NEW_LINE> <DEDENT> if self.count is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field count is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- amt
- count | 62599067f7d966606f74947e |
class TxSegWit(namedtuple("Tx", "version marker flag inputs outputs " "witness locktime")): <NEW_LINE> <INDENT> pass | Class representing a SegWit transaction. | 625990674a966d76dd5f067c |
class ValidateHandler(BaseHandler): <NEW_LINE> <INDENT> name = "validator" <NEW_LINE> kwargs = { "GET": { "url": {"type": str, "location": "query", "required": True} }, "POST": { "url": {"type": str, "location": "form"} } } <NEW_LINE> async def get(self): <NEW_LINE> <INDENT> if self.request.body: <NEW_LINE> <INDENT> raise BadRequest(details="GET takes no request body.") <NEW_LINE> <DEDENT> raw = await self.download(self.args.url) <NEW_LINE> self.validate(raw) <NEW_LINE> <DEDENT> async def post(self): <NEW_LINE> <INDENT> if self.args.url: <NEW_LINE> <INDENT> raw = await self.download(self.args.url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raw = self.request.body <NEW_LINE> <DEDENT> self.validate(raw) <NEW_LINE> <DEDENT> async def download(self, url): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> file = await download_async(url) <NEW_LINE> <DEDENT> except DownloadError as err: <NEW_LINE> <INDENT> raise BadRequest(details=str(err)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return file.raw <NEW_LINE> <DEDENT> <DEDENT> def validate(self, raw): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> smartapi = SmartAPI(SmartAPI.VALIDATION_ONLY) <NEW_LINE> smartapi.raw = raw <NEW_LINE> smartapi.validate() <NEW_LINE> <DEDENT> except (ControllerError, AssertionError) as err: <NEW_LINE> <INDENT> raise BadRequest(details=str(err)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.finish({ 'success': True, 'details': f'valid SmartAPI ({smartapi.version}) metadata.' }) | Validate a Swagger/OpenAPI document.
Support three types of requests.
GET /api/validate?url=<url>
POST /api/validate
url=<url>
POST /api/validate
{
"openapi": "3.0.0",
...
} | 62599067462c4b4f79dbd18e |
class Projectile(pg.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.vel = (5, 0) <NEW_LINE> self.rect = pg.Rect(x, y, 40, 10) <NEW_LINE> <DEDENT> def update(self, game): <NEW_LINE> <INDENT> self.rect.move_ip(self.vel[0], self.vel[1]) <NEW_LINE> if self.rect.left > SCREEN_RES['x']: <NEW_LINE> <INDENT> game.score += 1 <NEW_LINE> self.kill() <NEW_LINE> <DEDENT> pg.draw.rect(game.screen, BLACK, self.rect) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def make_projectiles(cls, rate): <NEW_LINE> <INDENT> n = 1 <NEW_LINE> while True: <NEW_LINE> <INDENT> if not n % rate: <NEW_LINE> <INDENT> yield cls(0, random.randint( int(SCREEN_RES['y'] * 2 / 3), SCREEN_RES['y'] - 10)) <NEW_LINE> <DEDENT> n += 1 <NEW_LINE> yield | Models a flying projectile in the game | 62599067796e427e5384feff |
class ActorSystemMessage(object): <NEW_LINE> <INDENT> pass | Base class for all ActorSystem Messages for easier isinstance
identification. | 625990677c178a314d78e7b0 |
class ConnectionIntersection(Intersection): <NEW_LINE> <INDENT> HEARTBEAT_TIMEOUT = 30 <NEW_LINE> send_control_messages = False <NEW_LINE> def __init__(self, bind_address='tcp://0.0.0.0:7007', **kwargs): <NEW_LINE> <INDENT> super(ConnectionIntersection, self).__init__(**kwargs) <NEW_LINE> self.process_statistics = {} <NEW_LINE> self.ramp_socks = {} <NEW_LINE> self.process_address_to_uuid = {} <NEW_LINE> self.queue_processes = {} <NEW_LINE> self.process_id_to_name = {} <NEW_LINE> self.bind_address = bind_address <NEW_LINE> <DEDENT> def process(self, message): <NEW_LINE> <INDENT> connection_updates = message.content <NEW_LINE> self.process_id_to_name[connection_updates['meta']['id']] = connection_updates['meta']['name'] <NEW_LINE> for queue, consumers in connection_updates['streams'].items(): <NEW_LINE> <INDENT> if queue not in self.queue_processes: <NEW_LINE> <INDENT> self.queue_processes[queue] = { 'streams': [], 'stream_heartbeats': {}, 'grouping': None } <NEW_LINE> <DEDENT> for consumer in consumers: <NEW_LINE> <INDENT> if consumer not in self.queue_processes[queue]['streams']: <NEW_LINE> <INDENT> self.process_address_to_uuid[consumer] = connection_updates['meta']['id'] <NEW_LINE> self.queue_processes[queue]['streams'].append(consumer) <NEW_LINE> self.queue_processes[queue]['grouping'] = connection_updates['meta']['grouping'] <NEW_LINE> <DEDENT> self.queue_processes[queue]['stream_heartbeats'][consumer] = { 'heartbeat': current_heartbeat(), 'process_name': connection_updates['meta']['name'], 'process_id': connection_updates['meta']['id'] } <NEW_LINE> logger.debug("Received heartbeat from %s on queue %s - current value %s" % (consumer, queue, self.queue_processes[queue]['stream_heartbeats'][consumer])) <NEW_LINE> <DEDENT> <DEDENT> yield <NEW_LINE> <DEDENT> def connection_thread(self, context=None, **kwargs): <NEW_LINE> <INDENT> while not self.receive_port: <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> <DEDENT> broadcast_connection_sock = context.socket(zmq.PUB) <NEW_LINE> broadcast_connection_sock.bind(self.bind_address) <NEW_LINE> set_timeouts_on_socket(broadcast_connection_sock) <NEW_LINE> self.queue_processes['_update_connections'] = { 'streams': ['tcp://%s:%s' % (get_ip(), self.receive_port)], 'grouping': None, 'stream_heartbeats': {} } <NEW_LINE> while True: <NEW_LINE> <INDENT> for queue, consumers in self.queue_processes.items(): <NEW_LINE> <INDENT> for consumer, heartbeat_info in consumers['stream_heartbeats'].items(): <NEW_LINE> <INDENT> if current_heartbeat() > (heartbeat_info['heartbeat'] + self.HEARTBEAT_TIMEOUT): <NEW_LINE> <INDENT> logger.warn("Removing %s from %s due to missing heartbeat" % (consumer, queue)) <NEW_LINE> self.queue_processes[queue]['streams'].remove(consumer) <NEW_LINE> self.queue_processes[queue]['stream_heartbeats'].pop(consumer) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> broadcast_connection_sock.send_json(self.queue_processes) <NEW_LINE> logger.debug("Announced %s", self.queue_processes) <NEW_LINE> time.sleep(5) | Responsible to receiving and publishing information about the different message endpoints. Every ramp/intersection
will subscribe to this information and use it for routing the messages correctly.
While implemented with the intersection interface, it is currently only possibly to run this as a single instance
as it has internal state which is not shared.
It should be possible to extend this class and override it to store state in external systems such as Consul,
ZooKeeper, or something else. | 62599067fff4ab517ebcefa4
class testReCaptcha(unittest.TestCase): <NEW_LINE> <INDENT> layer = INTEGRATION_RECAPTCHA_TESTING <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.portal = self.layer['portal'] <NEW_LINE> setRoles(self.portal, TEST_USER_ID, ['Manager']) <NEW_LINE> self.portal.invokeFactory('Survey', 's1') <NEW_LINE> self.s1 = getattr(self.portal, 's1') <NEW_LINE> self.s1.invokeFactory('Survey Text Question', 'stq1') <NEW_LINE> self.s1.setShowCaptcha(True) <NEW_LINE> recapcha_settings = getRecaptchaSettings() <NEW_LINE> recapcha_settings.public_key = 'foo' <NEW_LINE> recapcha_settings.private_key = 'bar' <NEW_LINE> self.layer['request'].set('ACTUAL_URL', self.s1.absolute_url()) <NEW_LINE> <DEDENT> def testIncludeReCaptcha(self): <NEW_LINE> <INDENT> result = self.s1.survey_view(REQUEST=Request()) <NEW_LINE> assert '<label for="recaptcha_response_field">Protection from spam</label>' in result <NEW_LINE> <DEDENT> def testValidationReCaptcha(self): <NEW_LINE> <INDENT> self.layer['request'].form['stq1'] = 'test' <NEW_LINE> dummy_controller_state = ControllerState( id='survey_view', context=self.s1, button='submit', status='success', errors={}, next_action=None, ) <NEW_LINE> controller = self.portal.portal_form_controller <NEW_LINE> controller_state = controller.validate( dummy_controller_state, self.layer['request'], ['validate_survey', ] ) <NEW_LINE> assert controller_state.getErrors() == {}, "Validation error raised: %s" % controller_state.getErrors() | Ensure captcha works correctly | 625990678e71fb1e983bd24e |
class CmdBlindHelp(Command): <NEW_LINE> <INDENT> key = "help" <NEW_LINE> aliases = "h" <NEW_LINE> locks = "cmd:all()" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> self.caller.msg("You are beyond help ... until you can see again.") | Help function while in the blinded state
Usage:
help | 62599067dd821e528d6da545 |
class TripsStatisticsView(APIView): <NEW_LINE> <INDENT> model = Trip <NEW_LINE> def get(self, request, **kwargs): <NEW_LINE> <INDENT> country_list = Country.objects.exclude(schema_name='public').all() <NEW_LINE> results = [] <NEW_LINE> for country in country_list: <NEW_LINE> <INDENT> connection.set_tenant(country) <NEW_LINE> country_planned_count = Trip.objects.filter( status=Trip.PLANNED ).count() <NEW_LINE> country_completed_count = Trip.objects.filter( status=Trip.COMPLETED ).count() <NEW_LINE> country_approved_count = Trip.objects.filter( status=Trip.APPROVED ).count() <NEW_LINE> country_all_count = Trip.objects.count() <NEW_LINE> section_list = Section.objects.values_list('name', flat=True) <NEW_LINE> section_results = [] <NEW_LINE> for section in section_list: <NEW_LINE> <INDENT> section_completed_count = Trip.objects.filter( section__name=section, status=Trip.COMPLETED ).count() <NEW_LINE> section_approved_count = Trip.objects.filter( section__name=section, status=Trip.APPROVED ).count() <NEW_LINE> section_planned_count = Trip.objects.filter( section__name=section, status=Trip.PLANNED ).count() <NEW_LINE> section_total_count = Trip.objects.filter( section__name=section ).count() <NEW_LINE> section_results.append({ "name": section, "completed": section_completed_count, "approved": section_approved_count, "planned": section_planned_count, "total": section_total_count }) <NEW_LINE> <DEDENT> trips_by_country = { 'planned': country_planned_count, 'completed': country_completed_count, 'approved': country_approved_count, 'total': country_all_count, } <NEW_LINE> results.append({ 'countryName': country.name, 'totals': trips_by_country, 'sections': section_results }) <NEW_LINE> <DEDENT> return Response(results) | Get the list of all Trips in all countries | 62599067009cb60464d02cc2 |
class Body14(object): <NEW_LINE> <INDENT> swagger_types = { 'description': 'str', 'name': 'str' } <NEW_LINE> attribute_map = { 'description': 'description', 'name': 'name' } <NEW_LINE> def __init__(self, description=None, name=None): <NEW_LINE> <INDENT> self._description = None <NEW_LINE> self._name = None <NEW_LINE> self.discriminator = None <NEW_LINE> if description is not None: <NEW_LINE> <INDENT> self.description = description <NEW_LINE> <DEDENT> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return self._description <NEW_LINE> <DEDENT> @description.setter <NEW_LINE> def description(self, description): <NEW_LINE> <INDENT> self._description = description <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Body14): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259906797e22403b383c696 |
class DistributedOperationStore(dict): <NEW_LINE> <INDENT> def __init__(self, manager): <NEW_LINE> <INDENT> self._manager = manager <NEW_LINE> <DEDENT> def store(self, *args, **kwargs): <NEW_LINE> <INDENT> operation_id = str(uuid.uuid4()) <NEW_LINE> self[operation_id] = kwargs <NEW_LINE> kwargs['submit_moment'] = time.time() <NEW_LINE> state = constants.OperationState.init.value <NEW_LINE> kwargs['state'] = state <NEW_LINE> kwargs['args'] = args <NEW_LINE> helpers.publish(self, constants.OPERATION_NEWS_TOPIC, operation_id=operation_id, **kwargs) <NEW_LINE> return operation_id <NEW_LINE> <DEDENT> def update(self, operation_id=None, publish=True, **info): <NEW_LINE> <INDENT> if not operation_id and 'operation_id' not in info: <NEW_LINE> <INDENT> raise Exception("Operation id is not provided, can't update the store.") <NEW_LINE> <DEDENT> if operation_id not in self: <NEW_LINE> <INDENT> self[operation_id] = info <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[operation_id].update(info) <NEW_LINE> <DEDENT> if 'state' in self[operation_id] and 'submit_moment' in self[operation_id]: <NEW_LINE> <INDENT> if self[operation_id]['state'] == constants.OperationState.done.value: <NEW_LINE> <INDENT> self[operation_id]['duration'] = time.time() - self[operation_id]['submit_moment'] <NEW_LINE> <DEDENT> <DEDENT> if publish: <NEW_LINE> <INDENT> info['operation_id'] = operation_id <NEW_LINE> helpers.publish(self, constants.OPERATION_NEWS_TOPIC, **info) | Responsible for generating operation request ids, notifying peers about
operation updates, and keeping the state details of requested operations. | 6259906701c39578d7f142f9
class LDAModel(LinearRegression): <NEW_LINE> <INDENT> def _pre_processing_x(self, X): <NEW_LINE> <INDENT> X = self.standardize(X) <NEW_LINE> return X <NEW_LINE> <DEDENT> def train(self): <NEW_LINE> <INDENT> X = self.train_x <NEW_LINE> y = self.train_y <NEW_LINE> K = self.n_class <NEW_LINE> p = self.p <NEW_LINE> self.Mu = np.zeros((K, p)) <NEW_LINE> self.Pi = np.zeros((K, 1)) <NEW_LINE> self.Sigma_hat = np.zeros((p, p)) <NEW_LINE> for k in range(K): <NEW_LINE> <INDENT> mask = (y == k+1) <NEW_LINE> N_k = sum(mask) <NEW_LINE> X_k = X[mask.flatten(), :] <NEW_LINE> self.Pi[k] = N_k / self.N <NEW_LINE> self.Mu[k] = np.sum(X_k, axis=0).reshape((1, -1)) / N_k <NEW_LINE> self.Sigma_hat = self.Sigma_hat + ((X_k - self.Mu[k]).T @ (X_k - self.Mu[k])) / (self.N - K) <NEW_LINE> <DEDENT> <DEDENT> def linear_discriminant_func(self, x, k): <NEW_LINE> <INDENT> mu_k = self.Mu[k] <NEW_LINE> pi_k = self.Pi[k] <NEW_LINE> sigma_inv = self.math.pinv(self.Sigma_hat) <NEW_LINE> result = mu_k @ sigma_inv @ x.T - (mu_k @ sigma_inv @ mu_k.T)/2 + log(pi_k) <NEW_LINE> return result <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> X = self._pre_processing_x(X) <NEW_LINE> N = X.shape[0] <NEW_LINE> Y = np.zeros((N, self.n_class)) <NEW_LINE> for k in range(self.n_class): <NEW_LINE> <INDENT> delta_k = self.linear_discriminant_func(X, k) <NEW_LINE> Y[:, k] = delta_k <NEW_LINE> <DEDENT> y_hat = Y.argmax(axis=1).reshape((-1, 1)) + 1 <NEW_LINE> return y_hat | Linear Discriminant Analysis
from page 106 | 625990677047854f46340b3d |
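For reference, linear_discriminant_func computes the standard LDA discriminant (the "page 106" cited is presumably The Elements of Statistical Learning), using the pooled covariance estimate built in train:

\delta_k(x) = x^{\top} \hat{\Sigma}^{-1} \hat{\mu}_k
            - \tfrac{1}{2}\, \hat{\mu}_k^{\top} \hat{\Sigma}^{-1} \hat{\mu}_k
            + \log \hat{\pi}_k,
\qquad
\hat{y}(x) = \operatorname*{arg\,max}_k \, \delta_k(x)

with \hat{\pi}_k = N_k / N and \hat{\mu}_k the per-class mean, exactly the quantities train estimates.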
class TestGitLoaderFromArchive(CommonGitLoaderTests): <NEW_LINE> <INDENT> @pytest.fixture(autouse=True) <NEW_LINE> def init(self, swh_storage, datadir, tmp_path): <NEW_LINE> <INDENT> archive_name = "testrepo" <NEW_LINE> archive_path = os.path.join(datadir, f"{archive_name}.tgz") <NEW_LINE> self.repo_url = archive_path <NEW_LINE> self.loader = GitLoaderFromArchive( swh_storage, url=self.repo_url, archive_path=archive_path, visit_date=datetime.datetime( 2016, 5, 3, 15, 16, 32, tzinfo=datetime.timezone.utc ), ) | Tests for GitLoaderFromArchive. Only tests common scenario. | 625990673d592f4c4edbc667 |
class TestHashing(unittest.TestCase): <NEW_LINE> <INDENT> HASH_COUNT = 1000 <NEW_LINE> def test_hashes(self): <NEW_LINE> <INDENT> for i in xrange(self.HASH_COUNT): <NEW_LINE> <INDENT> start = ''.join([ random.choice(string.lowercase) for _ in xrange( random.randint(10, 20) ) ]) <NEW_LINE> h = routing.Hash(value=start) <NEW_LINE> self.assertEqual(routing.Hash.pack_hash(str(h)), h.parts) | Tests that packing and unpacking hashes works. | 625990675fdd1c0f98e5f70e
class RequestedVisibility(bb.Union): <NEW_LINE> <INDENT> _catch_all = None <NEW_LINE> public = None <NEW_LINE> team_only = None <NEW_LINE> password = None <NEW_LINE> def is_public(self): <NEW_LINE> <INDENT> return self._tag == 'public' <NEW_LINE> <DEDENT> def is_team_only(self): <NEW_LINE> <INDENT> return self._tag == 'team_only' <NEW_LINE> <DEDENT> def is_password(self): <NEW_LINE> <INDENT> return self._tag == 'password' <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, processor): <NEW_LINE> <INDENT> super(RequestedVisibility, self)._process_custom_annotations(annotation_type, processor) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'RequestedVisibility(%r, %r)' % (self._tag, self._value) | The access permission that can be requested by the caller for the shared
link. Note that the final resolved visibility of the shared link takes into
account other aspects, such as team and shared folder settings. Check the
:class:`ResolvedVisibility` for more info on the possible resolved
visibility values of shared links.
This class acts as a tagged union. Only one of the ``is_*`` methods will
return true. To get the associated value of a tag (if one exists), use the
corresponding ``get_*`` method.
:ivar public: Anyone who has received the link can access it. No login
required.
:ivar team_only: Only members of the same team can access the link. Login is
required.
:ivar password: A link-specific password is required to access the link.
Login is not required. | 62599067435de62698e9d593 |
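A usage sketch of the tagged-union dispatch the docstring describes; in the generated SDK the class attributes (public, team_only, password) are populated with singleton instances after the class definition:

visibility = RequestedVisibility.team_only
if visibility.is_public():
    access = 'anyone with the link'
elif visibility.is_team_only():
    access = 'team members only, login required'
elif visibility.is_password():
    access = 'link-specific password required'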
class DynamicFieldsSerializer(DynamicFieldsSerializerMixin, serializers.Serializer): <NEW_LINE> <INDENT> pass | A Serializer that takes additional 'fields' and 'exclude' arguments that
control which fields should be displayed. | 625990673317a56b869bf107
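A usage sketch, assuming the mixin follows the usual DRF dynamic-fields recipe of popping fields/exclude from kwargs before calling super().__init__ (the serializer below is hypothetical):

class ProfileSerializer(DynamicFieldsSerializer):
    id = serializers.IntegerField()
    username = serializers.CharField()
    email = serializers.EmailField()

# Renders only 'id' and 'username'; 'email' is dropped.
ProfileSerializer(user, fields=('id', 'username')).data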
class Chain: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> if(os.path.exists(os.path.join(os.getcwd(),'storage', 'q.bc'))): <NEW_LINE> <INDENT> self.qbc = Chain.__read_chain_from_disc() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.qbc = [Chain.__bang()] <NEW_LINE> self.save() <NEW_LINE> <DEDENT> self.current_quant = self.qbc[0] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def __bang(self): <NEW_LINE> <INDENT> return Quant(0, date.datetime.now(), "Small Bang", "0", "1") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def __create_next_quant(self, last_quant, data): <NEW_LINE> <INDENT> this_index = last_quant.index + 1 <NEW_LINE> this_timestamp = date.datetime.now() <NEW_LINE> this_data = data <NEW_LINE> last_hash = last_quant.hash <NEW_LINE> last_proof = last_quant.proof <NEW_LINE> return Quant(this_index, this_timestamp, this_data, last_hash, last_proof) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def __read_chain_from_disc(self): <NEW_LINE> <INDENT> with open(os.path.join(os.getcwd(),'storage', 'q.bc'), 'rb') as qbc_file: <NEW_LINE> <INDENT> return pickle.load(qbc_file) <NEW_LINE> <DEDENT> <DEDENT> def get_chain_stats(self): <NEW_LINE> <INDENT> sha = hasher.sha256() <NEW_LINE> sha.update(self.get_chain("json")) <NEW_LINE> return json.dumps({ "length": len(self.get_chain()), "hash": sha.hexdigest() }) <NEW_LINE> <DEDENT> def create_quant(self, data): <NEW_LINE> <INDENT> new_quant = Chain.__create_next_quant(self.current_quant, data) <NEW_LINE> self.qbc.append(new_quant) <NEW_LINE> self.current_quant = self.qbc[len(self.qbc) - 1] <NEW_LINE> Chain.save(self) <NEW_LINE> return new_quant <NEW_LINE> <DEDENT> def add_quant(self, quant): <NEW_LINE> <INDENT> self.qbc.append(quant) <NEW_LINE> self.current_quant = self.qbc[len(self.qbc) - 1] <NEW_LINE> Chain.save(self) <NEW_LINE> <DEDENT> def get_current_quant(self): <NEW_LINE> <INDENT> return self.current_quant <NEW_LINE> <DEDENT> def get_chain(self, format="default"): <NEW_LINE> <INDENT> return { "json": json.dumps([{ "index": str(quant.index), "timestamp": str(quant.timestamp), "data": str(quant.data), "hash": quant.hash, "proof": str(quant.proof) } for quant in self.qbc]), "serialized": pickle.dumps(self.qbc) }.get(format, self.qbc) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> with open(os.path.join(os.getcwd(),'storage', 'q.bc'), 'wb') as fp: <NEW_LINE> <INDENT> pickle.dump(self.qbc, fp) <NEW_LINE> <DEDENT> <DEDENT> def load(self): <NEW_LINE> <INDENT> self.qbc = self.__read_chain_from_disc() <NEW_LINE> <DEDENT> def get_remote_node_chain(self, host): <NEW_LINE> <INDENT> remote_chain = QBCN.read_chain(host) <NEW_LINE> self.qbc = pickle.loads(remote_chain) <NEW_LINE> self.save() | QBC structure implementation | 6259906766673b3332c31b86 |
class ChangeUsernameView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> form = ChangeUsernameForm <NEW_LINE> template_name = 'registration/username_change_form.html' <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> form = self.form() <NEW_LINE> return render(request, template_name=self.template_name, context={'form': form}) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> form = self.form(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> new_username = form.cleaned_data['new_username'] <NEW_LINE> if not User.objects.filter(username=new_username): <NEW_LINE> <INDENT> request.user.username = new_username <NEW_LINE> request.user.save() <NEW_LINE> messages.success(request, 'Имя пользователя было успешно изменено') <NEW_LINE> return redirect(reverse('core:index', args=[request.user.get_username()])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.error(request, 'Выбранное вами имя пользователя уже используется') <NEW_LINE> return redirect(reverse('core:username_change')) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return render(request, template_name=self.template_name, context={'form': form}) | Change username view | 625990677d847024c075db62 |
class TThreadedServer(TServer): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> TServer.__init__(self, *args) <NEW_LINE> self.daemon = kwargs.get("daemon", False) <NEW_LINE> <DEDENT> def serve(self): <NEW_LINE> <INDENT> self.serverTransport.listen() <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> client = self.serverTransport.accept() <NEW_LINE> if not client: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> t = threading.Thread(target=self.handle, args=(client,)) <NEW_LINE> t.setDaemon(self.daemon) <NEW_LINE> t.start() <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except Exception as x: <NEW_LINE> <INDENT> logger.exception(x) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def handle(self, client): <NEW_LINE> <INDENT> itrans = self.inputTransportFactory.getTransport(client) <NEW_LINE> otrans = self.outputTransportFactory.getTransport(client) <NEW_LINE> iprot = self.inputProtocolFactory.getProtocol(itrans) <NEW_LINE> oprot = self.outputProtocolFactory.getProtocol(otrans) <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self.processor.process(iprot, oprot) <NEW_LINE> <DEDENT> <DEDENT> except TTransport.TTransportException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except Exception as x: <NEW_LINE> <INDENT> logger.exception(x) <NEW_LINE> <DEDENT> itrans.close() <NEW_LINE> otrans.close() | Threaded server that spawns a new thread per each connection. | 625990677cff6e4e811b71d1 |
class Solution: <NEW_LINE> <INDENT> def productExcludeItself(self, A): <NEW_LINE> <INDENT> if not A: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> length = len(A) <NEW_LINE> l = [1] * length <NEW_LINE> r = [1] * length <NEW_LINE> for i in range(1, length): <NEW_LINE> <INDENT> l[i] = l[i-1] * A[i-1] <NEW_LINE> <DEDENT> for i in range(length - 2, -1, -1): <NEW_LINE> <INDENT> r[i] = r[i + 1] * A[i + 1] <NEW_LINE> <DEDENT> res = [0] * length <NEW_LINE> for i in range(length): <NEW_LINE> <INDENT> res[i] = l[i] * r[i] <NEW_LINE> <DEDENT> return res | @param A: Given an integer array A
@return: An integer array B where B[i] = A[0] * ... * A[i-1] * A[i+1] * ... * A[n-1] | 625990672ae34c7f260ac871
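A quick trace of the row above: for A = [1, 2, 3] the prefix products come out l = [1, 1, 2], the suffix products r = [6, 3, 1], and their elementwise product is the answer:

    s = Solution()
    print(s.productExcludeItself([1, 2, 3]))  # [6, 3, 2]  (6 = 2*3, 3 = 1*3, 2 = 1*2)
    print(s.productExcludeItself([]))         # []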
class MediaSnapshotByTimeOffsetItem(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Definition = None <NEW_LINE> self.PicInfoSet = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Definition = params.get("Definition") <NEW_LINE> if params.get("PicInfoSet") is not None: <NEW_LINE> <INDENT> self.PicInfoSet = [] <NEW_LINE> for item in params.get("PicInfoSet"): <NEW_LINE> <INDENT> obj = MediaSnapshotByTimePicInfoItem() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.PicInfoSet.append(obj) | Information of time point screenshot in VOD file
| 6259906716aa5153ce401c63 |
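A hedged round-trip sketch for the model row above; `AbstractModel` and `MediaSnapshotByTimePicInfoItem` come from the surrounding SDK, and the field names inside `PicInfoSet` (`TimeOffset`, `Path`) are assumptions here, since that class is not shown in the row:

    item = MediaSnapshotByTimeOffsetItem()
    item._deserialize({
        "Definition": 10,
        "PicInfoSet": [
            {"TimeOffset": 0.0, "Path": "/snapshot/0.jpg"},  # assumed field names
        ],
    })
    print(item.Definition)       # 10
    print(len(item.PicInfoSet))  # 1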
class Join_PDF(object): <NEW_LINE> <INDENT> def __init__(self, list_number: int, location='C:/Users/manue/OneDrive - VANRENTA, S.A. DE C.V/FINANZAS/BURZATILIZACION/VRTCB20/ENDOSO PAGARES/DOCUMENTO ESCANEADO/separados/', destination='C:/Users/manue/OneDrive - VANRENTA, S.A. DE C.V/FINANZAS/BURZATILIZACION/VRTCB20/ENDOSO PAGARES/DOCUMENTO ESCANEADO/'): <NEW_LINE> <INDENT> self.list = list_number <NEW_LINE> self.location = location <NEW_LINE> self.destination = destination <NEW_LINE> <DEDENT> def join(self): <NEW_LINE> <INDENT> os.chdir(self.location) <NEW_LINE> pdf2merge = [] <NEW_LINE> for filename in os.listdir('.'): <NEW_LINE> <INDENT> if filename.endswith('.pdf'): <NEW_LINE> <INDENT> pdf2merge.append(filename) <NEW_LINE> <DEDENT> <DEDENT> pdfWriter = PdfFileWriter() <NEW_LINE> open_files = [] <NEW_LINE> for filename in pdf2merge: <NEW_LINE> <INDENT> pdfFileObj = open(filename, 'rb') <NEW_LINE> open_files.append(pdfFileObj) <NEW_LINE> pdfReader = PdfFileReader(pdfFileObj) <NEW_LINE> for pageNum in range(pdfReader.numPages): <NEW_LINE> <INDENT> pdfWriter.addPage(pdfReader.getPage(pageNum)) <NEW_LINE> <DEDENT> <DEDENT> with open(f'{self.destination}lista_{self.list}.pdf', 'wb') as pdfOutput: <NEW_LINE> <INDENT> pdfWriter.write(pdfOutput) <NEW_LINE> <DEDENT> for pdfFileObj in open_files: <NEW_LINE> <INDENT> pdfFileObj.close() | Class to join PDF files into a single one | 62599067f7d966606f74947f
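Usage of the row above is a one-liner; the default `location`/`destination` paths are the row's own data, so a sketch only needs to override them (output lands in `destination` as `lista_<n>.pdf`):

    # Merge every .pdf found in ./separados/ into a single ./lista_7.pdf
    Join_PDF(7, location='./separados/', destination='./').join()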
class DatastoreIndexesXmlToYaml(base.Command): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> parser.add_argument( 'xml_file', help='Path to the datastore-indexes.xml file.') <NEW_LINE> parser.add_argument( '--generated-indexes-file', help=('If specified, include the auto-generated xml file too, and ' 'merge the resulting entries appropriately. Note that this file ' 'is usually named ' '`WEB-INF/appengine-generated/datastore-indexes-auto.xml`.')) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> src = os.path.abspath(args.xml_file) <NEW_LINE> dst = os.path.join(os.path.dirname(src), 'index.yaml') <NEW_LINE> auto_src = None <NEW_LINE> if args.generated_indexes_file: <NEW_LINE> <INDENT> auto_src = os.path.abspath(args.generated_indexes_file) <NEW_LINE> <DEDENT> entry = migrate_config.REGISTRY['datastore-indexes-xml-to-yaml'] <NEW_LINE> migrate_config.Run(entry, src=src, dst=dst, auto_src=auto_src) | Convert a datastore-indexes.xml file to index.yaml. | 62599067aad79263cf42ff42 |
class AggregatedBaseline(Baseline): <NEW_LINE> <INDENT> def __init__(self, baselines, scope='aggregated-baseline', summary_labels=()): <NEW_LINE> <INDENT> self.baselines = dict() <NEW_LINE> for name in sorted(baselines): <NEW_LINE> <INDENT> self.baselines[name] = Baseline.from_spec( spec=baselines[name], kwargs=dict(summary_labels=summary_labels)) <NEW_LINE> <DEDENT> self.linear = Linear(size=1, bias=0.0, scope='prediction', summary_labels=summary_labels) <NEW_LINE> super(AggregatedBaseline, self).__init__(scope, summary_labels) <NEW_LINE> <DEDENT> def tf_predict(self, states, internals, update): <NEW_LINE> <INDENT> predictions = list() <NEW_LINE> for name in sorted(states): <NEW_LINE> <INDENT> prediction = self.baselines[name].predict(states=states[name], internals=internals, update=update) <NEW_LINE> predictions.append(prediction) <NEW_LINE> <DEDENT> predictions = tf.stack(values=predictions, axis=1) <NEW_LINE> prediction = self.linear.apply(x=predictions) <NEW_LINE> return tf.squeeze(input=prediction, axis=1) <NEW_LINE> <DEDENT> def tf_regularization_loss(self): <NEW_LINE> <INDENT> regularization_loss = super(AggregatedBaseline, self).tf_regularization_loss() <NEW_LINE> if regularization_loss is None: <NEW_LINE> <INDENT> losses = list() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> losses = [regularization_loss] <NEW_LINE> <DEDENT> for name in sorted(self.baselines): <NEW_LINE> <INDENT> regularization_loss = self.baselines[name].regularization_loss() <NEW_LINE> if regularization_loss is not None: <NEW_LINE> <INDENT> losses.append(regularization_loss) <NEW_LINE> <DEDENT> <DEDENT> regularization_loss = self.linear.regularization_loss() <NEW_LINE> if regularization_loss is not None: <NEW_LINE> <INDENT> losses.append(regularization_loss) <NEW_LINE> <DEDENT> if len(losses) > 0: <NEW_LINE> <INDENT> return tf.add_n(inputs=losses) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_variables(self, include_nontrainable=False): <NEW_LINE> <INDENT> baseline_variables = super(AggregatedBaseline, self).get_variables(include_nontrainable=include_nontrainable) <NEW_LINE> baselines_variables = [ variable for name in sorted(self.baselines) for variable in self.baselines[name].get_variables(include_nontrainable=include_nontrainable) ] <NEW_LINE> linear_variables = self.linear.get_variables(include_nontrainable=include_nontrainable) <NEW_LINE> return baseline_variables + baselines_variables + linear_variables | Baseline which aggregates per-state baselines. | 62599067460517430c432c1a |
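A hedged construction sketch for the row above; the per-state spec format (the `'mlp'` type and layer sizes) is an assumption about the TensorForce-style `Baseline.from_spec` registry, which the row only calls into:

    # One baseline spec per named state; the Linear layer then mixes the
    # per-state value estimates into a single scalar prediction.
    baseline = AggregatedBaseline(baselines={
        'image':   {'type': 'mlp', 'sizes': [64, 64]},   # assumed spec format
        'sensors': {'type': 'mlp', 'sizes': [32, 32]},
    })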