Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
class FactorizationMachine(QuadraticLayer): <NEW_LINE> <INDENT> def __init__(self, input_size, factorization_size=8, act="identity", **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.factorization_size = factorization_size <NEW_LINE> self.input_size = input_size <NEW_LINE> self.trainer = None <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> self.h = self.params.get("h", shape=(input_size,), dtype=np.float32) <NEW_LINE> if factorization_size > 0: <NEW_LINE> <INDENT> self.V = self.params.get("V", shape=(factorization_size, input_size), dtype=np.float32) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.V = self.params.get("V", shape=(1, input_size), dtype=np.float32) <NEW_LINE> <DEDENT> self.bias = self.params.get("bias", shape=(1,), dtype=np.float32) <NEW_LINE> <DEDENT> self.act = act <NEW_LINE> <DEDENT> def hybrid_forward(self, F, x, h, V, bias): <NEW_LINE> <INDENT> if self.factorization_size <= 0: <NEW_LINE> <INDENT> return bias + F.dot(x, h) <NEW_LINE> <DEDENT> Q = VtoQ(V, F) <NEW_LINE> Qx = F.FullyConnected(x, weight=Q, bias=None, no_bias=True, num_hidden=self.input_size) <NEW_LINE> act = {"identity": F.identity, "sigmoid": F.sigmoid, "tanh": F.tanh}[self.act] <NEW_LINE> return act(bias + F.dot(x, h) + F.sum(x*Qx, axis=1)) <NEW_LINE> <DEDENT> def get_bhQ(self): <NEW_LINE> <INDENT> V = nd.zeros(self.V.shape) if self.factorization_size == 0 else self.V.data() <NEW_LINE> return self.bias.data().asscalar(), self.h.data().asnumpy(), VtoQ(V, nd).asnumpy()
Factorization Machine as a neural network layer. Args: input_size (int): The dimension of input value. factorization_size (int (<=input_size)): The rank of decomposition of interaction terms. act (string, optional): Name of activation function applied on FM output: "identity", "sigmoid", or "tanh". (default="identity") **kwargs:
6259904d30dc7b76659a0c63
class KnittedSweaterDecorator(ClothingDecorator): <NEW_LINE> <INDENT> def __init__(self, person): <NEW_LINE> <INDENT> super().__init__(person) <NEW_LINE> <DEDENT> def decorate(self): <NEW_LINE> <INDENT> print("一件紫红色针织毛衣")
Knitted sweater decorator
6259904d3c8af77a43b68955
class CustomPeriod(proto.Message): <NEW_LINE> <INDENT> start_date = proto.Field( proto.MESSAGE, number=1, message=date_pb2.Date, ) <NEW_LINE> end_date = proto.Field( proto.MESSAGE, number=2, message=date_pb2.Date, )
All date times begin at 12 AM US and Canadian Pacific Time (UTC-8). Attributes: start_date (google.type.date_pb2.Date): Required. The start date must be after January 1, 2017. end_date (google.type.date_pb2.Date): Optional. The end date of the time period. Budgets with elapsed end date won't be processed. If unset, specifies to track all usage incurred since the start_date.
6259904db5575c28eb7136e1
class HashTagDecorator: <NEW_LINE> <INDENT> SEARCH_RE = re.compile(r'#\w+') <NEW_LINE> def replace(self, match, block_type): <NEW_LINE> <INDENT> if block_type == BLOCK_TYPES.CODE: <NEW_LINE> <INDENT> return match.group(0) <NEW_LINE> <DEDENT> return DOM.create_element( 'span', {'class': 'hash_tag'}, match.group(0) )
Wrap hash tags in spans with specific class.
6259904db830903b9686ee92
class LeagueData(CoreData): <NEW_LINE> <INDENT> _dto_type = LeagueDto <NEW_LINE> _renamed = {"leagueId": "id"} <NEW_LINE> def __call__(self, **kwargs): <NEW_LINE> <INDENT> if "entries" in kwargs: <NEW_LINE> <INDENT> self.entries = [LeagueEntryData(**entry) for entry in kwargs.pop("entries")] <NEW_LINE> <DEDENT> super().__call__(**kwargs) <NEW_LINE> return self
Contains the data for one League which has many entries.
6259904d82261d6c527308de
class MetaQuery(BaseQuery): <NEW_LINE> <INDENT> def __init__(self, inner): <NEW_LINE> <INDENT> self._inner = inner <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return internal.ReprPrettyPrinter().meta_query(self, []) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<MetaQuery %s>" % str(self) <NEW_LINE> <DEDENT> def _finalize_query(self, root, opts): <NEW_LINE> <INDENT> root.type = p.Query.META <NEW_LINE> self._inner._write_meta_query(root.meta_query, opts)
Queries that create, destroy, or examine databases or tables rather than working with actual data are instances of :class:`MetaQuery`.
6259904d23e79379d538d92e
class EncodingStyle_(SamlBase): <NEW_LINE> <INDENT> c_tag = 'encodingStyle' <NEW_LINE> c_namespace = NAMESPACE <NEW_LINE> c_children = SamlBase.c_children.copy() <NEW_LINE> c_attributes = SamlBase.c_attributes.copy() <NEW_LINE> c_child_order = SamlBase.c_child_order[:] <NEW_LINE> c_cardinality = SamlBase.c_cardinality.copy()
The http://schemas.xmlsoap.org/wsdl/soap/:encodingStyle element
6259904d1f037a2d8b9e5284
class UnsupportedMediaType(HTTPError): <NEW_LINE> <INDENT> pass
HTTP error raised for an unsupported media type.
6259904da219f33f346c7c32
class roleViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = role.objects.all() <NEW_LINE> serializer_class = roleSerializer
API endpoint that allows roles to be viewed or edited.
6259904d8a43f66fc4bf35c7
class SocosException(Exception): <NEW_LINE> <INDENT> pass
General socos exception
6259904dd4950a0f3b11185b
class PerFieldAnalyzerTestCase(PyLuceneTestCase): <NEW_LINE> <INDENT> def testPerField(self): <NEW_LINE> <INDENT> perField = HashMap() <NEW_LINE> perField.put("special", SimpleAnalyzer()) <NEW_LINE> analyzer = PerFieldAnalyzerWrapper(WhitespaceAnalyzer(), perField) <NEW_LINE> text = "Qwerty" <NEW_LINE> tokenStream = analyzer.tokenStream("field", StringReader(text)) <NEW_LINE> tokenStream.reset() <NEW_LINE> termAtt = tokenStream.getAttribute(CharTermAttribute.class_) <NEW_LINE> self.assertTrue(tokenStream.incrementToken()) <NEW_LINE> self.assertEqual("Qwerty", termAtt.toString(), "WhitespaceAnalyzer does not lowercase") <NEW_LINE> tokenStream = analyzer.tokenStream("special", StringReader(text)) <NEW_LINE> tokenStream.reset() <NEW_LINE> termAtt = tokenStream.getAttribute(CharTermAttribute.class_) <NEW_LINE> self.assertTrue(tokenStream.incrementToken()) <NEW_LINE> self.assertEqual("qwerty", termAtt.toString(), "SimpleAnalyzer lowercases")
Unit tests ported from Java Lucene
6259904db57a9660fecd2ead
class SparseVector(Vector): <NEW_LINE> <INDENT> def __init__(self, values, indices, length = 0, zero_test = lambda x : (x == 0)): <NEW_LINE> <INDENT> super(SparseVector, self).__init__(values, zero_test) <NEW_LINE> self.value=values <NEW_LINE> self.indice=indices <NEW_LINE> self.len=length <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.len <NEW_LINE> <DEDENT> def __getitem__(self, i): <NEW_LINE> <INDENT> if i in self.indice: <NEW_LINE> <INDENT> return self.value[self.indice.index(i)] <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, i, val): <NEW_LINE> <INDENT> if i in self.indice: <NEW_LINE> <INDENT> self.value[self.indice.index(i)]=val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.indice.append(i) <NEW_LINE> self.value.append(val) <NEW_LINE> <DEDENT> <DEDENT> def is_zero(self): <NEW_LINE> <INDENT> count=0 <NEW_LINE> ref=len(self.value) <NEW_LINE> for a in self.value: <NEW_LINE> <INDENT> if a==0: <NEW_LINE> <INDENT> count+=1 <NEW_LINE> <DEDENT> <DEDENT> if (count==ref): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def split(self): <NEW_LINE> <INDENT> obj=Vector(self.values,zero_test) <NEW_LINE> split=obj.split() <NEW_LINE> return split <NEW_LINE> <DEDENT> def merge(self, vector): <NEW_LINE> <INDENT> app=[] <NEW_LINE> ref=0 <NEW_LINE> temp=0 <NEW_LINE> while(ref<len(vector)): <NEW_LINE> <INDENT> app.append(vector[ref]) <NEW_LINE> ref=ref+1 <NEW_LINE> <DEDENT> while(temp<len(self.value)): <NEW_LINE> <INDENT> app.append(self.value[temp]) <NEW_LINE> temp=temp+1
Vector that has very few non-zero entries. Values and corresponding indices are kept in separate lists.
6259904d8da39b475be04621
class ModalitiesReplacement(Transformer): <NEW_LINE> <INDENT> def __init__(self, column, replacement=dict()): <NEW_LINE> <INDENT> self.column = column <NEW_LINE> self.replacement = replacement <NEW_LINE> <DEDENT> def transform(self, data): <NEW_LINE> <INDENT> data = data.copy() <NEW_LINE> data[self.column] = data[self.column].astype(object) <NEW_LINE> data[self.column].replace(to_replace=self.replacement, inplace=True) <NEW_LINE> return data
Transformer which replaces modalities present in column Parameters ---------- column : str column on which modalities must be replaced replacement : dict, optional, default is dict() replacement pattern where keys are original modalities and value is replacement Attributes ---------- column : str column on which modalities must be replaced replacement : dict replacement pattern where keys are original modalities and value is replacement
6259904dec188e330fdf9ccf
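A minimal sketch of the replacement pattern that ModalitiesReplacement.transform applies, written with pandas directly because the Transformer base class is not shown above; the column name and mapping here are hypothetical.

# Sketch of the modality-replacement step, assuming only pandas is available.
import pandas as pd

data = pd.DataFrame({"color": ["red", "blue", "crimson"]})   # hypothetical input
replacement = {"crimson": "red"}                              # original modality -> replacement

out = data.copy()
out["color"] = out["color"].astype(object)       # same cast as in transform()
out["color"] = out["color"].replace(to_replace=replacement)
print(out["color"].tolist())                     # ['red', 'blue', 'red']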
class ShiftUserField(fields.Raw): <NEW_LINE> <INDENT> def format(self, value): <NEW_LINE> <INDENT> u = User.query.get(value) <NEW_LINE> if u.name: <NEW_LINE> <INDENT> return u.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return u.email
get the user's name or email as a shift field
6259904dac7a0e7691f7390c
class updateAccount_args(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'accountInfo', (AccountInfo, AccountInfo.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, accountInfo=None,): <NEW_LINE> <INDENT> self.accountInfo = accountInfo <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.accountInfo = AccountInfo() <NEW_LINE> self.accountInfo.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('updateAccount_args') <NEW_LINE> if self.accountInfo is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('accountInfo', TType.STRUCT, 1) <NEW_LINE> self.accountInfo.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - accountInfo
6259904d596a897236128fc7
class PrivateLinkConnectionApprovalRequestResource(ProxyOnlyResource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, } <NEW_LINE> def __init__( self, *, kind: Optional[str] = None, private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, **kwargs ): <NEW_LINE> <INDENT> super(PrivateLinkConnectionApprovalRequestResource, self).__init__(kind=kind, **kwargs) <NEW_LINE> self.private_link_service_connection_state = private_link_service_connection_state
Private Endpoint Connection Approval ARM resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource Id. :vartype id: str :ivar name: Resource Name. :vartype name: str :ivar kind: Kind of resource. :vartype kind: str :ivar type: Resource type. :vartype type: str :ivar private_link_service_connection_state: The state of a private link connection. :vartype private_link_service_connection_state: ~azure.mgmt.web.v2020_12_01.models.PrivateLinkConnectionState
6259904dbe383301e0254c4c
class PersonResponsibleJson(object): <NEW_LINE> <INDENT> def __init__(self, id = None, name = ''): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'id: {0} name {1}'.format(self.id, self.name) <NEW_LINE> <DEDENT> def getAllAPI(): <NEW_LINE> <INDENT> list_person_responsible = PersonResponsibleAPI().get_all() <NEW_LINE> schema = PersonResponsibleSchema() <NEW_LINE> results = schema.load(list_person_responsible, many=True) <NEW_LINE> return results[0] <NEW_LINE> <DEDENT> def setPersonResponsible(self): <NEW_LINE> <INDENT> schema = PersonResponsibleSchema(only=['name']) <NEW_LINE> jsonPerResp = schema.dump(self) <NEW_LINE> resultsCreation = PersonResponsibleAPI().set_person_responsible(jsonData = jsonPerResp.data) <NEW_LINE> schema = PersonResponsibleSchema() <NEW_LINE> results = schema.load(resultsCreation) <NEW_LINE> return results[0]
This class manages the objects and is used to map them into JSON format
6259904da8ecb03325872643
class MyClass: <NEW_LINE> <INDENT> i = 12345 <NEW_LINE> def f(self): <NEW_LINE> <INDENT> return 'hello world'
Instance
6259904de76e3b2f99fd9e37
class RemoteData(DataBase, RemoteFileMixin): <NEW_LINE> <INDENT> _T = TypeVar("_T", bound="RemoteData") <NEW_LINE> def __init__( self, remote_path: str, *, timestamp: Optional[float] = None, url: Optional[URL] = None, cache_path: str = "", ) -> None: <NEW_LINE> <INDENT> DataBase.__init__(self, timestamp) <NEW_LINE> RemoteFileMixin.__init__( self, remote_path, url=url, cache_path=cache_path, ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_response_body( cls: Type[_T], body: Dict[str, Any], *, url: Optional[URL] = None, cache_path: str = "", ) -> _T: <NEW_LINE> <INDENT> data = cls( body["remotePath"], timestamp=body.get("timestamp"), url=url, cache_path=cache_path, ) <NEW_LINE> data.label._loads(body["label"]) <NEW_LINE> return data
RemoteData is a combination of a specific tensorbay dataset file and its label. It contains the file remote path, label information of the file and the file metadata, such as timestamp. A RemoteData instance contains one or several types of labels. Arguments: remote_path: The file remote path. timestamp: The timestamp for the file. url: The URL instance used to get and update url. cache_path: The path to store the cache. Attributes: path: The file remote path. timestamp: The timestamp for the file. label: The :class:`~tensorbay.label.label.Label` instance that contains all the label information of the file. url: The :class:`~tensorbay.utility.file.URL` instance used to get and update url.
6259904d3cc13d1c6d466b6a
class WussStructureTests(StructureStringTests): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.Struct = WussStructure <NEW_LINE> super(WussStructureTests,self).setUp() <NEW_LINE> self.WussNoPairs = WussStructure('.-_,~:') <NEW_LINE> self.WussOneHelix = WussStructure('[-{<(__)-->}]',-0.01) <NEW_LINE> self.WussTwoHelix = WussStructure('{[.]}(<>).',1.11) <NEW_LINE> self.WussThreeHelix = WussStructure('::(<<({__}),,([(__)])-->>)') <NEW_LINE> self.WussPseudo = WussStructure('<<__AA>>_aa::') <NEW_LINE> <DEDENT> def test_wuss_toPairs(self): <NEW_LINE> <INDENT> self.assertEqual(self.WussNoPairs.toPairs(),[]) <NEW_LINE> self.assertEqualItems(self.WussOneHelix.toPairs(), [(0,12),(2,11),(3,10),(4,7)]) <NEW_LINE> self.assertEqualItems(self.WussTwoHelix.toPairs(), [(0,4),(1,3),(5,8),(6,7)]) <NEW_LINE> self.assertEqualItems(self.WussThreeHelix.toPairs(), [(2,25),(3,24),(4,23),(5,10),(6,9),(13,20),(14,19),(15,18)]) <NEW_LINE> self.assertEqualItems(self.WussPseudo.toPairs(), [(0,7),(1,6)]) <NEW_LINE> <DEDENT> def test_wuss_toPartners(self): <NEW_LINE> <INDENT> self.assertEqual(self.WussNoPairs.toPartners(),[None]*6) <NEW_LINE> self.assertEqualItems(self.WussThreeHelix.toPartners(), [None,None,25,24,23,10,9,None,None,6,5,None,None,20,19, 18,None,None,15,14,13,None,None,4,3,2]) <NEW_LINE> self.assertEqualItems(self.WussPseudo.toPartners(), [7,6,None,None,None,None,1,0,None,None,None,None,None])
Test that WussStructure methods and properties work
6259904dd53ae8145f919894
class GeneralTestCase(LiveServerTestCase): <NEW_LINE> <INDENT> selenium = None <NEW_LINE> created_user = None <NEW_LINE> login_required = True <NEW_LINE> def setUp(self) -> None: <NEW_LINE> <INDENT> self.setup_fixtures() <NEW_LINE> if self.login_required: <NEW_LINE> <INDENT> force_login(self.created_user, self.selenium, self.live_server_url) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super().setUpClass() <NEW_LINE> cls.setup_selenium_driver() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> cls.selenium.quit() <NEW_LINE> super().tearDownClass() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def setup_fixtures(cls): <NEW_LINE> <INDENT> cls.created_user = User.objects.create_superuser( username=USERNAME, password=PASSWORD, first_name=FIRST_NAME ) <NEW_LINE> with open(os.path.join(TESTS_ROOT, './test_fixtures.json')) as fd: <NEW_LINE> <INDENT> fixtures = json.load(fd) <NEW_LINE> <DEDENT> for category in fixtures: <NEW_LINE> <INDENT> instance, _ = Category.objects.get_or_create(name=category['name']) <NEW_LINE> for product in category['products']: <NEW_LINE> <INDENT> _ = Product.objects.get_or_create(category=instance, **product) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def setup_selenium_driver(cls): <NEW_LINE> <INDENT> options = webdriver.ChromeOptions() <NEW_LINE> options.add_argument("window-size=1920x1480") <NEW_LINE> options.add_argument("disable-infobars") <NEW_LINE> options.add_argument("disable-extensions") <NEW_LINE> options.add_argument("headless") <NEW_LINE> cls.selenium = webdriver.Chrome(ChromeDriverManager().install(), chrome_options=options) <NEW_LINE> cls.selenium.implicitly_wait(10)
Parent class to set up the Selenium tests.
6259904db57a9660fecd2eae
class DjangoObjectPermissionsFilter(BaseFilterBackend): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> assert guardian, 'Using DjangoObjectPermissionsFilter, but django-guardian is not installed' <NEW_LINE> <DEDENT> perm_format = '%(app_label)s.view_%(model_name)s' <NEW_LINE> def filter_queryset(self, request, queryset, view): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> model_cls = queryset.model <NEW_LINE> kwargs = { 'app_label': model_cls._meta.app_label, 'model_name': model_cls._meta.module_name } <NEW_LINE> permission = self.perm_format % kwargs <NEW_LINE> return guardian.shortcuts.get_objects_for_user(user, permission, queryset)
A filter backend that limits results to those where the requesting user has read object level permissions.
6259904d507cdc57c63a61d2
@total_ordering <NEW_LINE> class InformationElementList(object): <NEW_LINE> <INDENT> def __init__(self, iterable = None): <NEW_LINE> <INDENT> self.inner = [] <NEW_LINE> if iterable: <NEW_LINE> <INDENT> for x in iterable: <NEW_LINE> <INDENT> self.append(x) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.inner) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.inner == other.inner <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.inner < other.inner <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "InformationElementList(" + ",".join((repr(x) for x in self.inner)) + ")" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "\n".join((str(x) for x in self.inner)) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> if not self.hashcache: <NEW_LINE> <INDENT> self.hashcache = reduce(operator.xor, (hash(x) for x in self.inner)) <NEW_LINE> <DEDENT> return self.hashcache <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.inner) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.inner[key] <NEW_LINE> <DEDENT> def index(self, x): <NEW_LINE> <INDENT> return self.inner.index(x) <NEW_LINE> <DEDENT> def append(self, ie): <NEW_LINE> <INDENT> self.inner.append(ie) <NEW_LINE> self.hashcache = None
A hashable ordered list of Information Elements. Used internally by templates, and to specify the order of tuples to the tuple append and iterator interfaces. Get an instance by calling :func:`spec_list`
6259904dcad5886f8bdc5a97
class ExtremeInitialization(Initialization): <NEW_LINE> <INDENT> def __init__(self, distance: dist.DistanceMetric): <NEW_LINE> <INDENT> self.distance = distance <NEW_LINE> <DEDENT> def __call__(self, data: Data, number_of_centroids: int) -> Centroids: <NEW_LINE> <INDENT> _validate(data, number_of_centroids) <NEW_LINE> residuals = _find_residuals(data) <NEW_LINE> centroids = np.nan * np.zeros((number_of_centroids, data.shape[1])) <NEW_LINE> centroids[0] = data[np.argmax(residuals)] <NEW_LINE> distances = np.inf * np.ones((data.shape[0], )) <NEW_LINE> for i in range(1, number_of_centroids): <NEW_LINE> <INDENT> current_distance = self.distance(data, centroids[np.newaxis, i - 1]) <NEW_LINE> distances[:] = np.minimum(current_distance.ravel(), distances) <NEW_LINE> centroids[i] = data[np.argmax(distances)] <NEW_LINE> <DEDENT> return centroids
Initializes k-means by picking extreme points
6259904d0c0af96317c5777a
class _PrefetchToDeviceEagerIterator(iterator_ops.EagerIterator): <NEW_LINE> <INDENT> def __init__(self, input_dataset, device, buffer_size): <NEW_LINE> <INDENT> with ops.device("/device:CPU:0"): <NEW_LINE> <INDENT> super(_PrefetchToDeviceEagerIterator, self).__init__(input_dataset) <NEW_LINE> input_iterator_handle = core_gen_dataset_ops.iterator_to_string_handle( self._resource) <NEW_LINE> <DEDENT> self._device = device <NEW_LINE> @function.Defun(dtypes.string) <NEW_LINE> def _prefetch_fn(handle): <NEW_LINE> <INDENT> remote_iterator = iterator_ops.Iterator.from_string_handle( handle, self.output_types, self.output_shapes, self.output_classes) <NEW_LINE> ret = remote_iterator.get_next() <NEW_LINE> return nest.flatten(sparse.serialize_sparse_tensors(ret)) <NEW_LINE> <DEDENT> _prefetch_fn.add_to_graph(None) <NEW_LINE> with ops.device(device): <NEW_LINE> <INDENT> self._buffering_resource = function_buffering_resource( f=_prefetch_fn, output_types=self._flat_output_types, target_device=gen_dataset_ops.iterator_get_device(self._resource), string_arg=input_iterator_handle, buffer_size=buffer_size, shared_name=iterator_ops._generate_shared_name( "function_buffer_resource")) <NEW_LINE> <DEDENT> <DEDENT> def _next_internal(self): <NEW_LINE> <INDENT> with context.execution_mode(context.SYNC): <NEW_LINE> <INDENT> with ops.device(self._device): <NEW_LINE> <INDENT> ret = gen_dataset_ops.function_buffering_resource_get_next( function_buffer_resource=self._buffering_resource, output_types=self._flat_output_types) <NEW_LINE> <DEDENT> return sparse.deserialize_sparse_tensors( nest.pack_sequence_as(self._output_types, ret), self._output_types, self._output_shapes, self._output_classes)
A replacement for `tf.data.Iterator` that prefetches to another device. Args: input_dataset: The input dataset one_shot: If true, we make a one shot iterator that's already initialized. device: A fully specified device string where we want to prefetch to buffer_size: Size of the prefetching buffer. shared_name: (Optional.) If non-empty, the returned iterator will be shared under the given name across multiple sessions that share the same devices (e.g. when using a remote server). Returns: An Iterator type object.
6259904d8e05c05ec3f6f874
class ListTopWafDataResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TopTypeData = None <NEW_LINE> self.TopIpData = None <NEW_LINE> self.TopUrlData = None <NEW_LINE> self.TopDomainData = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("TopTypeData") is not None: <NEW_LINE> <INDENT> self.TopTypeData = [] <NEW_LINE> for item in params.get("TopTypeData"): <NEW_LINE> <INDENT> obj = ScdnTypeData() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.TopTypeData.append(obj) <NEW_LINE> <DEDENT> <DEDENT> if params.get("TopIpData") is not None: <NEW_LINE> <INDENT> self.TopIpData = [] <NEW_LINE> for item in params.get("TopIpData"): <NEW_LINE> <INDENT> obj = ScdnTopData() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.TopIpData.append(obj) <NEW_LINE> <DEDENT> <DEDENT> if params.get("TopUrlData") is not None: <NEW_LINE> <INDENT> self.TopUrlData = [] <NEW_LINE> for item in params.get("TopUrlData"): <NEW_LINE> <INDENT> obj = ScdnTopUrlData() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.TopUrlData.append(obj) <NEW_LINE> <DEDENT> <DEDENT> if params.get("TopDomainData") is not None: <NEW_LINE> <INDENT> self.TopDomainData = [] <NEW_LINE> for item in params.get("TopDomainData"): <NEW_LINE> <INDENT> obj = ScdnTopDomainData() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.TopDomainData.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId")
ListTopWafData response structure
6259904db5575c28eb7136e3
class Solution(object): <NEW_LINE> <INDENT> def rightSideView(self, root): <NEW_LINE> <INDENT> if not root: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> l = [] <NEW_LINE> q = [root] <NEW_LINE> while q: <NEW_LINE> <INDENT> l.append(q[-1].val) <NEW_LINE> new_q = [] <NEW_LINE> for node in q: <NEW_LINE> <INDENT> if node.left: <NEW_LINE> <INDENT> new_q.append(node.left) <NEW_LINE> <DEDENT> if node.right: <NEW_LINE> <INDENT> new_q.append(node.right) <NEW_LINE> <DEDENT> <DEDENT> q = new_q <NEW_LINE> <DEDENT> return l
Given a binary tree, imagine yourself standing on its right side; return the values of the nodes you can see, ordered from top to bottom. Example: Input: [1,2,3,null,5,null,4] Output: [1, 3, 4] Explanation: 1 <--- / \ 2 3 <--- \ \ 5 4 <---
6259904d23e79379d538d931
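A usage sketch for the right-side-view Solution above on the docstring's example, assuming the flattened snippet is expanded into regular Python; a minimal TreeNode is defined here because the original snippet does not include one.

# Build the docstring's example tree [1,2,3,null,5,null,4] and print its right side view.
class TreeNode(object):
    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None

root = TreeNode(1)
root.left = TreeNode(2)
root.right = TreeNode(3)
root.left.right = TreeNode(5)
root.right.right = TreeNode(4)

print(Solution().rightSideView(root))   # [1, 3, 4]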
class By(object): <NEW_LINE> <INDENT> id = "id" <NEW_LINE> xpath = "xpath" <NEW_LINE> link = "link text" <NEW_LINE> plink = "partial link text" <NEW_LINE> name = "name" <NEW_LINE> tag = "tag name" <NEW_LINE> cls = "class name" <NEW_LINE> css = "css selector" <NEW_LINE> aid = 'accessibility id' <NEW_LINE> ui = '-android uiautomator'
Set of supported locator strategies
6259904db830903b9686ee94
class Comment(Base): <NEW_LINE> <INDENT> __tablename__ = 'comments' <NEW_LINE> __exclude_columns__ = tuple() <NEW_LINE> __get_by__ = ('id',) <NEW_LINE> karma = Column(Integer, default=0) <NEW_LINE> karma_critpath = Column(Integer, default=0) <NEW_LINE> text = Column(UnicodeText, nullable=False) <NEW_LINE> timestamp = Column(DateTime, default=datetime.utcnow) <NEW_LINE> update_id = Column(Integer, ForeignKey('updates.id'), nullable=False, index=True) <NEW_LINE> user_id = Column(Integer, ForeignKey('users.id'), nullable=False) <NEW_LINE> def url(self) -> str: <NEW_LINE> <INDENT> url = self.update.get_url() + '#comment-' + str(self.id) <NEW_LINE> return url <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_testcase_feedback(self) -> typing.List[TestCaseKarma]: <NEW_LINE> <INDENT> feedbacks = self.testcase_feedback <NEW_LINE> unique_feedbacks = set() <NEW_LINE> filtered_feedbacks = list() <NEW_LINE> for feedback in feedbacks: <NEW_LINE> <INDENT> if feedback.testcase.name not in unique_feedbacks: <NEW_LINE> <INDENT> unique_feedbacks.add(feedback.testcase.name) <NEW_LINE> filtered_feedbacks.append(feedback) <NEW_LINE> <DEDENT> <DEDENT> return filtered_feedbacks <NEW_LINE> <DEDENT> @property <NEW_LINE> def rss_title(self) -> str: <NEW_LINE> <INDENT> return "{} comment #{}".format(self.update.alias, self.id) <NEW_LINE> <DEDENT> def __json__(self, *args, **kwargs) -> dict: <NEW_LINE> <INDENT> result = super(Comment, self).__json__(*args, **kwargs) <NEW_LINE> if result['user']: <NEW_LINE> <INDENT> result['author'] = result['user']['name'] <NEW_LINE> <DEDENT> result['update_alias'] = result['update']['alias'] <NEW_LINE> result['update']['karma'] = self.update.karma <NEW_LINE> return result <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> karma = '0' <NEW_LINE> if self.karma != 0: <NEW_LINE> <INDENT> karma = '%+d' % (self.karma,) <NEW_LINE> <DEDENT> return "%s - %s (karma: %s)\n%s" % (self.user.name, self.timestamp, karma, self.text)
An update comment. Attributes: karma (int): The karma associated with this comment. Defaults to 0. karma_critpath (int): The critpath karma associated with this comment. Defaults to 0. **DEPRECATED** no longer used in the UI text (str): The text of the comment. timestamp (datetime.datetime): The time the comment was created. Defaults to the return value of datetime.utcnow(). update (Update): The update that this comment pertains to. user (User): The user who wrote this comment.
6259904dcb5e8a47e493cba0
class field_dict(dict): <NEW_LINE> <INDENT> def __init__(self, rank = 0): <NEW_LINE> <INDENT> self._rank = rank <NEW_LINE> self._run = False <NEW_LINE> self._accessedFields = {} <NEW_LINE> self._returnedFields = {} <NEW_LINE> dict.__init__(self) <NEW_LINE> <DEDENT> def readyToRun(self): <NEW_LINE> <INDENT> self._run = True <NEW_LINE> <DEDENT> def __getitem__(self, field): <NEW_LINE> <INDENT> coeff = dict.__getitem__(self, field) <NEW_LINE> if self._run: <NEW_LINE> <INDENT> self._accessedFields[coeff.count()] = (self._rank, field) <NEW_LINE> <DEDENT> return coeff <NEW_LINE> <DEDENT> def __setitem__(self, field, coeff): <NEW_LINE> <INDENT> assert isinstance(coeff, Coefficient), "Only Coefficients can be written back to state" <NEW_LINE> if self._run: <NEW_LINE> <INDENT> self._returnedFields[coeff.count()] = field <NEW_LINE> <DEDENT> dict.__setitem__(self, field, coeff)
Used for the state.xxx_fields dicts. It functions like a regular dict, but remembers which keys are set and get when in the 'run' mode.
6259904de76e3b2f99fd9e38
class DrawLine(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.enabled = False <NEW_LINE> self.shape = "Line" <NEW_LINE> self.intersecting_contours = deque() <NEW_LINE> <DEDENT> def onMouseDown(self, x, y, button, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onMouseDownMap(self, x, y, button, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onMouseUp(self, x, y, button, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onMouseUpMap(self, x, y, button, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onMouseMove(self, x, y, button, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onMouseMoveMap(self, x, y, button, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onDblClick(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onKeyDown(self, keycode, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onKeyUp(self, keycode, shift): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def deactivate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onLine(self, line_geometry): <NEW_LINE> <INDENT> self.intersecting_contours = deque() <NEW_LINE> intersection_order = {} <NEW_LINE> startpt = arcpy.Geometry("point", arcpy.Point(line_geometry.firstPoint.X, line_geometry.firstPoint.Y),arcpy.Describe(select_layer.selectedlayer).spatialReference ) <NEW_LINE> with arcpy.da.SearchCursor(arcpy.SelectLayerByLocation_management(select_layer.selectedlayer, 'intersect', line_geometry), ['OID@','Shape@']) as cursor: <NEW_LINE> <INDENT> for row in cursor: <NEW_LINE> <INDENT> distance = startpt.distanceTo(row[1]) <NEW_LINE> intersection_order[distance] = row[0] <NEW_LINE> <DEDENT> <DEDENT> for key in sorted(intersection_order.iterkeys()): <NEW_LINE> <INDENT> self.intersecting_contours.append(intersection_order[key])
Implementation for contour_attributor.drawline (Tool)
6259904d0a366e3fb87dde19
class ICollectiveCropimageLayer(Interface): <NEW_LINE> <INDENT> pass
Marker interface for browserlayer.
6259904d23e79379d538d932
class SetTileSize(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "render.autotilesize_set" <NEW_LINE> bl_label = "Set" <NEW_LINE> @classmethod <NEW_LINE> def poll(clss, context): <NEW_LINE> <INDENT> return ats_poll(context) <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> if do_set_tile_size(context): <NEW_LINE> <INDENT> return {'FINISHED'} <NEW_LINE> <DEDENT> return {'CANCELLED'}
The first render may not obey the tile-size set here
6259904d004d5f362081fa02
class DevicePluginMessageCollection(list): <NEW_LINE> <INDENT> def __init__(self, messages, priority=PRIO_NORMAL): <NEW_LINE> <INDENT> super(DevicePluginMessageCollection, self).__init__(messages) <NEW_LINE> self.priority = priority
Zero or more messages from a device plugin, to be consumed one by one by a service on the manager server. Return this instead of a naked {} or a DevicePluginMessage if you need to return multiple messages from one callback.
6259904d498bea3a75a58f54
class Furniture(Inventory): <NEW_LINE> <INDENT> inventory_type = "Furniture" <NEW_LINE> def __init__(self, product_code, description, market_price, rental_price, material, size): <NEW_LINE> <INDENT> Inventory.__init__(self, product_code, description, market_price, rental_price) <NEW_LINE> self.material = material <NEW_LINE> self.size = size <NEW_LINE> <DEDENT> def return_as_dictionary(self): <NEW_LINE> <INDENT> output = Inventory.return_as_dictionary(self) <NEW_LINE> output['material'] = self.material <NEW_LINE> output['size'] = self.size <NEW_LINE> return output <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def sort_key(item): <NEW_LINE> <INDENT> return (item.description, item.product_code)
Represents a piece of furniture
6259904d50485f2cf55dc3c1
class EmpiresDatGzip: <NEW_LINE> <INDENT> def __init__(self, datfile_name): <NEW_LINE> <INDENT> self.fname = datfile_name <NEW_LINE> dbg("reading empires2*.dat from %s..." % self.fname, lvl=1) <NEW_LINE> filename = file_get_path(self.fname, write=False) <NEW_LINE> f = file_open(filename, binary=True, write=False) <NEW_LINE> dbg("decompressing data from %s" % filename, lvl=2) <NEW_LINE> compressed_data = f.read() <NEW_LINE> self.content = zlib.decompress(compressed_data, -15) <NEW_LINE> f.close() <NEW_LINE> self.compressed_size = len(compressed_data) <NEW_LINE> self.decompressed_size = len(self.content) <NEW_LINE> del compressed_data <NEW_LINE> dbg("length of compressed data: %d = %d kB" % (self.compressed_size, self.compressed_size / 1024), lvl=2) <NEW_LINE> dbg("length of decompressed data: %d = %d kB" % (self.decompressed_size, self.decompressed_size / 1024), lvl=2) <NEW_LINE> <DEDENT> def raw_dump(self, filename): <NEW_LINE> <INDENT> rawfile_writepath = file_get_path(filename, write=True) <NEW_LINE> dbg("saving uncompressed %s file to %s" % (self.fname, rawfile_writepath), 1) <NEW_LINE> file_write(rawfile_writepath, self.content)
uncompresses the gzip'd empires dat.
6259904d07f4c71912bb0869
class ClusterControllerStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.CreateCluster = channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster", request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString, response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, ) <NEW_LINE> self.UpdateCluster = channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/UpdateCluster", request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.UpdateClusterRequest.SerializeToString, response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, ) <NEW_LINE> self.DeleteCluster = channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/DeleteCluster", request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DeleteClusterRequest.SerializeToString, response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, ) <NEW_LINE> self.GetCluster = channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/GetCluster", request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.GetClusterRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.Cluster.FromString, ) <NEW_LINE> self.ListClusters = channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/ListClusters", request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersResponse.FromString, ) <NEW_LINE> self.DiagnoseCluster = channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster", request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.SerializeToString, response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, )
The ClusterControllerService provides methods to manage clusters of Compute Engine instances.
6259904d76d4e153a661dc92
class BarcodeMSI(BarcodeI2of5): <NEW_LINE> <INDENT> codeName = "MSI" <NEW_LINE> def __init__(self,**kw): <NEW_LINE> <INDENT> from reportlab.graphics.barcode.common import MSI <NEW_LINE> _BarcodeWidget.__init__(self,MSI,1234,**kw)
MSI is used for inventory control in retail applications. There are several methods for calculating check digits so we do not implement one.
6259904d91af0d3eaad3b259
class CQIReportingEvent(events.EventBase): <NEW_LINE> <INDENT> def __init__(self, candidate_sigpower, curr_sigpower): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.candidate_sigpower = candidate_sigpower <NEW_LINE> self.curr_sigpower = curr_sigpower
Events reported by each AP.
6259904d0c0af96317c5777b
class EperTestCase(unittest.TestCase): <NEW_LINE> <INDENT> imaging_value = 100 <NEW_LINE> overscan_value = 1 <NEW_LINE> overscans = 2 <NEW_LINE> verbose = False
Base class for eper TestCase classes.
6259904d097d151d1a2c24a4
class Srv(object): <NEW_LINE> <INDENT> def __init__(self, port, host=''): <NEW_LINE> <INDENT> sck = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self.port = port <NEW_LINE> sck.bind((host, port)) <NEW_LINE> sck.listen(1) <NEW_LINE> self._sck = sck <NEW_LINE> self._cnt = None <NEW_LINE> thread = threading.Thread(target=self._start_listening) <NEW_LINE> thread.start() <NEW_LINE> self._thread = thread <NEW_LINE> <DEDENT> def _start_listening(self): <NEW_LINE> <INDENT> self._cnt = self._sck.accept()[0] <NEW_LINE> <DEDENT> def wait_cnt(self, timeout=120.): <NEW_LINE> <INDENT> self._thread.join(timeout) <NEW_LINE> return (not self._thread.isAlive()) <NEW_LINE> <DEDENT> def release_cnt(self): <NEW_LINE> <INDENT> if self.has_cnt(): <NEW_LINE> <INDENT> assert self._cnt.recv(1024) == "WAITING" <NEW_LINE> self._cnt.send("RELEASE") <NEW_LINE> <DEDENT> <DEDENT> def has_cnt(self): <NEW_LINE> <INDENT> return (self._cnt is not None) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if not self.has_cnt(): <NEW_LINE> <INDENT> cnt = Cnt(self.port) <NEW_LINE> self.wait_cnt() <NEW_LINE> cnt.close() <NEW_LINE> <DEDENT> self._sck.close()
Listen to the Cnt connection and reply.
6259904dac7a0e7691f73910
class NoSession(Error): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.message = message
Exception raised when there is no existing API session. Attributes: message (str): explanation of the error
6259904d63d6d428bbee3c00
@node(params=['BRAINSTools_hash', 'ANTs_hash'], deps=['dwi']) <NEW_LINE> class DwiEd(NrrdOutput): <NEW_LINE> <INDENT> def static_build(self): <NEW_LINE> <INDENT> with BRAINSTools.env(self.BRAINSTools_hash), ANTs.env(self.ANTs_hash): <NEW_LINE> <INDENT> eddy_py['-i', self.dwi, '-o', self.output(), '--force', '-n', NCPU] & LOG
Eddy current correction. Accepts nrrd only.
6259904db830903b9686ee95
class CloudifyBaseLoggingHandler(logging.Handler): <NEW_LINE> <INDENT> def __init__(self, ctx, out_func, message_context_builder): <NEW_LINE> <INDENT> logging.Handler.__init__(self) <NEW_LINE> self.context = message_context_builder(ctx) <NEW_LINE> if _is_system_workflow(ctx): <NEW_LINE> <INDENT> out_func = stdout_log_out <NEW_LINE> <DEDENT> elif out_func is None: <NEW_LINE> <INDENT> out_func = amqp_log_out <NEW_LINE> <DEDENT> self.out_func = out_func <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def emit(self, record): <NEW_LINE> <INDENT> message = self.format(record) <NEW_LINE> log = { 'context': self.context, 'logger': record.name, 'level': record.levelname.lower(), 'message': { 'text': message } } <NEW_LINE> self.out_func(log)
A base handler class for writing log messages to RabbitMQ
6259904de76e3b2f99fd9e39
class Breeze(object): <NEW_LINE> <INDENT> def __init__(self, location): <NEW_LINE> <INDENT> self.location = location <NEW_LINE> self.known = False
Encapsulates a breeze.
6259904dd6c5a102081e3552
@dataclass <NEW_LINE> class ChannelSectionResponse(BaseApiResponse): <NEW_LINE> <INDENT> items: Optional[List[ChannelSection]] = field(default=None, repr=False)
A class representing the channel section's retrieve response info. Refer: https://developers.google.com/youtube/v3/docs/channelSections/list?#properties_1
6259904d3eb6a72ae038ba91
class topdown_lateral_module(nn.Module): <NEW_LINE> <INDENT> def __init__(self, dim_in_top, dim_in_lateral): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.dim_in_top = dim_in_top <NEW_LINE> self.dim_in_lateral = dim_in_lateral <NEW_LINE> self.dim_out = dim_in_top <NEW_LINE> if cfg.FPN.USE_GN: <NEW_LINE> <INDENT> self.conv_lateral = nn.Sequential( nn.Conv2d(dim_in_lateral, self.dim_out, 1, 1, 0, bias=False), nn.GroupNorm(net_utils.get_group_gn(self.dim_out), self.dim_out, eps=cfg.GROUP_NORM.EPSILON) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.conv_lateral = nn.Conv2d(dim_in_lateral, self.dim_out, 1, 1, 0) <NEW_LINE> <DEDENT> self._init_weights() <NEW_LINE> <DEDENT> def _init_weights(self): <NEW_LINE> <INDENT> if cfg.FPN.USE_GN: <NEW_LINE> <INDENT> conv = self.conv_lateral[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conv = self.conv_lateral <NEW_LINE> <DEDENT> if cfg.FPN.ZERO_INIT_LATERAL: <NEW_LINE> <INDENT> init.constant_(conv.weight, 0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mynn.init.XavierFill(conv.weight) <NEW_LINE> <DEDENT> if conv.bias is not None: <NEW_LINE> <INDENT> init.constant_(conv.bias, 0) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, top_blob, lateral_blob): <NEW_LINE> <INDENT> lat = self.conv_lateral(lateral_blob) <NEW_LINE> td = F.upsample(top_blob, scale_factor=2, mode='nearest') <NEW_LINE> return lat + td
Add a top-down lateral module.
6259904d507cdc57c63a61d6
class Monitor(problem.OptimizationFunction): <NEW_LINE> <INDENT> def get_data(self, param: parametrization.Parametrization): <NEW_LINE> <INDENT> return self.calculate_objective_function(param)
Defines a monitor. Monitors behave exactly as functions but do not implement differentiation. This enables using the graph evaluation engine to share computation when multiple monitors need to be evaluated simultaneously. This is mainly used to rename `calculate_objective_function` to `get_data`, the latter of which is more appropriate for a monitor.
6259904d50485f2cf55dc3c3
class GenwebCoreControlPanel(controlpanel.ControlPanelFormWrapper): <NEW_LINE> <INDENT> form = GenwebCoreControlPanelSettingsForm
Genweb Core settings control panel
6259904d8e71fb1e983bcefc
class LoginView(View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> if 'username' in request.COOKIES: <NEW_LINE> <INDENT> username = request.COOKIES.get('username') <NEW_LINE> checked = 'checked' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> username = '' <NEW_LINE> checked = '' <NEW_LINE> <DEDENT> return render(request, 'login.html', {'username': username, 'checkd': checked}) <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> username = request.POST.get('username') <NEW_LINE> password = request.POST.get('pwd') <NEW_LINE> remmeber = request.POST.get('remmeber') <NEW_LINE> if not all([username, password]): <NEW_LINE> <INDENT> return render(request, 'login.html', {'errmsg':'数据信息不完整'}) <NEW_LINE> <DEDENT> user = authenticate(username=username, password=password) <NEW_LINE> if user is not None: <NEW_LINE> <INDENT> if user.is_active: <NEW_LINE> <INDENT> next_url = request.GET.get('next', reverse('goods:index')) <NEW_LINE> res = redirect(next_url) <NEW_LINE> if remmeber == 'on': <NEW_LINE> <INDENT> res.set_cookie('username', username, max_age=7*24*3600) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res.delete_cookie('username') <NEW_LINE> <DEDENT> login(request, user) <NEW_LINE> return res <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render(request, 'login.html', {'errmsg':'用户未激活'}) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> render(request, 'login.html', {'errmsg':'用户名或密码错误'})
Login
6259904d8da39b475be04627
class Solution: <NEW_LINE> <INDENT> def twoSum(self, nums, target): <NEW_LINE> <INDENT> l = 0 <NEW_LINE> r = len(nums) - 1 <NEW_LINE> while l < r: <NEW_LINE> <INDENT> if nums[l] + nums[r] == target: <NEW_LINE> <INDENT> return [l + 1, r + 1] <NEW_LINE> <DEDENT> if nums[l] + nums[r] < target: <NEW_LINE> <INDENT> l += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r -= 1
@param: nums: an array of Integer @param: target: target = nums[index1] + nums[index2] @return: [index1 + 1, index2 + 1] (index1 < index2)
6259904db57a9660fecd2eb3
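A usage sketch for the two-pointer twoSum above, assuming the flattened snippet is expanded into regular Python and that nums is sorted in ascending order as the docstring implies; the example inputs are hypothetical.

# Two quick checks of the two-pointer approach on sorted input.
print(Solution().twoSum([2, 7, 11, 15], 9))   # [1, 2]
print(Solution().twoSum([1, 3, 4, 4], 8))     # [3, 4]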
class LineShape(Shape): <NEW_LINE> <INDENT> def __init__(self, pen, points): <NEW_LINE> <INDENT> Shape.__init__(self) <NEW_LINE> self.pen = pen.copy() <NEW_LINE> self.points = points <NEW_LINE> <DEDENT> def draw(self, painter, highlight=False): <NEW_LINE> <INDENT> pen = self.select_pen(highlight) <NEW_LINE> painter.setPen( QPen( QBrush(QColor.fromRgbF(*pen.color)), pen.linewidth, pen.dash, Qt.SquareCap, Qt.MiterJoin, ) ) <NEW_LINE> x0, y0 = self.points[0] <NEW_LINE> for x1, y1 in self.points[1:]: <NEW_LINE> <INDENT> painter.drawLine(QPointF(x0, y0), QPointF(x1, y1)) <NEW_LINE> x0 = x1 <NEW_LINE> y0 = y1
Used to draw a line with QPainter.
6259904dcad5886f8bdc5a99
class VenueImage(ImageModel): <NEW_LINE> <INDENT> original_image = models.ImageField(upload_to='venue_images') <NEW_LINE> num_views = models.PositiveIntegerField(editable=False, default=0) <NEW_LINE> class IKOptions: <NEW_LINE> <INDENT> spec_module = 'venues.specs' <NEW_LINE> cache_dir = 'venue_images' <NEW_LINE> image_field = 'original_image' <NEW_LINE> save_count_as = 'num_views'
A venue's image
6259904d6fece00bbacccdf0
class Rectangle: <NEW_LINE> <INDENT> number_of_instances = 0 <NEW_LINE> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> Rectangle.number_of_instances += 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("height must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("height must be >= 0") <NEW_LINE> <DEDENT> self.__height = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("width must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("width must be >= 0") <NEW_LINE> <DEDENT> self.__width = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__width * self.__height <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.__width == 0 or self.__height == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 2 * (self.__width + self.__height) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> new = "" <NEW_LINE> for i in range(self.__height): <NEW_LINE> <INDENT> for j in range(self.__width): <NEW_LINE> <INDENT> new += '#' <NEW_LINE> <DEDENT> if i != self.__height - 1: <NEW_LINE> <INDENT> new += '\n' <NEW_LINE> <DEDENT> <DEDENT> return new <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Rectangle({:d},{:d})'.format(self.__width, self.__height) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print("Bye rectangle...") <NEW_LINE> Rectangle.number_of_instances -= 1
Defines a Rectangle class
6259904dd10714528d69f0a9
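A usage sketch for the Rectangle class above, assuming the flattened snippet is expanded into regular Python; the dimensions are hypothetical.

# Exercise the width/height properties, area, perimeter, and string rendering.
r = Rectangle(3, 2)
print(r.area())                        # 6
print(r.perimeter())                   # 10
print(r)                               # two rows of '###'
print(Rectangle.number_of_instances)   # 1, assuming no other instances exist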
class ITriggersQueueItem(ABC): <NEW_LINE> <INDENT> pass
Triggers queue item interface @package FastyBird:MiniServer! @module queue @author Adam Kadlec <[email protected]>
6259904dac7a0e7691f73912
@singleton <NEW_LINE> class Settings(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.timeout = 10 <NEW_LINE> self.path_errors = 'errors' <NEW_LINE> self.path_log = 'log' <NEW_LINE> self.path_sqlite = 'instance' <NEW_LINE> self.name_database = 'base' <NEW_LINE> self.print_table=None <NEW_LINE> config = configparser.ConfigParser() <NEW_LINE> try: <NEW_LINE> <INDENT> config.read( os.path.join( os.path.dirname(__file__), "settings.ini")) <NEW_LINE> self.timeout = int(config.get('app', "TIMEOUT")) <NEW_LINE> self.path_errors = config.get('app', "PATH_ERRORS") <NEW_LINE> self.path_log = config.get('app', "PATH_LOG") <NEW_LINE> self.path_sqlite = config.get('app', "PATH_SQLITE") <NEW_LINE> self.name_database = config.get('app', "NAME_DATABASE") <NEW_LINE> <DEDENT> except configparser.Error as e: <NEW_LINE> <INDENT> print(str(e)) <NEW_LINE> <DEDENT> if not os.path.exists(self.path_sqlite): <NEW_LINE> <INDENT> os.makedirs(self.path_sqlite) <NEW_LINE> <DEDENT> if not os.path.exists(self.path_errors): <NEW_LINE> <INDENT> os.makedirs(self.path_errors) <NEW_LINE> <DEDENT> if not os.path.exists(self.path_log): <NEW_LINE> <INDENT> os.makedirs(self.path_log) <NEW_LINE> <DEDENT> <DEDENT> def get_settings_dict(self): <NEW_LINE> <INDENT> return {"TIMEOUT": self.timeout, "PATH_ERRORS": self.path_errors, "PATH_LOG": self.path_log, "PATH_SQLITE": self.path_sqlite, "NAME_DATABASE": self.name_database}
Reads the settings and stores them
6259904dec188e330fdf9cd6
class ZCave(grok.Model): <NEW_LINE> <INDENT> pass
we call this `ZCave` because we want to test that we do not depend on alphabetical order
6259904da79ad1619776b4b8
class SNP(_SNP): <NEW_LINE> <INDENT> def __new__(cls, name, chromosome, position, genotype, variation=None, strand=None): <NEW_LINE> <INDENT> return super(SNP, cls).__new__(cls, name, variation, chromosome, int(position), strand, genotype)
A wrapper for SNP data, provided by various formats.
6259904db830903b9686ee96
class WaitingList(Base): <NEW_LINE> <INDENT> __tablename__ = 'waitinglist' <NEW_LINE> email = sqla.Column(sqla.Unicode(255), primary_key=True) <NEW_LINE> ipaddr = sqla.Column(sqla.Unicode(255)) <NEW_LINE> requested = sqla.Column(sqla.DateTime) <NEW_LINE> accepted = sqla.Column(sqla.DateTime) <NEW_LINE> completed = sqla.Column(sqla.Boolean, default=False) <NEW_LINE> def __init__(self, email, ipaddr=None): <NEW_LINE> <INDENT> super(Base, self).__init__() <NEW_LINE> self.email = email <NEW_LINE> self.ipaddr = ipaddr <NEW_LINE> self.requested = datetime.datetime.now()
Log the email, time and ipaddr. If we get another request from the same ipaddr
6259904d379a373c97d9a462
class _PartialStringStats(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.total_bytes_length = 0 <NEW_LINE> self.total_num_values = 0
Holds partial statistics needed to compute the string statistics for a single feature.
6259904d3eb6a72ae038ba93
class GoalSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Goal <NEW_LINE> fields = ('id', 'name', 'date_created', 'date_modified') <NEW_LINE> read_only_fields = ('date_created', 'date_modified')
Serializer to map the Model instance into JSON format.
6259904d29b78933be26aade
class ParserErrorsDialog(Gtk.Dialog): <NEW_LINE> <INDENT> def __init__(self, error_logs): <NEW_LINE> <INDENT> GObject.GObject.__init__(self, title='Parser Errors', buttons=(Gtk.STOCK_CLOSE, Gtk.ResponseType.ACCEPT)) <NEW_LINE> self._error_logs = None <NEW_LINE> self.tree_store = Gtk.TreeStore(str) <NEW_LINE> self.update_tree_store(error_logs) <NEW_LINE> column = Gtk.TreeViewColumn('XML Parser Errors by Filename') <NEW_LINE> renderer = Gtk.CellRendererText() <NEW_LINE> column.pack_start(renderer, True) <NEW_LINE> column.add_attribute(renderer, 'text', 0) <NEW_LINE> column.set_sort_column_id(0) <NEW_LINE> self.tree_view = tree_view = Gtk.TreeView(self.tree_store) <NEW_LINE> tree_view.set_enable_search(False) <NEW_LINE> tree_view.set_search_column(-1) <NEW_LINE> tree_view.set_reorderable(False) <NEW_LINE> tree_view.set_headers_visible(False) <NEW_LINE> tree_view.get_selection().set_mode(Gtk.SelectionMode.NONE) <NEW_LINE> tree_view.append_column(column) <NEW_LINE> for row in self.tree_store: <NEW_LINE> <INDENT> tree_view.expand_row(row.path, False) <NEW_LINE> <DEDENT> scrolled_window = Gtk.ScrolledWindow() <NEW_LINE> scrolled_window.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) <NEW_LINE> scrolled_window.add(tree_view) <NEW_LINE> self.vbox.pack_start(scrolled_window, True) <NEW_LINE> self.set_size_request(2*MIN_DIALOG_WIDTH, MIN_DIALOG_HEIGHT) <NEW_LINE> self.show_all() <NEW_LINE> <DEDENT> def update_tree_store(self, error_logs): <NEW_LINE> <INDENT> self.tree_store.clear() <NEW_LINE> self._error_logs = error_logs <NEW_LINE> for filename, errors in six.iteritems(error_logs): <NEW_LINE> <INDENT> parent = self.tree_store.append(None, [str(filename)]) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(filename, 'r') as fp: <NEW_LINE> <INDENT> code = fp.readlines() <NEW_LINE> <DEDENT> <DEDENT> except EnvironmentError: <NEW_LINE> <INDENT> code = None <NEW_LINE> <DEDENT> for error in errors: <NEW_LINE> <INDENT> em = self.tree_store.append(parent, ["Line {e.line}: {e.message}".format(e=error)]) <NEW_LINE> if code: <NEW_LINE> <INDENT> self.tree_store.append(em, ["\n".join( "{} {}{}".format(line, code[line - 1].replace("\t", " ").strip("\n"), " " * 20 + "<!-- ERROR -->" if line == error.line else "") for line in range(error.line - 2, error.line + 3) if 0 < line <= len(code) )]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> response = Gtk.Dialog.run(self) <NEW_LINE> self.destroy() <NEW_LINE> return response == Gtk.ResponseType.ACCEPT
A dialog for viewing parser errors
6259904d76e4537e8c3f09bd
@six.add_metaclass(abc.ABCMeta) <NEW_LINE> class ExtensionDriver(object): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def initialize(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def extension_alias(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_create_network(self, session, data, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_create_subnet(self, session, data, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_create_port(self, session, data, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_update_network(self, session, data, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_update_subnet(self, session, data, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_update_port(self, session, data, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def extend_network_dict(self, session, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def extend_subnet_dict(self, session, result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def extend_port_dict(self, session, result): <NEW_LINE> <INDENT> pass
Define stable abstract interface for ML2 extension drivers. An extension driver extends the core resources implemented by the ML2 plugin with additional attributes. Methods that process create and update operations for these resources validate and persist values for extended attributes supplied through the API. Other methods extend the resource dictionaries returned from the API operations with the values of the extended attributes.
6259904d3cc13d1c6d466b70
class CustomLogger(object): <NEW_LINE> <INDENT> def __init__(self, logfilename): <NEW_LINE> <INDENT> self.logfilename = logfilename <NEW_LINE> self.msgs = [] <NEW_LINE> <DEDENT> def log(self, msg, timestamp=None): <NEW_LINE> <INDENT> if timestamp is None: <NEW_LINE> <INDENT> timestamp = time.time() <NEW_LINE> <DEDENT> self.msgs.append((timestamp, msg)) <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> handled = [] <NEW_LINE> try: <NEW_LINE> <INDENT> fhandler = open(self.logfilename, 'a') <NEW_LINE> <DEDENT> except IOError as open_error: <NEW_LINE> <INDENT> self.log('Failed to open file') <NEW_LINE> raise open_error <NEW_LINE> <DEDENT> for index, entry in enumerate(self.msgs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fhandler.write(str(entry) + '\n') <NEW_LINE> handled.append(index) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> self.log('Could not write to file') <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> fhandler.close() <NEW_LINE> for index in handled[::-1]: <NEW_LINE> <INDENT> del self.msgs[index]
Logs information into a file. Methods: log(msg): buffer a message with a timestamp; flush(): append the buffered messages to the log file and drop the ones written successfully
6259904d1f037a2d8b9e5288
class PasteComment(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'paste_comments' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) <NEW_LINE> paste_id = db.Column(db.Integer, db.ForeignKey('pastes.id'), nullable=False) <NEW_LINE> content = db.Column(db.Text, nullable=False) <NEW_LINE> created_time = db.Column(db.DateTime, nullable=False) <NEW_LINE> modified_time = db.Column(db.DateTime, nullable=False) <NEW_LINE> user = db.relationship(User, backref=db.backref('paste_comments')) <NEW_LINE> def __init__(self, user_id, paste_id, content): <NEW_LINE> <INDENT> self.user_id = user_id <NEW_LINE> self.paste_id = paste_id <NEW_LINE> self.content = content <NEW_LINE> self.created_time = self.modified_time = datetime.now() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<PasteComment %s>" % self.id
Comment table (comments on pastes)
6259904d96565a6dacd2d9a5
class NetworkRuleSet(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'bypass': {'key': 'bypass', 'type': 'str'}, 'default_action': {'key': 'defaultAction', 'type': 'str'}, 'ip_rules': {'key': 'ipRules', 'type': '[IPRule]'}, 'virtual_network_rules': {'key': 'virtualNetworkRules', 'type': '[VirtualNetworkRule]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(NetworkRuleSet, self).__init__(**kwargs) <NEW_LINE> self.bypass = kwargs.get('bypass', None) <NEW_LINE> self.default_action = kwargs.get('default_action', None) <NEW_LINE> self.ip_rules = kwargs.get('ip_rules', None) <NEW_LINE> self.virtual_network_rules = kwargs.get('virtual_network_rules', None)
A set of rules governing the network accessibility of a vault. :param bypass: Tells what traffic can bypass network rules. This can be 'AzureServices' or 'None'. If not specified the default is 'AzureServices'. Possible values include: "AzureServices", "None". :type bypass: str or ~azure.mgmt.keyvault.v2018_02_14.models.NetworkRuleBypassOptions :param default_action: The default action when no rule from ipRules and from virtualNetworkRules match. This is only used after the bypass property has been evaluated. Possible values include: "Allow", "Deny". :type default_action: str or ~azure.mgmt.keyvault.v2018_02_14.models.NetworkRuleAction :param ip_rules: The list of IP address rules. :type ip_rules: list[~azure.mgmt.keyvault.v2018_02_14.models.IPRule] :param virtual_network_rules: The list of virtual network rules. :type virtual_network_rules: list[~azure.mgmt.keyvault.v2018_02_14.models.VirtualNetworkRule]
6259904d50485f2cf55dc3c4
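For illustration, a minimal construction sketch for the NetworkRuleSet model above (a hypothetical policy; the keyword names mirror the attribute map shown in the code):

    # Deny traffic by default, but let trusted Azure services bypass the rules.
    rules = NetworkRuleSet(bypass="AzureServices", default_action="Deny")
    print(rules.default_action)  # "Deny"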
class ChatForm(Form): <NEW_LINE> <INDENT> message = TextField(label="Chat:", validators=[Length(min=1, max=MAX_CHAT)])
User enters a chat message
6259904d8a43f66fc4bf35cf
class RetrieveHGMModel(SaveandRetrieveModel): <NEW_LINE> <INDENT> def __init__(self, filename=None): <NEW_LINE> <INDENT> SaveandRetrieveModel.__init__(self, filename=filename) <NEW_LINE> <DEDENT> def GetModel(self): <NEW_LINE> <INDENT> if self._filename is None: <NEW_LINE> <INDENT> raise Exception("Please enter the filename") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> model = sumpf.modules.SignalFile(filename=self._filename, file_format=self._file_format).GetSignal() <NEW_LINE> label = model.GetLabels()[0] <NEW_LINE> nonlinear_functions, aliasingcomp, aliasingcomp_loc = decode_label(label=label) <NEW_LINE> filter_kernels = [] <NEW_LINE> for i in range(len(model.GetChannels())): <NEW_LINE> <INDENT> kernel = sumpf.modules.SplitSignal(data=model, channels=[i]).GetOutput() <NEW_LINE> filter_kernels.append(kernel) <NEW_LINE> <DEDENT> model = nlsp.HammersteinGroupModel(nonlinear_functions=nonlinear_functions, filter_impulseresponses=filter_kernels, aliasing_compensation=aliasingcomp(), downsampling_position=aliasingcomp_loc) <NEW_LINE> return model
Retrieve the model from a specific file location.
6259904d07f4c71912bb086d
class QNetwork(nn.Module): <NEW_LINE> <INDENT> def __init__(self, state_size, action_size, hidden_layers, seed=42): <NEW_LINE> <INDENT> super(QNetwork, self).__init__() <NEW_LINE> self.seed = torch.manual_seed(seed) <NEW_LINE> layer_sizes = [state_size] + hidden_layers <NEW_LINE> layer_dims = zip(layer_sizes[:-1], layer_sizes[1:]) <NEW_LINE> self.hidden_layers = nn.ModuleList([nn.Linear(h1, h2) for h1, h2 in layer_dims]) <NEW_LINE> self.output = nn.Linear(layer_sizes[-1], action_size) <NEW_LINE> <DEDENT> def forward(self, state): <NEW_LINE> <INDENT> x = state <NEW_LINE> for layer in self.hidden_layers: <NEW_LINE> <INDENT> x = F.leaky_relu(layer(x)) <NEW_LINE> <DEDENT> output = self.output(x) <NEW_LINE> return output
Actor (Policy) Model.
6259904dd99f1b3c44d06ad2
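As a quick sketch of how the QNetwork above would be used (the sizes are made up; assumes the class and its torch imports are in scope):

    import torch

    net = QNetwork(state_size=8, action_size=4, hidden_layers=[64, 64])
    state = torch.rand(1, 8)       # batch of one 8-dimensional state
    q_values = net(state)          # -> tensor of shape (1, 4), one value per action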
class ScheduleQueryHandler(base.BaseHandler): <NEW_LINE> <INDENT> @access_control.OwnerRestricted <NEW_LINE> @access_control.ValidXsrfTokenRequired <NEW_LINE> @access_control.ActiveGaSuperProxyUser <NEW_LINE> def post(self): <NEW_LINE> <INDENT> query_id = self.request.get('query_id') <NEW_LINE> api_query = query_helper.GetApiQuery(query_id) <NEW_LINE> if api_query: <NEW_LINE> <INDENT> schedule_helper.SetApiQueryScheduleStatus(api_query) <NEW_LINE> schedule_helper.ScheduleApiQuery(api_query, randomize=True, countdown=0) <NEW_LINE> api_query_links = template_helper.GetLinksForTemplate( api_query, self.request.host_url) <NEW_LINE> self.redirect(api_query_links.get('manage_link', '/')) <NEW_LINE> return <NEW_LINE> <DEDENT> self.redirect(co.LINKS['owner_index'])
Handles the scheduling of API Queries. Starting and stopping.
6259904d009cb60464d02971
class PopupBreadcrumbTestCase(PopupBaseTestCase): <NEW_LINE> <INDENT> pat = ('http://nohost/plone%%srefbrowser_popup?fieldName=%s&' 'fieldRealName=%s&at_url=/plone/layer1/layer2/ref') <NEW_LINE> def test_breadcrumbs(self): <NEW_LINE> <INDENT> fieldname = 'multiRef3' <NEW_LINE> self.request.set('at_url', '/plone/layer1/layer2/ref') <NEW_LINE> self.request.set('fieldName', fieldname) <NEW_LINE> self.request.set('fieldRealName', fieldname) <NEW_LINE> popup = self._getPopup() <NEW_LINE> bc = popup.breadcrumbs() <NEW_LINE> path = '' <NEW_LINE> pat = self.pat % (fieldname, fieldname) <NEW_LINE> for compare, bc in zip([('', 'Home'), ('layer1', 'Layer1'), ('layer2', 'Layer2'), ('ref', 'ref')], bc): <NEW_LINE> <INDENT> path += compare[0] + '/' <NEW_LINE> assert bc['absolute_url'] == pat % path <NEW_LINE> assert bc['Title'] == compare[1] <NEW_LINE> <DEDENT> <DEDENT> def test_startup(self): <NEW_LINE> <INDENT> fieldname = 'multiRef3' <NEW_LINE> self.request.set('at_url', '/plone/layer1/layer2/ref') <NEW_LINE> self.request.set('fieldName', fieldname) <NEW_LINE> self.request.set('fieldRealName', fieldname) <NEW_LINE> popup = self._getPopup(obj=self.portal.news) <NEW_LINE> bc = popup.breadcrumbs() <NEW_LINE> self.assertEqual(len(bc), 2) <NEW_LINE> self.assertEqual(bc[0]['Title'], 'Home') <NEW_LINE> self.assertEqual(bc[1]['Title'], 'News') <NEW_LINE> <DEDENT> def test_restrictedbrowsing(self): <NEW_LINE> <INDENT> fieldname = 'multiRef3' <NEW_LINE> self.request.set('at_url', '/plone/layer1/layer2/ref') <NEW_LINE> self.request.set('fieldName', fieldname) <NEW_LINE> self.request.set('fieldRealName', fieldname) <NEW_LINE> widget = self.folder.ref.getField(fieldname).widget <NEW_LINE> widget.restrict_browsing_to_startup_directory = 1 <NEW_LINE> popup = self._getPopup(obj=self.portal.layer1.layer2) <NEW_LINE> widget.startup_directory = 'layer1/layer2' <NEW_LINE> bc = popup.breadcrumbs() <NEW_LINE> self.assertEqual(len(bc), 1) <NEW_LINE> self.assertEqual(bc[0]['Title'], 'Layer2') <NEW_LINE> widget.restrict_browsing_to_startup_directory = 0 <NEW_LINE> <DEDENT> def test_isNotSelf(self): <NEW_LINE> <INDENT> catalog = getToolByName(self.portal, 'portal_catalog') <NEW_LINE> fieldname = 'multiRef3' <NEW_LINE> self.request.set('at_url', '/plone/layer1/layer2/ref') <NEW_LINE> self.request.set('fieldName', fieldname) <NEW_LINE> self.request.set('fieldRealName', fieldname) <NEW_LINE> clip = self.folder.manage_copyObjects('ref') <NEW_LINE> self.folder.manage_pasteObjects(clip) <NEW_LINE> copy = self.folder['copy_of_ref'] <NEW_LINE> copy.reindexObject() <NEW_LINE> refbrain = catalog(id='ref')[0] <NEW_LINE> copybrain = catalog(id='copy_of_ref')[0] <NEW_LINE> popup = self._getPopup() <NEW_LINE> assert popup.isNotSelf(copybrain) == True <NEW_LINE> assert popup.isNotSelf(refbrain) == False
Test the popup breadcrumbs
6259904d26068e7796d4dd7d
class HPDaPhyDrvMap(HPHardDiskMap): <NEW_LINE> <INDENT> maptype = "HPDaPhyDrvMap" <NEW_LINE> modname = "ZenPacks.community.HPMon.cpqDaPhyDrv" <NEW_LINE> snmpGetTableMaps = ( GetTableMap('cpqDaPhyDrvTable', '.1.3.6.1.4.1.232.3.2.5.1.1', { '.3': 'description', '.4': 'FWRev', '.5': 'bay', '.6': 'status', '.45': 'size', '.48': 'hotPlug', '.51': 'serialNumber', '.59': 'rpm', '.60': 'diskType', } ), ) <NEW_LINE> diskTypes = {1: 'other', 2: 'SCSI', 3: 'SATA', 4: 'SAS', } <NEW_LINE> def process(self, device, results, log): <NEW_LINE> <INDENT> log.info('processing %s for device %s', self.name(), device.id) <NEW_LINE> getdata, tabledata = results <NEW_LINE> if not device.id in HPHardDiskMap.oms: <NEW_LINE> <INDENT> HPHardDiskMap.oms[device.id] = [] <NEW_LINE> <DEDENT> for oid, disk in tabledata.get('cpqDaPhyDrvTable', {}).iteritems(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> om = self.objectMap(disk) <NEW_LINE> om.snmpindex = oid.strip('.') <NEW_LINE> om.id = self.prepId("HardDisk%s"%om.snmpindex).replace('.','_') <NEW_LINE> if not getattr(om,'description',''):om.description='Unknown Disk' <NEW_LINE> om.setProductKey = MultiArgs(om.description, om.description.split()[0]) <NEW_LINE> om.diskType = self.diskTypes.get(getattr(om, 'diskType', 1), '%s (%d)' %(self.diskTypes[1], om.diskType)) <NEW_LINE> om.rpm = self.rpms.get(getattr(om,'rpm',1), getattr(om,'rpm',1)) <NEW_LINE> om.size = getattr(om, 'size', 0) * 1048576 <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> HPHardDiskMap.oms[device.id].append(om) <NEW_LINE> <DEDENT> return
Map HP/Compaq insight manager DA Hard Disk tables to model.
6259904d16aa5153ce401927
class Place(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=255) <NEW_LINE> description = models.TextField(null=True) <NEW_LINE> address = models.CharField(max_length=255, null=True) <NEW_LINE> zipcode = models.CharField(max_length=10, blank=True, help_text="Code postal / Zipcode") <NEW_LINE> city = models.CharField(max_length=50, blank=True) <NEW_LINE> country = CountryField(default="") <NEW_LINE> latitude = models.DecimalField(max_digits=9, decimal_places=6, null=True, blank=True) <NEW_LINE> longitude = models.DecimalField(max_digits=9, decimal_places=6, null=True, blank=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u'{0} ({1})'.format(self.name, self.city)
Some place belonging to an organization
6259904dd6c5a102081e3557
class MongoInstrumentedAttribute(object): <NEW_LINE> <INDENT> def __init__(self, attr, db, *args): <NEW_LINE> <INDENT> self.__attr = attr <NEW_LINE> self.__db = db <NEW_LINE> if args: <NEW_LINE> <INDENT> self.original_class_value, = args <NEW_LINE> <DEDENT> <DEDENT> def __get__(self, entity, entity_class): <NEW_LINE> <INDENT> if not entity is None: <NEW_LINE> <INDENT> ref_val = entity.__mongo_refs__[self.__attr.entity_attr] <NEW_LINE> attr_entity_class = get_entity_class(self.__attr.attr_type) <NEW_LINE> if isinstance(ref_val, list): <NEW_LINE> <INDENT> value = [transform_outgoing(attr_entity_class, self.__db.dereference(el)) for el in ref_val] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = transform_outgoing(attr_entity_class, self.__db.dereference(ref_val)) <NEW_LINE> <DEDENT> setattr(entity, self.__attr.entity_attr, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = self <NEW_LINE> <DEDENT> return value
Lazy resolution of relation attributes through Mongo DB refs.
6259904dd53ae8145f91989c
class Minimizable(sgqlc.types.Interface): <NEW_LINE> <INDENT> __schema__ = github_schema <NEW_LINE> __field_names__ = ('is_minimized', 'minimized_reason', 'viewer_can_minimize') <NEW_LINE> is_minimized = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='isMinimized') <NEW_LINE> minimized_reason = sgqlc.types.Field(String, graphql_name='minimizedReason') <NEW_LINE> viewer_can_minimize = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='viewerCanMinimize')
Entities that can be minimized.
6259904df7d966606f7492d5
class BaseQuestionnaireTests(actions.TestBase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(BaseQuestionnaireTests, self).setUp() <NEW_LINE> actions.login(ADMIN_EMAIL, is_admin=True) <NEW_LINE> self.base = '/' + COURSE_NAME <NEW_LINE> test_course = actions.simple_add_course( COURSE_NAME, ADMIN_EMAIL, 'Questionnaire Test Course') <NEW_LINE> self.old_namespace = namespace_manager.get_namespace() <NEW_LINE> namespace_manager.set_namespace('ns_%s' % COURSE_NAME) <NEW_LINE> self.course = courses.Course(None, test_course) <NEW_LINE> test_unit = self.course.add_unit() <NEW_LINE> test_unit.availability = courses.AVAILABILITY_AVAILABLE <NEW_LINE> test_lesson = self.course.add_lesson(test_unit) <NEW_LINE> test_lesson.availability = courses.AVAILABILITY_AVAILABLE <NEW_LINE> test_lesson.title = 'This is a lesson that contains a form.' <NEW_LINE> test_lesson.objectives = '%s\n%s' % (TEST_FORM_HTML, QUESTIONNAIRE_TAG) <NEW_LINE> self.unit_id = test_unit.unit_id <NEW_LINE> self.lesson_id = test_lesson.lesson_id <NEW_LINE> self.course.save() <NEW_LINE> actions.logout() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> del sites.Registry.test_overrides[sites.GCB_COURSES_CONFIG.name] <NEW_LINE> namespace_manager.set_namespace(self.old_namespace) <NEW_LINE> super(BaseQuestionnaireTests, self).tearDown() <NEW_LINE> <DEDENT> def get_button(self): <NEW_LINE> <INDENT> dom = self.parse_html_string(self.get('unit?unit=%s&lesson=%s' % ( self.unit_id, self.lesson_id)).body) <NEW_LINE> return dom.find('.//button[@class="gcb-button questionnaire-button"]') <NEW_LINE> <DEDENT> def register(self): <NEW_LINE> <INDENT> user = actions.login(STUDENT_EMAIL, is_admin=False) <NEW_LINE> actions.register(self, STUDENT_NAME) <NEW_LINE> return models.Student.get_enrolled_student_by_user(user)
Tests for REST endpoint and tag renderer.
6259904d435de62698e9d242
class ElementSin(Node): <NEW_LINE> <INDENT> result_types = { ('Matrix',): Matrix, ('Vector',): Vector, ('Scalar',): Scalar } <NEW_LINE> operation_node_type = _v.operation_node_type.OPERATION_UNARY_SIN_TYPE <NEW_LINE> def _node_init(self): <NEW_LINE> <INDENT> self.shape = self.operands[0].shape
Represent the elementwise computation of ``sin`` on an object.
6259904d07d97122c42180de
class Guard(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.id = str(random.random())+str(time.time()) <NEW_LINE> <DEDENT> def post_read(self, req): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def post_write(self, req): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def remove_read(self, req): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def remove_write(self, req): <NEW_LINE> <INDENT> pass
The empty interface of a guard.
6259904d8e71fb1e983bcf00
class StrSQLCompiler(SQLCompiler): <NEW_LINE> <INDENT> def _fallback_column_name(self, column): <NEW_LINE> <INDENT> return "<name unknown>" <NEW_LINE> <DEDENT> def visit_getitem_binary(self, binary, operator, **kw): <NEW_LINE> <INDENT> return "%s[%s]" % ( self.process(binary.left, **kw), self.process(binary.right, **kw), ) <NEW_LINE> <DEDENT> def visit_json_getitem_op_binary(self, binary, operator, **kw): <NEW_LINE> <INDENT> return self.visit_getitem_binary(binary, operator, **kw) <NEW_LINE> <DEDENT> def visit_json_path_getitem_op_binary(self, binary, operator, **kw): <NEW_LINE> <INDENT> return self.visit_getitem_binary(binary, operator, **kw) <NEW_LINE> <DEDENT> def visit_sequence(self, seq, **kw): <NEW_LINE> <INDENT> return "<next sequence value: %s>" % self.preparer.format_sequence(seq) <NEW_LINE> <DEDENT> def returning_clause(self, stmt, returning_cols): <NEW_LINE> <INDENT> columns = [ self._label_select_column(None, c, True, False, {}) for c in elements._select_iterables(returning_cols) ] <NEW_LINE> return "RETURNING " + ", ".join(columns) <NEW_LINE> <DEDENT> def update_from_clause( self, update_stmt, from_table, extra_froms, from_hints, **kw ): <NEW_LINE> <INDENT> return "FROM " + ", ".join( t._compiler_dispatch(self, asfrom=True, fromhints=from_hints, **kw) for t in extra_froms ) <NEW_LINE> <DEDENT> def delete_extra_from_clause( self, update_stmt, from_table, extra_froms, from_hints, **kw ): <NEW_LINE> <INDENT> return ", " + ", ".join( t._compiler_dispatch(self, asfrom=True, fromhints=from_hints, **kw) for t in extra_froms )
A :class:`.SQLCompiler` subclass which allows a small selection of non-standard SQL features to render into a string value. The :class:`.StrSQLCompiler` is invoked whenever a Core expression element is directly stringified without calling upon the :meth:`_expression.ClauseElement.compile` method. It can render a limited set of non-standard SQL constructs to assist in basic stringification, however for more substantial custom or dialect-specific SQL constructs, it will be necessary to make use of :meth:`_expression.ClauseElement.compile` directly. .. seealso:: :ref:`faq_sql_expression_string`
6259904dcad5886f8bdc5a9b
class Meta: <NEW_LINE> <INDENT> interface = output.IOutput <NEW_LINE> label = 'null'
Handler meta-data
6259904d4428ac0f6e65996d
class SequenceIterator(object): <NEW_LINE> <INDENT> def __init__(self, handle, alphabet=generic_alphabet): <NEW_LINE> <INDENT> self.handle = handle <NEW_LINE> self.alphabet = alphabet <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> raise NotImplementedError("The subclass should implement the __next__ method.") <NEW_LINE> <DEDENT> if sys.version_info[0] < 3: <NEW_LINE> <INDENT> def next(self): <NEW_LINE> <INDENT> return self.__next__() <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.__next__, None)
Base class for building SeqRecord iterators. You should write a __next__ method to return SeqRecord objects. You may wish to redefine the __init__ method as well.
6259904dec188e330fdf9cda
class DictString(object): <NEW_LINE> <INDENT> def dict2data(self, dict): <NEW_LINE> <INDENT> return '\n'.join(['%s\t%s' % (key,value) for (key,value) in dict.items()]) <NEW_LINE> <DEDENT> def data2dict(self, string): <NEW_LINE> <INDENT> return dict([entry.split('\t') for entry in string.split('\n')])
Dictionary to data file structure (string): url1 url2 url1 url2 ... and vice versa
6259904da8ecb0332587264d
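A small usage sketch of the DictString round trip (the URLs are placeholders; the code joins key and value with a tab and entries with newlines):

    ds = DictString()
    text = ds.dict2data({"http://example.com/a": "http://example.com/b"})
    # "http://example.com/a\thttp://example.com/b"
    assert ds.data2dict(text) == {"http://example.com/a": "http://example.com/b"}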
class StrPacker(BytesPacker): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def pack_pk(user_pk): <NEW_LINE> <INDENT> user_pk = user_pk.encode() <NEW_LINE> length = len(user_pk) <NEW_LINE> if length > 255: <NEW_LINE> <INDENT> raise ValueError("Primary key is too large (%d UTF-8 bytes)" % length) <NEW_LINE> <DEDENT> return bytes([length]) + user_pk <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def unpack_pk(data): <NEW_LINE> <INDENT> length = data[0] <NEW_LINE> return data[1 : length + 1].decode(), data[length + 1 :]
Generic packer for strings, from 0 to 255 UTF-8 encoded bytes.
6259904dd486a94d0ba2d400
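A sketch of the length-prefixed encoding StrPacker implements (assuming the class and its BytesPacker base are importable; the key is made up):

    packed = StrPacker.pack_pk("alice")
    # b'\x05alice' -- one length byte, then the UTF-8 payload
    value, rest = StrPacker.unpack_pk(packed + b"extra")
    # value == "alice", rest == b"extra"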
class AgeFilter(Q): <NEW_LINE> <INDENT> def __init__(self, field1_name, field2_name, operator, value): <NEW_LINE> <INDENT> self._field1_name = field1_name <NEW_LINE> self._field2_name = field2_name <NEW_LINE> self._operator = operator <NEW_LINE> self._value = value <NEW_LINE> super(AgeFilter, self).__init__() <NEW_LINE> <DEDENT> def add_to_query(self, query, used_aliases=None, negate=False): <NEW_LINE> <INDENT> alias = query.get_initial_alias() <NEW_LINE> opts = query.get_meta() <NEW_LINE> field1_name = self._field1_name <NEW_LINE> field2_name = self._field2_name <NEW_LINE> operator = self._operator <NEW_LINE> value = self._value <NEW_LINE> field1_parts = field1_name.split(LOOKUP_SEP) <NEW_LINE> field2_parts = field2_name.split(LOOKUP_SEP) <NEW_LINE> field1, _, _, joins1, _ = query.setup_joins(field1_parts, opts, alias) <NEW_LINE> field2, _, _, joins2, _ = query.setup_joins(field2_parts, opts, alias) <NEW_LINE> field1_alias = joins1[-1] <NEW_LINE> field2_alias = joins2[-1] <NEW_LINE> constraint = AgeConstraintNode( operator, value, field1.column, field1_alias, field2.column, field2_alias, negate, ) <NEW_LINE> query.where.add(constraint, AND)
Q subclass for adding age-based queries Arguments: field1_name (will follow relationships) field2_name (will follow relationships) operator (should include value placeholder, e.g. "< %s") value Usage: queryset = queryset.filter( AgeFilter( 'related__model__field', 'other__field', '> %s', 18, ) )
6259904d0fa83653e46f631a
class CorrectDescriptions(QtWidgets.QDialog): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> if parent is None: <NEW_LINE> <INDENT> self.showprocesslog = print <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.showprocesslog = parent.showprocesslog <NEW_LINE> <DEDENT> self.indata = {} <NEW_LINE> self.outdata = {} <NEW_LINE> self.parent = parent <NEW_LINE> idir = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> tfile = os.path.join(idir, r'descriptions.txt') <NEW_LINE> self.textfile = QtWidgets.QLineEdit(tfile) <NEW_LINE> self.setupui() <NEW_LINE> <DEDENT> def setupui(self): <NEW_LINE> <INDENT> gridlayout_main = QtWidgets.QGridLayout(self) <NEW_LINE> buttonbox = QtWidgets.QDialogButtonBox() <NEW_LINE> pb_textfile = QtWidgets.QPushButton('Load Description List') <NEW_LINE> buttonbox.setOrientation(QtCore.Qt.Horizontal) <NEW_LINE> buttonbox.setCenterButtons(True) <NEW_LINE> buttonbox.setStandardButtons(buttonbox.Cancel | buttonbox.Ok) <NEW_LINE> self.setWindowTitle(r'Correct Descriptions') <NEW_LINE> gridlayout_main.addWidget(self.textfile, 0, 0, 1, 1) <NEW_LINE> gridlayout_main.addWidget(pb_textfile, 0, 1, 1, 1) <NEW_LINE> gridlayout_main.addWidget(buttonbox, 5, 1, 1, 3) <NEW_LINE> buttonbox.accepted.connect(self.accept) <NEW_LINE> buttonbox.rejected.connect(self.reject) <NEW_LINE> pb_textfile.pressed.connect(self.get_textfile) <NEW_LINE> <DEDENT> def get_textfile(self, filename=''): <NEW_LINE> <INDENT> ext = ('Description list (*.txt)') <NEW_LINE> if filename == '': <NEW_LINE> <INDENT> filename, _ = QtWidgets.QFileDialog.getOpenFileName( self.parent, 'Open File', '.', ext) <NEW_LINE> if filename == '': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> self.textfile.setText(filename) <NEW_LINE> <DEDENT> def settings(self, nodialog=False): <NEW_LINE> <INDENT> if 'Seis' not in self.indata: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> tmp = self.exec_() <NEW_LINE> if tmp != 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.acceptall() <NEW_LINE> return True <NEW_LINE> <DEDENT> def loadproj(self, projdata): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def saveproj(self): <NEW_LINE> <INDENT> projdata = {} <NEW_LINE> return projdata <NEW_LINE> <DEDENT> def acceptall(self): <NEW_LINE> <INDENT> filename = self.textfile.text() <NEW_LINE> with open(filename, encoding='utf-8') as fno: <NEW_LINE> <INDENT> tmp = fno.read() <NEW_LINE> <DEDENT> masterlist = tmp.split('\n') <NEW_LINE> data = self.indata['Seis'] <NEW_LINE> nomatch = [] <NEW_LINE> correction = [] <NEW_LINE> for i in data: <NEW_LINE> <INDENT> if '3' not in i: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> text = i['3'].region <NEW_LINE> cmatch = difflib.get_close_matches(text, masterlist, 1, cutoff=0.7) <NEW_LINE> if cmatch: <NEW_LINE> <INDENT> cmatch = cmatch[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> nomatch.append(text) <NEW_LINE> continue <NEW_LINE> <DEDENT> if cmatch != text: <NEW_LINE> <INDENT> correction.append(text+' to '+cmatch) <NEW_LINE> i['3'].region = cmatch <NEW_LINE> <DEDENT> <DEDENT> self.outdata['Seis'] = data
Correct SEISAN descriptions. This compares the descriptions found in SEISAN type 3 lines to a custom list. Attributes ---------- parent : parent reference to the parent routine indata : dictionary dictionary of input datasets outdata : dictionary dictionary of output datasets
6259904d0a50d4780f7067db
class OffersAccept(View): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get(request, pk): <NEW_LINE> <INDENT> offer = get_object_or_404(Offer, pk=pk) <NEW_LINE> if ( request.user.is_authenticated() and request.user.userprofile.is_administrator ): <NEW_LINE> <INDENT> offer.publish() <NEW_LINE> messages.info(request, "Oferta została zaakceptowana.") <NEW_LINE> return redirect(settings.ANGULAR_ROOT) <NEW_LINE> <DEDENT> return HttpResponseForbidden()
Class view responsible for acceptance of offers
6259904d23849d37ff8524fa
class NIC(models.Model): <NEW_LINE> <INDENT> name = models.CharField('网卡名称', max_length=128) <NEW_LINE> hwaddr = models.CharField('网卡mac地址', max_length=64) <NEW_LINE> netmask = models.CharField(max_length=64) <NEW_LINE> ipaddrs = models.CharField('ip地址', max_length=256) <NEW_LINE> up = models.BooleanField(default=False) <NEW_LINE> server = models.ForeignKey('Server', related_name='nic_list',on_delete=1) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "网卡表" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Network interface card (NIC) information
6259904d507cdc57c63a61dc
class RoofSlope(BSElement): <NEW_LINE> <INDENT> element_type = "xs:string" <NEW_LINE> element_enumerations = [ "Flat", "Sloped", "Greater than 2 to 12", "Less than 2 to 12", "Other", "Unknown", ]
A descriptive value for tilt, when an exact numeric angle is not known.
6259904d45492302aabfd910
class ShowLldpTraffic(ShowLldpTrafficSchema): <NEW_LINE> <INDENT> cli_command = 'show lldp traffic' <NEW_LINE> exclude = ['frame_in' , 'frame_out', 'tlv_discard', 'tlv_unknown'] <NEW_LINE> def cli(self,output=None): <NEW_LINE> <INDENT> if output is None: <NEW_LINE> <INDENT> out = self.device.execute(self.cli_command) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out = output <NEW_LINE> <DEDENT> ret_dict = {} <NEW_LINE> p1 = re.compile(r'^(?P<pattern>[\w\s]+): +(?P<value>\d+)$') <NEW_LINE> for line in out.splitlines(): <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> m = p1.match(line) <NEW_LINE> if m: <NEW_LINE> <INDENT> group = m.groupdict() <NEW_LINE> if re.search('(Total +frames +out)', group['pattern']): <NEW_LINE> <INDENT> key = 'frame_out' <NEW_LINE> <DEDENT> elif re.search('(Total +entries +aged)', group['pattern']): <NEW_LINE> <INDENT> key = 'entries_aged_out' <NEW_LINE> <DEDENT> elif re.search('(Total +frames +in)', group['pattern']): <NEW_LINE> <INDENT> key = 'frame_in' <NEW_LINE> <DEDENT> elif re.search('(Total +frames +received +in +error)', group['pattern']): <NEW_LINE> <INDENT> key = 'frame_error_in' <NEW_LINE> <DEDENT> elif re.search('(Total +frames +discarded)', group['pattern']): <NEW_LINE> <INDENT> key = 'frame_discard' <NEW_LINE> <DEDENT> elif re.search('(Total +TLVs +discarded)', group['pattern']): <NEW_LINE> <INDENT> key = 'tlv_discard' <NEW_LINE> <DEDENT> elif re.search('(Total +TLVs +unrecognized)', group['pattern']): <NEW_LINE> <INDENT> key = 'tlv_unknown' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ret_dict[key] = int(group['value']) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> return ret_dict
Parser for show lldp traffic
6259904d462c4b4f79dbce3d
class Get_ip(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Get_ip, self).__init__() <NEW_LINE> self.url='http://www.xicidaili.com/nn/' <NEW_LINE> self.headers = { 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5', 'Accept-Encoding': 'gzip, deflate', 'Connection': 'keep-alive'} <NEW_LINE> self.session=requests.session() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> html=self.session.get(self.url,headers=self.headers).text <NEW_LINE> table=BeautifulSoup(html).find('table',attrs={'id':'ip_list'}).find_all('tr') <NEW_LINE> http_ips=[] <NEW_LINE> for item in table[1:]: <NEW_LINE> <INDENT> lists=item.find_all('td') <NEW_LINE> ip={'ip':'','port':''} <NEW_LINE> if lists[6].get_text()=='HTTP': <NEW_LINE> <INDENT> ip['ip']=lists[2].get_text() <NEW_LINE> ip['port']=lists[3].get_text() <NEW_LINE> http_ips.append(ip) <NEW_LINE> <DEDENT> <DEDENT> return http_ips
Scrape a list of HTTP proxy IP/port pairs from www.xicidaili.com
6259904d15baa723494633c9
class MapOidCbor(_types._OidKeysMixin, _types._CborValuesMixin, PersistentMap): <NEW_LINE> <INDENT> def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): <NEW_LINE> <INDENT> PersistentMap.__init__(self, slot=slot, compress=compress) <NEW_LINE> _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal)
Persistent map with OID (uint64) keys and CBOR values.
6259904dec188e330fdf9cdc
class NoHostsAvailable(RuntimeError): <NEW_LINE> <INDENT> pass
Exception raised when no hosts specified in KerberosConnectionPool are available.
6259904dd6c5a102081e355a
class NeosLibvirtVM(object): <NEW_LINE> <INDENT> def __init__(self, instance): <NEW_LINE> <INDENT> self._instance = instance <NEW_LINE> self.name = self._instance.name() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<{0} ({1}): {2}>".format(self.__class__.__name__, self.status, self.name) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> return not bool(self._instance.create()) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> return not bool(self._instance.destroy()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> if bool(self._instance.state() == [1, 1]): <NEW_LINE> <INDENT> return 'running' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'stopped' <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def snapshots(self): <NEW_LINE> <INDENT> return len(self._instance.listAllSnapshots())
Initialize libvirt instance.
6259904dd53ae8145f91989f
class LessVariableListAPIView(LessVariableMixin, generics.ListAPIView): <NEW_LINE> <INDENT> serializer_class = LessVariableSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = LessVariable.objects.filter( account=self.account, cssfile=self.get_cssfile()) <NEW_LINE> return queryset
Lists a website css variables **Examples .. code-block:: http GET /api/themes/sitecss/variables/ HTTP/1.1 responds .. code-block:: json { "count": 1, "previous": null, "next": null, "results": [{ "name": "primary-color", "value": "#ff0000", "created_at": "20200530T00:00:00Z", "updated_at": "20200530T00:00:00Z" }] }
6259904d8a43f66fc4bf35d5
class Unique(model.Model): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def create(cls, value): <NEW_LINE> <INDENT> entity = cls(key=model.Key(cls, value)) <NEW_LINE> txn = lambda: entity.put() if not entity.key.get() else None <NEW_LINE> return model.transaction(txn) is not None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_multi(cls, values): <NEW_LINE> <INDENT> keys = [model.Key(cls, value) for value in values] <NEW_LINE> entities = [cls(key=key) for key in keys] <NEW_LINE> func = lambda e: e.put() if not e.key.get() else None <NEW_LINE> created = [model.transaction(lambda: func(e)) for e in entities] <NEW_LINE> if created != keys: <NEW_LINE> <INDENT> model.delete_multi(k for k in created if k) <NEW_LINE> return False, [k.id() for k in keys if k not in created] <NEW_LINE> <DEDENT> return True, [] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def delete_multi(cls, values): <NEW_LINE> <INDENT> return model.delete_multi(model.Key(cls, v) for v in values)
A model to store unique values. The only purpose of this model is to "reserve" values that must be unique within a given scope, as a workaround because datastore doesn't support the concept of uniqueness for entity properties. For example, suppose we have a model `User` with three properties that must be unique across a given group: `username`, `auth_id` and `email`:: class User(model.Model): username = model.StringProperty(required=True) auth_id = model.StringProperty(required=True) email = model.StringProperty(required=True) To ensure property uniqueness when creating a new `User`, we first create `Unique` records for those properties, and if everything goes well we can save the new `User` record:: @classmethod def create_user(cls, username, auth_id, email): # Assemble the unique values for a given class and attribute scope. uniques = [ 'User.username.%s' % username, 'User.auth_id.%s' % auth_id, 'User.email.%s' % email, ] # Create the unique username, auth_id and email. success, existing = Unique.create_multi(uniques) if success: # The unique values were created, so we can save the user. user = User(username=username, auth_id=auth_id, email=email) user.put() return user else: # At least one of the values is not unique. # Make a list of the property names that failed. props = [name.split('.', 2)[1] for name in uniques] raise ValueError('Properties %r are not unique.' % props) Based on the idea from http://squeeville.com/2009/01/30/add-a-unique-constraint-to-google-app-engine/
6259904d507cdc57c63a61de
class SshTunnel(object): <NEW_LINE> <INDENT> def __init__(self, host, local_port, remote_port, port_ssh=None, user=None, password=None, private_key=None, private_key_password=None, remote_address=None): <NEW_LINE> <INDENT> ssh_port = 22 <NEW_LINE> if port_ssh is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ssh_port = int(port_ssh) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise TypeError("ssh port should be an integer") <NEW_LINE> <DEDENT> <DEDENT> self.remote_address = "127.0.0.1" if remote_address is None else remote_address <NEW_LINE> self.local_port = local_port <NEW_LINE> self.remote_port = remote_port <NEW_LINE> self.log = logging.getLogger("niav") <NEW_LINE> logging.getLogger("paramiko").setLevel(logging.WARNING) <NEW_LINE> self.tunnel = None <NEW_LINE> self.host = host <NEW_LINE> self.port = ssh_port <NEW_LINE> self.user = "root" if user is None else user <NEW_LINE> self.password = password <NEW_LINE> self.private_key = private_key <NEW_LINE> self.private_key_password = private_key_password <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> if self.tunnel is None: <NEW_LINE> <INDENT> self.log.info("opening tunnel to '%s'" % self.host) <NEW_LINE> self.tunnel = SSHTunnelForwarder( self.host, ssh_username=self.user, ssh_password=self.password, local_bind_address=("127.0.0.1", self.local_port), remote_bind_address=(self.remote_address, self.remote_port), ssh_pkey=self.private_key, ssh_private_key_password=self.private_key_password, ) <NEW_LINE> self.log.info("tunnel to '%s' is initialized" % self.host) <NEW_LINE> <DEDENT> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.tunnel.start() <NEW_LINE> self.local_port = self.tunnel.local_bind_port <NEW_LINE> self.log.info("tunnel to '%s' is started (local bind port: %s)" % (self.host, self.tunnel.local_bind_port)) <NEW_LINE> return self.local_port <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.tunnel.stop() <NEW_LINE> self.log.info("tunnel to '%s' is stopped" % self.host) <NEW_LINE> <DEDENT> def get_local_port(self): <NEW_LINE> <INDENT> return self.local_port
SSH tunnel
6259904d50485f2cf55dc3cb
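For context, a minimal usage sketch of the SshTunnel wrapper above (host, ports and key path are placeholders):

    tunnel = SshTunnel(host="203.0.113.10", local_port=5433, remote_port=5432,
                       user="deploy", private_key="/home/deploy/.ssh/id_rsa")
    tunnel.connect()              # builds the underlying SSHTunnelForwarder
    local_port = tunnel.start()   # returns the bound local port
    # ... connect to 127.0.0.1:local_port ...
    tunnel.stop()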
class SequentialDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, image_provider, image_indexes, config, stage='test', transforms=ToTensor()): <NEW_LINE> <INDENT> super(SequentialDataset, self).__init__(image_provider, image_indexes, config, stage, transforms=transforms) <NEW_LINE> self.good_tiles = [] <NEW_LINE> self.init_good_tiles() <NEW_LINE> self.keys.update({'sy', 'sx'}) <NEW_LINE> <DEDENT> def init_good_tiles(self): <NEW_LINE> <INDENT> self.good_tiles = [] <NEW_LINE> for im_idx in self.image_indexes: <NEW_LINE> <INDENT> item = self.image_provider[im_idx] <NEW_LINE> positions = self.cropper.cropper_positions(item.image) <NEW_LINE> if self.image_provider.has_alpha: <NEW_LINE> <INDENT> item = self.image_provider[im_idx] <NEW_LINE> alpha_generator = self.cropper.sequential_crops(item.alpha) <NEW_LINE> for idx, alpha in enumerate(alpha_generator): <NEW_LINE> <INDENT> if np.mean(alpha) > 5: <NEW_LINE> <INDENT> self.good_tiles.append((im_idx, *positions[idx])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for pos in positions: <NEW_LINE> <INDENT> self.good_tiles.append((im_idx, *pos)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> if idx >= self.__len__(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> im_idx, sx, sy = self.good_tiles[idx] <NEW_LINE> item = self.image_provider[im_idx] <NEW_LINE> im = self.cropper.crop_image(item.image, sx, sy) <NEW_LINE> im = self.transforms(im) <NEW_LINE> output = dict() <NEW_LINE> output['img_data'] = im <NEW_LINE> output['seg_label'] = segm.astype(np.int) <NEW_LINE> return output <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.good_tiles)
dataset for inference
6259904d8da39b475be0462f
class DataObject: <NEW_LINE> <INDENT> def __init__(self, from_dict=None): <NEW_LINE> <INDENT> _from_dict = from_dict is not None and isinstance(from_dict, dict) <NEW_LINE> self._lock = RLock() <NEW_LINE> self._initialized = False <NEW_LINE> if _from_dict and not self._initialized: <NEW_LINE> <INDENT> for key, val in from_dict.items(): <NEW_LINE> <INDENT> setattr(self, key, val) <NEW_LINE> <DEDENT> self._initialized = True <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> setattr(self, item, None) <NEW_LINE> <DEDENT> def __setattr__(self, attr, value): <NEW_LINE> <INDENT> if '_lock' == attr: <NEW_LINE> <INDENT> super().__setattr__(attr, value) <NEW_LINE> <DEDENT> with self._lock: <NEW_LINE> <INDENT> super().__setattr__(attr, value)
Generic object used to store data/settings as attributes. Args: from_dict (dict): Initialize object using dict.
6259904dcad5886f8bdc5a9d
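A short sketch of how the DataObject container behaves (keys and values are made up):

    settings = DataObject(from_dict={"host": "localhost", "port": 8080})
    print(settings.host, settings.port)   # localhost 8080
    print(settings.timeout)               # unknown attribute -> auto-created as None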
class Sqlite(Download): <NEW_LINE> <INDENT> ext = 'sqlite' <NEW_LINE> def create(self, req): <NEW_LINE> <INDENT> print('+---------------------------------------------+') <NEW_LINE> print('| This download must be created "by hand".') <NEW_LINE> print('| Make sure a suitable file is available at') <NEW_LINE> print('|', self.abspath(req)) <NEW_LINE> print('| when the app is started.') <NEW_LINE> print('+---------------------------------------------+') <NEW_LINE> return
Generic download - no support for file creation.
6259904d7cff6e4e811b6e79
class ParentsChildsFullDataViewSet(BaseViewSet): <NEW_LINE> <INDENT> permission_code = 'parentschilds' <NEW_LINE> queryset = ParentsChilds.objects.all().select_related('parent', 'child', 'relationship', 'created_by','updated_by') <NEW_LINE> serializer_class = ParentsChildsFullDataSerializer <NEW_LINE> filter_class = ParentsChildsFullDataFilter <NEW_LINE> filter_backends = (OrderingFilter, DjangoFilterBackend)
Parents/children views with full data. FILTERS: 'id': ['exact'], 'description':['exact', 'icontains'], 'type_diagnostic':['exact',], 'created_at': ['exact', 'year', 'year__gte', 'year__lte', 'month', 'month__lte', 'month__gte', 'day', 'day__lte', 'day__gte', 'year__in', 'month__in', 'day__in'], 'created_by__username': ['exact', 'icontains'], 'updated_at': ['exact', 'year', 'year__gte', 'year__lte', 'month', 'month__lte', 'month__gte', 'day', 'day__lte', 'day__gte', 'year__in', 'month__in', 'day__in'], 'updated_by__username': ['exact', 'icontains'],
6259904d3c8af77a43b6895d