Columns: code (string, 4 to 4.48k characters), docstring (string, 1 to 6.45k characters), _id (string, 24 characters).
class StructureError(Exception): <NEW_LINE> <INDENT> pass
Represents cases in which an algebraic structure was expected to have a certain property, or be of a certain type, but was not.
6259904e30dc7b76659a0c99
class super_aes(object): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> <DEDENT> def encrypt(self, in_path, out_path): <NEW_LINE> <INDENT> os.system(r".\Script\openssl.exe enc -aes-256-cbc -e -k {} -in {} -out {}".format(self.key, in_path, out_path) ) <NEW_LINE> os.remove(in_path) <NEW_LINE> <DEDENT> def decrypt(self, in_path, out_path): <NEW_LINE> <INDENT> os.system(r".\Script\openssl.exe enc -aes-256-cbc -d -k {} -in {} -out {}".format(self.key, in_path, out_path) ) <NEW_LINE> os.remove(in_path)
AES encryption/decryption class. It shells out to the system's openssl; if openssl is not installed, download it and add it to the system environment variables, or use an absolute or relative path to point at its exact location.
6259904e0a50d4780f7067ef
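A minimal usage sketch for the super_aes record above, assuming os is imported (the class relies on os.system and os.remove), the openssl binary exists at the relative path the class hard-codes, and the key and file names are hypothetical; note that each call deletes its input file.
import os  # required by super_aes, which shells out to openssl via os.system

cipher = super_aes("hypothetical-passphrase")
cipher.encrypt("notes.txt", "notes.enc")   # removes notes.txt after encrypting
cipher.decrypt("notes.enc", "notes.txt")   # removes notes.enc after decrypting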
class Entry(models.Model): <NEW_LINE> <INDENT> topic = models.ForeignKey(Topic, on_delete=models.CASCADE) <NEW_LINE> text = models.TextField() <NEW_LINE> date_added = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = 'entries' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if len(self.text) > 50: <NEW_LINE> <INDENT> return self.text[:50] + "..." <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.text
Something specific learned about a topic. Each entry needs to be associated with a particular topic. This is a many-to-one relationship because many entries will be associated with one Topic.
6259904e4428ac0f6e659997
class PrivateIngredientsApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> self.user = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> self.client.force_authenticate(self.user) <NEW_LINE> <DEDENT> def test_retrieve_ingredient_list(self): <NEW_LINE> <INDENT> Ingredient.objects.create(user=self.user, name='Kale') <NEW_LINE> Ingredient.objects.create(user=self.user, name='Salt') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> ingredients = Ingredient.objects.all().order_by('-name') <NEW_LINE> serializer = IngredientSerializer(ingredients, many='True') <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(res.data, serializer.data) <NEW_LINE> <DEDENT> def test_ingredients_limited_to_user(self): <NEW_LINE> <INDENT> user2 = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> Ingredient.objects.create(user=user2, name='Vinegar') <NEW_LINE> ingredient = Ingredient.objects.create(user=self.user, name='Tumeric') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(len(res.data), 1) <NEW_LINE> self.assertEqual(res.data[0]['name'], ingredient.name) <NEW_LINE> <DEDENT> def test_create_ingredient_successful(self): <NEW_LINE> <INDENT> payload = {'name': 'Cabbage'} <NEW_LINE> self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> exists = Ingredient.objects.filter( user=self.user, name=payload['name'] ).exists() <NEW_LINE> self.assertTrue(exists) <NEW_LINE> <DEDENT> def test_create_ingredient_invalid(self): <NEW_LINE> <INDENT> payload = {'name': ''} <NEW_LINE> res = self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
Test the private ingredients API
6259904e82261d6c527308f9
class AttachmentType(Enum): <NEW_LINE> <INDENT> FILE = 1 <NEW_LINE> ITEM = 2 <NEW_LINE> REFERENCE = 3
OData attachment type.
6259904e7d847024c075d838
class ZDT1to3_g: <NEW_LINE> <INDENT> def __init__(self, num_variables): <NEW_LINE> <INDENT> self.num_variables = num_variables <NEW_LINE> <DEDENT> def __call__(self, phenome): <NEW_LINE> <INDENT> n = len(phenome) <NEW_LINE> assert n == self.num_variables <NEW_LINE> temp_sum = 0.0 <NEW_LINE> for i in range(1, n): <NEW_LINE> <INDENT> temp_sum += phenome[i] / float(n - 1) <NEW_LINE> <DEDENT> return 1.0 + (9.0 * temp_sum)
The g function for ZDT1, ZDT2 and ZDT3.
6259904ee76e3b2f99fd9e65
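A small worked example for the ZDT1to3_g record above; the phenome values are made up and only illustrate the formula g(x) = 1 + 9 * sum(x[1:]) / (n - 1).
g = ZDT1to3_g(num_variables=3)
# sum of phenome[1:] is 0.5 + 1.0 = 1.5, divided by (3 - 1) gives 0.75
print(g([0.0, 0.5, 1.0]))  # 1.0 + 9.0 * 0.75 = 7.75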
class Repo(object): <NEW_LINE> <INDENT> def __init__(self, repo_uri): <NEW_LINE> <INDENT> self.repo_uri = repo_uri <NEW_LINE> cachedir = os.path.join(__opts__['cachedir'], 'hg_pillar') <NEW_LINE> hash_type = getattr(hashlib, __opts__.get('hash_type', 'md5')) <NEW_LINE> if six.PY2: <NEW_LINE> <INDENT> repo_hash = hash_type(repo_uri).hexdigest() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> repo_hash = hash_type(salt.utils.to_bytes(repo_uri)).hexdigest() <NEW_LINE> <DEDENT> self.working_dir = os.path.join(cachedir, repo_hash) <NEW_LINE> if not os.path.isdir(self.working_dir): <NEW_LINE> <INDENT> self.repo = hglib.clone(repo_uri, self.working_dir) <NEW_LINE> self.repo.open() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.repo = hglib.open(self.working_dir) <NEW_LINE> <DEDENT> <DEDENT> def pull(self): <NEW_LINE> <INDENT> log.debug('Updating hg repo from hg_pillar module (pull)') <NEW_LINE> self.repo.pull() <NEW_LINE> <DEDENT> def update(self, branch='default'): <NEW_LINE> <INDENT> log.debug('Updating hg repo from hg_pillar module (pull)') <NEW_LINE> self.repo.pull() <NEW_LINE> log.debug('Updating hg repo from hg_pillar module (update)') <NEW_LINE> self.repo.update(branch, clean=True) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.repo.close() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> self.close()
Deal with remote hg (mercurial) repository for Pillar
6259904eac7a0e7691f73940
class Plugin(CommandLookupMixin): <NEW_LINE> <INDENT> implements(IEridanusPlugin) <NEW_LINE> name = _NameDescriptor() <NEW_LINE> pluginName = _PluginNameDescriptor() <NEW_LINE> axiomCommands = ()
Simple plugin mixin.
6259904e498bea3a75a58f85
class WarpCTC(chainer.Chain): <NEW_LINE> <INDENT> def __init__(self, odim, eprojs, dropout_rate): <NEW_LINE> <INDENT> super(WarpCTC, self).__init__() <NEW_LINE> self.dropout_rate = dropout_rate <NEW_LINE> self.loss = None <NEW_LINE> with self.init_scope(): <NEW_LINE> <INDENT> self.ctc_lo = L.Linear(eprojs, odim) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, hs, ys): <NEW_LINE> <INDENT> self.loss = None <NEW_LINE> ilens = [x.shape[0] for x in hs] <NEW_LINE> olens = [x.shape[0] for x in ys] <NEW_LINE> y_hat = self.ctc_lo(F.dropout( F.pad_sequence(hs), ratio=self.dropout_rate), n_batch_axes=2) <NEW_LINE> y_hat = y_hat.transpose(1, 0, 2) <NEW_LINE> logging.info(self.__class__.__name__ + ' input lengths: ' + str(ilens)) <NEW_LINE> logging.info(self.__class__.__name__ + ' output lengths: ' + str(olens)) <NEW_LINE> from chainer_ctc.warpctc import ctc as warp_ctc <NEW_LINE> self.loss = warp_ctc(y_hat, ilens, [cuda.to_cpu(l.data) for l in ys])[0] <NEW_LINE> logging.info('ctc loss:' + str(self.loss.data)) <NEW_LINE> return self.loss <NEW_LINE> <DEDENT> def log_softmax(self, hs): <NEW_LINE> <INDENT> y_hat = self.ctc_lo(F.pad_sequence(hs), n_batch_axes=2) <NEW_LINE> return F.log_softmax(y_hat.reshape(-1, y_hat.shape[-1])).reshape(y_hat.shape) <NEW_LINE> <DEDENT> def argmax(self, hs_pad): <NEW_LINE> <INDENT> return F.argmax(self.ctc_lo(F.pad_sequence(hs_pad), n_batch_axes=2), axis=-1)
Chainer implementation of the warp-ctc layer. Args: odim (int): The output dimension. eprojs (int | None): Dimension of the input vector from the encoder. dropout_rate (float): Dropout rate.
6259904e45492302aabfd938
class TubPublisher(BaseClient): <NEW_LINE> <INDENT> def __init__(self, config_path='mqtt/brokers.yml', stage='test', debug=False): <NEW_LINE> <INDENT> config = BrokerConfig(BROKER, stage) <NEW_LINE> client_id = config.get_value(BROKER, stage, KEY_CLIENT_ID) <NEW_LINE> host = config.get_value(BROKER, stage, KEY_HOST) <NEW_LINE> port = config.get_value(BROKER, stage, KEY_PORT) <NEW_LINE> if port is not None: <NEW_LINE> <INDENT> port = int(port) <NEW_LINE> <DEDENT> user = config.get_value(BROKER, stage, KEY_USER) <NEW_LINE> password = config.get_value(BROKER, stage, KEY_PASSWORD) <NEW_LINE> self.topic = config.get_value(BROKER, stage, KEY_PUB_TOPIC) <NEW_LINE> super().__init__(host=host, port=port, client_id=client_id, user=user, password=password) <NEW_LINE> <DEDENT> def run(self, user_throttle, user_angle, cam_image_array, user_mode, timestamp): <NEW_LINE> <INDENT> msg_dict = Message(user_throttle, user_angle, cam_image_array, user_mode, timestamp) <NEW_LINE> ibm_msg_dict = { 'd': msg_dict } <NEW_LINE> super().publish(self.topic, ibm_msg_dict) <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> super().disconnect()
Client class that publishes only the JSON portion of the tub data to the MQTT server. It is also a part class that can be added to the Vehicle framework.
6259904e004d5f362081fa1b
class Wrapper(object): <NEW_LINE> <INDENT> def __init__(self, picklefile=None): <NEW_LINE> <INDENT> if picklefile is None: <NEW_LINE> <INDENT> with open(DEFAULT_MODEL, 'rb') as infile: <NEW_LINE> <INDENT> self.model = pickle.load(infile) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.model = pickle.load(picklefile) <NEW_LINE> <DEDENT> self.model.options.verbosity = 0 <NEW_LINE> <DEDENT> def process(self, input): <NEW_LINE> <INDENT> return self.process_all([input])[0] <NEW_LINE> <DEDENT> def process_all(self, inputs): <NEW_LINE> <INDENT> insts = [Instance(i) for i in inputs] <NEW_LINE> return self.model.predict(insts, verbosity=0)
A wrapper class for pickled Learners.
6259904e435de62698e9d26c
class ExpectimaxAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def expectimax(self, gameState, agent, depth): <NEW_LINE> <INDENT> from decimal import Decimal <NEW_LINE> neg_inf = Decimal('-Infinity') <NEW_LINE> if agent==gameState.getNumAgents(): <NEW_LINE> <INDENT> return self.expectimax(gameState, 0, depth-1) <NEW_LINE> <DEDENT> if gameState.isWin() or gameState.isLose() or depth==0: <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> if agent!=0: <NEW_LINE> <INDENT> ret_sum = 0.0 <NEW_LINE> ret_size = 0 <NEW_LINE> for legal_action in gameState.getLegalActions(agent): <NEW_LINE> <INDENT> ret_value = float(self.expectimax( gameState.generateSuccessor(agent, legal_action), agent+1, depth )) <NEW_LINE> ret_sum += ret_value <NEW_LINE> ret_size += 1 <NEW_LINE> <DEDENT> return 3.5 * (float(ret_sum)/float(ret_size)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = neg_inf <NEW_LINE> for legal_action in gameState.getLegalActions(0): <NEW_LINE> <INDENT> ret_value = self.expectimax( gameState.generateSuccessor(0, legal_action), 1, depth ) <NEW_LINE> if ret_value>ret: <NEW_LINE> <INDENT> ret = ret_value <NEW_LINE> <DEDENT> <DEDENT> return 3.5 * float(ret) <NEW_LINE> <DEDENT> <DEDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> import random <NEW_LINE> from decimal import Decimal <NEW_LINE> neg_inf = Decimal('-Infinity') <NEW_LINE> compared = neg_inf <NEW_LINE> legal_actions = list() <NEW_LINE> for legal_action in gameState.getLegalActions(0): <NEW_LINE> <INDENT> temp = self.expectimax(gameState.generateSuccessor(0, legal_action), 1, self.depth) <NEW_LINE> legal_actions.append(legal_action) <NEW_LINE> if temp>compared: <NEW_LINE> <INDENT> compared = temp <NEW_LINE> <DEDENT> <DEDENT> final_legal_actions = list() <NEW_LINE> for legal_action in gameState.getLegalActions(0): <NEW_LINE> <INDENT> temp = self.expectimax(gameState.generateSuccessor(0, legal_action), 1, self.depth) <NEW_LINE> if temp==compared: <NEW_LINE> <INDENT> final_legal_actions.append(legal_action) <NEW_LINE> <DEDENT> <DEDENT> random_pos = random.randrange(len(final_legal_actions)) <NEW_LINE> return final_legal_actions[random_pos]
Your expectimax agent (question 4)
6259904e29b78933be26aaf5
class AesBlumenthal256(aesbase.AbstractAesBlumenthal): <NEW_LINE> <INDENT> serviceID = (1, 3, 6, 1, 4, 1, 9, 12, 6, 1, 2) <NEW_LINE> keySize = 32
AES 256 bit encryption (Internet draft) http://tools.ietf.org/html/draft-blumenthal-aes-usm-04
6259904ebaa26c4b54d50710
class JSONEncoder(BaseJSONEncoder): <NEW_LINE> <INDENT> default_model_iter = ModelFormatterIter <NEW_LINE> def default(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, BaseModel): <NEW_LINE> <INDENT> return self.default(self.default_model_iter(obj)) <NEW_LINE> <DEDENT> elif isinstance(obj, BaseFormatterIter): <NEW_LINE> <INDENT> return obj.format() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(JSONEncoder, self).default(obj)
Json encoder for Dirty Models
6259904eb57a9660fecd2ee2
class SentimentBatchResultItem(Model): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'score': {'key': 'score', 'type': 'float'}, 'statistics': {'key': 'statistics', 'type': 'DocumentStatistics'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(SentimentBatchResultItem, self).__init__(**kwargs) <NEW_LINE> self.id = kwargs.get('id', None) <NEW_LINE> self.score = kwargs.get('score', None) <NEW_LINE> self.statistics = kwargs.get('statistics', None)
SentimentBatchResultItem. :param id: Unique, non-empty document identifier. :type id: str :param score: A decimal number between 0 and 1 denoting the sentiment of the document. A score above 0.7 usually refers to a positive document while a score below 0.3 normally has a negative connotation. Mid values refer to neutral text. :type score: float :param statistics: (Optional) if showStats=true was specified in the request this field will contain information about the document payload. :type statistics: ~azure.cognitiveservices.language.textanalytics.models.DocumentStatistics
6259904e30dc7b76659a0c9b
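A hedged construction sketch for the SentimentBatchResultItem record above, assuming the msrest Model base class accepts keyword arguments as usual; the id and score values are invented.
item = SentimentBatchResultItem(id="doc-1", score=0.92)
print(item.id, item.score)   # doc-1 0.92
print(item.statistics)       # None unless showStats=true was specified in the request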
class FakeTTYStdout(StringIO.StringIO): <NEW_LINE> <INDENT> def isatty(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if data.startswith('\r'): <NEW_LINE> <INDENT> self.seek(0) <NEW_LINE> data = data[1:] <NEW_LINE> <DEDENT> return StringIO.StringIO.write(self, data)
A fake stdout that tries to emulate a TTY device as closely as possible.
6259904ecad5886f8bdc5ab1
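An illustrative sketch of the carriage-return handling in the FakeTTYStdout record above, assuming Python 2's StringIO module is available (the record itself relies on it).
out = FakeTTYStdout()
out.write("progress: 10%")
out.write("\rprogress: 20%")   # the leading \r rewinds to the start before writing
print(out.getvalue())          # progress: 20%
print(out.isatty())            # True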
class WorkerPool(object): <NEW_LINE> <INDENT> def __init__(self, queues, *args, **kwargs): <NEW_LINE> <INDENT> self._args = args <NEW_LINE> self._kwargs = kwargs <NEW_LINE> self._queues = queues <NEW_LINE> self._sentinel_worker = None <NEW_LINE> self.waiting_time = kwargs.pop("waiting_time", 10) <NEW_LINE> <DEDENT> def _make_worker(self, queue): <NEW_LINE> <INDENT> return Worker(queue, *self._args, **self._kwargs) <NEW_LINE> <DEDENT> def terminate(self): <NEW_LINE> <INDENT> os.kill(self._sentinel_worker.pid, signal.SIGINT) <NEW_LINE> self._sentinel_worker.join() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> logger = logging.getLogger("meepo.replicator.sentinel") <NEW_LINE> def _f(): <NEW_LINE> <INDENT> worker_map = { q: self._make_worker(q) for q in self._queues } <NEW_LINE> for _, worker in worker_map.items(): <NEW_LINE> <INDENT> worker.start() <NEW_LINE> <DEDENT> logger.info("starting sentinel...") <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> logger.debug("ping {} worker".format(self._args[0])) <NEW_LINE> dead = qsize = 0 <NEW_LINE> for queue, worker in worker_map.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> qsize += queue.qsize() <NEW_LINE> <DEDENT> except NotImplementedError: <NEW_LINE> <INDENT> qsize = None <NEW_LINE> <DEDENT> if not worker.is_alive(): <NEW_LINE> <INDENT> dead += 1 <NEW_LINE> logger.warn( "{} worker {} dead, recreating...".format( self._args[0], worker.pid)) <NEW_LINE> worker_map[queue] = self._make_worker(queue) <NEW_LINE> worker_map[queue].start() <NEW_LINE> <DEDENT> <DEDENT> msg = ["{} total qsize {}".format(self._args[0], qsize), "{} worker alive, {} worker dead".format( len(worker_map) - dead, dead)] <NEW_LINE> logger.info("; ".join(msg)) <NEW_LINE> time.sleep(self.waiting_time) <NEW_LINE> <DEDENT> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> for worker in worker_map.values(): <NEW_LINE> <INDENT> worker.terminate() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self._sentinel_worker = Process(target=_f) <NEW_LINE> self._sentinel_worker.start()
Manage a set of workers and recreate a worker when it dies.
6259904e1f037a2d8b9e529f
class HeadFootBorder(Border): <NEW_LINE> <INDENT> WIDTH_MAXIMUM = 100 <NEW_LINE> def head(self, width): <NEW_LINE> <INDENT> spacing = " " * int((self.WIDTH_MAXIMUM - width) / 2) <NEW_LINE> draw = '{}{}{}{}{}'.format( spacing, self.up_border['ul'], self.up_border['up'] * (width - 2), self.up_border['ur'], spacing) <NEW_LINE> print(self.color + draw) <NEW_LINE> <DEDENT> def foot(self, width): <NEW_LINE> <INDENT> spacing = " " * int((self.WIDTH_MAXIMUM - width) / 2) <NEW_LINE> draw = '{}{}{}{}{}'.format( spacing, self.down_border['dl'], self.down_border['dw'] * (width - 2), self.down_border['dr'], spacing) <NEW_LINE> print(self.color + draw)
Building the head and the foot of a border
6259904ee5267d203ee6cd51
class BaseModel(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _get_param_names(cls): <NEW_LINE> <INDENT> init = getattr(cls.__init__, 'deprecated_original', cls.__init__) <NEW_LINE> if init is object.__init__: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> init_signature = signature(init) <NEW_LINE> parameters = [p for p in init_signature.parameters.values() if p.name != 'self' and p.kind != p.VAR_KEYWORD] <NEW_LINE> for p in parameters: <NEW_LINE> <INDENT> if p.kind == p.VAR_POSITIONAL: <NEW_LINE> <INDENT> raise RuntimeError("DIG Models should always " "specify their parameters in the signature" " of their __init__ (no varargs)." " %s with constructor %s doesn't " " follow this convention." % (cls, init_signature)) <NEW_LINE> <DEDENT> <DEDENT> return sorted([p.name for p in parameters]) <NEW_LINE> <DEDENT> def get_params(self, deep=True): <NEW_LINE> <INDENT> out = dict() <NEW_LINE> for key in self._get_param_names(): <NEW_LINE> <INDENT> warnings.simplefilter("always", DeprecationWarning) <NEW_LINE> try: <NEW_LINE> <INDENT> with warnings.catch_warnings(record=True) as w: <NEW_LINE> <INDENT> value = getattr(self, key, None) <NEW_LINE> <DEDENT> if len(w) and w[0].category == DeprecationWarning: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> warnings.filters.pop(0) <NEW_LINE> <DEDENT> if deep and hasattr(value, 'get_params'): <NEW_LINE> <INDENT> deep_items = value.get_params().items() <NEW_LINE> out.update((key + '__' + k, val) for k, val in deep_items) <NEW_LINE> <DEDENT> out[key] = value <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> def set_params(self, **params): <NEW_LINE> <INDENT> if not params: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> valid_params = self.get_params(deep=True) <NEW_LINE> for key, value in six.iteritems(params): <NEW_LINE> <INDENT> split = key.split('__', 1) <NEW_LINE> if len(split) > 1: <NEW_LINE> <INDENT> name, sub_name = split <NEW_LINE> if name not in valid_params: <NEW_LINE> <INDENT> raise ValueError('Invalid parameter %s for Model %s. ' 'Check the list of available parameters ' 'with `Model.get_params().keys()`.' % (name, self)) <NEW_LINE> <DEDENT> sub_object = valid_params[name] <NEW_LINE> sub_object.set_params(**{sub_name: value}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if key not in valid_params: <NEW_LINE> <INDENT> raise ValueError('Invalid parameter %s for Model %s. ' 'Check the list of available parameters ' 'with `Model.get_params().keys()`.' % (key, self.__class__.__name__)) <NEW_LINE> <DEDENT> setattr(self, key, value) <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> class_name = self.__class__.__name__ <NEW_LINE> return '%s(%s)' % (class_name, pprint(self.get_params(deep=False), offset=len(class_name),),)
Base class for all Models in DIG Notes ----- All Models should specify all the parameters that can be set at the class level in their ``__init__`` as explicit keyword arguments (no ``*args`` or ``**kwargs``).
6259904e6e29344779b01aa9
class Control: <NEW_LINE> <INDENT> def __init__(self, widget, name=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.widget = widget
A control which can be placed in a Tweak.List
6259904ef7d966606f7492eb
class FileAttributes(FSLocation): <NEW_LINE> <INDENT> __file_attributes__ = [] <NEW_LINE> def __getattribute__(self, name): <NEW_LINE> <INDENT> if name in object.__getattribute__(self, '__file_attributes__'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(os.path.join(self.fs_path, name), 'r') as file: <NEW_LINE> <INDENT> return file.read().strip('\n').strip() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> return object.__getattribute__(self, name)
Mixin for objects handling attributes stored in files. A single file represents a single attribute where the file name is the attribute name and the file content is the attribute value.
6259904e4428ac0f6e659999
class Party: <NEW_LINE> <INDENT> pass
A political party with a platform, pointer to jurisdiction, and heuristic for adaptation.
6259904ee64d504609df9e02
class Wapi(Flask): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(__name__) <NEW_LINE> self.config_json = config().json <NEW_LINE> self.setup_routes() <NEW_LINE> self.reload() <NEW_LINE> <DEDENT> def reload(self): <NEW_LINE> <INDENT> self.config['SERVER_NAME'] = self.config_json['server']['url'] <NEW_LINE> self.run( host=self.config_json['server']['host'], port=self.config_json['server']['port'], debug=self.config_json['server']['debug'] ) <NEW_LINE> <DEDENT> def setup_routes(self): <NEW_LINE> <INDENT> if self.config_json['server']['enable_admin'] == True: <NEW_LINE> <INDENT> msg = [] <NEW_LINE> msg.append(self.enable_admin()) <NEW_LINE> msg.append(self.enable_login()) <NEW_LINE> msg.append(self.enable_static()) <NEW_LINE> msg.append(self.enable_user()) <NEW_LINE> msg.append(self.enable_task()) <NEW_LINE> for m in msg: <NEW_LINE> <INDENT> print(m) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def enable_admin(self): <NEW_LINE> <INDENT> url = "/admin" <NEW_LINE> print("adding route %s" % url) <NEW_LINE> view = app.views.admin.AdminView.as_view('admin') <NEW_LINE> self.add_url_rule(url, view_func=view, methods=['GET',]) <NEW_LINE> return "Admin Module loaded" <NEW_LINE> <DEDENT> def enable_static(self): <NEW_LINE> <INDENT> print("adding route /static") <NEW_LINE> static_view = app.views.static.StaticView.as_view('static_file') <NEW_LINE> self.add_url_rule('/static/<path:path>', view_func=static_view, methods=['GET',]) <NEW_LINE> return "Static Module loaded" <NEW_LINE> <DEDENT> def enable_login(self): <NEW_LINE> <INDENT> url = "/login" <NEW_LINE> print("adding route %s" % url) <NEW_LINE> view = app.views.admin.LoginView.as_view('login') <NEW_LINE> self.add_url_rule(url, view_func=view, methods=['GET', 'POST']) <NEW_LINE> return "Login Module loaded" <NEW_LINE> <DEDENT> def enable_user(self): <NEW_LINE> <INDENT> url = "/user" <NEW_LINE> print("adding route %s" % url) <NEW_LINE> user_view = app.views.admin.UserView.as_view('user_api') <NEW_LINE> self.add_url_rule('/user/', defaults={'user_id': None}, view_func=user_view, methods=['GET',]) <NEW_LINE> self.add_url_rule('/user/', view_func=user_view, methods=['POST',]) <NEW_LINE> self.add_url_rule('/user/<int:user_id>', view_func=user_view, methods=['GET', 'PUT', 'DELETE']) <NEW_LINE> return "User Module loaded" <NEW_LINE> <DEDENT> def enable_task(self): <NEW_LINE> <INDENT> url = "/task" <NEW_LINE> print("adding route %s" % url) <NEW_LINE> view = app.views.admin.TaskView.as_view('task_api') <NEW_LINE> self.add_url_rule('/task/', defaults={'task_id': None}, view_func=view, methods=['GET',]) <NEW_LINE> self.add_url_rule('/task/', view_func=view, methods=['POST',]) <NEW_LINE> self.add_url_rule('/task/<int:task_id>', view_func=view, methods=['GET', 'PUT', 'DELETE'])
docstring for AdminRouter
6259904e8da39b475be0464c
class InputStreamLength(object): <NEW_LINE> <INDENT> swagger_types = { 'stream': 'InputStream', 'length': 'int', 'name': 'str', 'character_encoding': 'str', 'extension': 'str' } <NEW_LINE> attribute_map = { 'stream': 'stream', 'length': 'length', 'name': 'name', 'character_encoding': 'characterEncoding', 'extension': 'extension' } <NEW_LINE> def __init__(self, stream=None, length=None, name=None, character_encoding=None, extension=None): <NEW_LINE> <INDENT> self._stream = None <NEW_LINE> self._length = None <NEW_LINE> self._name = None <NEW_LINE> self._character_encoding = None <NEW_LINE> self._extension = None <NEW_LINE> self.discriminator = None <NEW_LINE> if stream is not None: <NEW_LINE> <INDENT> self.stream = stream <NEW_LINE> <DEDENT> if length is not None: <NEW_LINE> <INDENT> self.length = length <NEW_LINE> <DEDENT> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if character_encoding is not None: <NEW_LINE> <INDENT> self.character_encoding = character_encoding <NEW_LINE> <DEDENT> if extension is not None: <NEW_LINE> <INDENT> self.extension = extension <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def stream(self): <NEW_LINE> <INDENT> return self._stream <NEW_LINE> <DEDENT> @stream.setter <NEW_LINE> def stream(self, stream): <NEW_LINE> <INDENT> self._stream = stream <NEW_LINE> <DEDENT> @property <NEW_LINE> def length(self): <NEW_LINE> <INDENT> return self._length <NEW_LINE> <DEDENT> @length.setter <NEW_LINE> def length(self, length): <NEW_LINE> <INDENT> self._length = length <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def character_encoding(self): <NEW_LINE> <INDENT> return self._character_encoding <NEW_LINE> <DEDENT> @character_encoding.setter <NEW_LINE> def character_encoding(self, character_encoding): <NEW_LINE> <INDENT> self._character_encoding = character_encoding <NEW_LINE> <DEDENT> @property <NEW_LINE> def extension(self): <NEW_LINE> <INDENT> return self._extension <NEW_LINE> <DEDENT> @extension.setter <NEW_LINE> def extension(self, extension): <NEW_LINE> <INDENT> self._extension = extension <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(InputStreamLength, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, InputStreamLength): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259904e24f1403a92686301
class ProductMethodTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_model_can_create_a_product(self): <NEW_LINE> <INDENT> pass
This class defines the test suite for the Product model.
6259904e26068e7796d4ddab
class CrispyFormMixin(object): <NEW_LINE> <INDENT> def get_form(self, form_class=None): <NEW_LINE> <INDENT> form = super(CrispyFormMixin, self).get_form(form_class) <NEW_LINE> form.helper = default_crispy_helper() <NEW_LINE> return form
Mixin to add Crispy form helper.
6259904e507cdc57c63a6207
class TuyaFanDevice(TuyaDevice, FanEntity): <NEW_LINE> <INDENT> def __init__(self, tuya, platform): <NEW_LINE> <INDENT> super().__init__(tuya, platform) <NEW_LINE> self.entity_id = ENTITY_ID_FORMAT.format(tuya.object_id()) <NEW_LINE> self.speeds = [] <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> await super().async_added_to_hass() <NEW_LINE> self.speeds.extend(self._tuya.speed_list()) <NEW_LINE> <DEDENT> def set_percentage(self, percentage: int) -> None: <NEW_LINE> <INDENT> if percentage == 0: <NEW_LINE> <INDENT> self.turn_off() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tuya_speed = percentage_to_ordered_list_item(self.speeds, percentage) <NEW_LINE> self._tuya.set_speed(tuya_speed) <NEW_LINE> <DEDENT> <DEDENT> def turn_on( self, speed: str = None, percentage: int = None, preset_mode: str = None, **kwargs, ) -> None: <NEW_LINE> <INDENT> if percentage is not None: <NEW_LINE> <INDENT> self.set_percentage(percentage) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._tuya.turn_on() <NEW_LINE> <DEDENT> <DEDENT> def turn_off(self, **kwargs) -> None: <NEW_LINE> <INDENT> self._tuya.turn_off() <NEW_LINE> <DEDENT> def oscillate(self, oscillating) -> None: <NEW_LINE> <INDENT> self._tuya.oscillate(oscillating) <NEW_LINE> <DEDENT> @property <NEW_LINE> def speed_count(self) -> int: <NEW_LINE> <INDENT> if self.speeds is None: <NEW_LINE> <INDENT> return super().speed_count <NEW_LINE> <DEDENT> return len(self.speeds) <NEW_LINE> <DEDENT> @property <NEW_LINE> def oscillating(self): <NEW_LINE> <INDENT> if self.supported_features & SUPPORT_OSCILLATE == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.speed == STATE_OFF: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self._tuya.oscillating() <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._tuya.state() <NEW_LINE> <DEDENT> @property <NEW_LINE> def percentage(self) -> int | None: <NEW_LINE> <INDENT> if not self.is_on: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if self.speeds is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return ordered_list_item_to_percentage(self.speeds, self._tuya.speed()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self) -> int: <NEW_LINE> <INDENT> if self._tuya.support_oscillate(): <NEW_LINE> <INDENT> return SUPPORT_SET_SPEED | SUPPORT_OSCILLATE <NEW_LINE> <DEDENT> return SUPPORT_SET_SPEED
Tuya fan devices.
6259904e29b78933be26aaf6
class Obliteration(Rule): <NEW_LINE> <INDENT> kind = 'obliteration' <NEW_LINE> def __init__(self, **kwcontexts): <NEW_LINE> <INDENT> self.contexts = self.Contexts(**kwcontexts) <NEW_LINE> if not self.contexts: <NEW_LINE> <INDENT> raise ValueError(f'{self!r} no context.') <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> contexts = self.contexts._kwstr(plain=True) <NEW_LINE> return f'{self.__class__.__name__}({contexts})' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'[] -> 0{self.contexts}' <NEW_LINE> <DEDENT> def __call__(self, slots): <NEW_LINE> <INDENT> for i_s, i_h, slot, head in self.all_contexts_match(slots): <NEW_LINE> <INDENT> del slot[i_h] <NEW_LINE> if not slot: <NEW_LINE> <INDENT> del slots[i_s] <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return False
Delete the first context matching head; if applicable, delete the resulting empty slot.
6259904e76d4e153a661dcac
class WeixinService(business_model.Model): <NEW_LINE> <INDENT> __slots__ = ( 'id', 'authorizer_appid', 'authorizer_access_token', 'user_id', 'access_token', 'weixin_api' ) <NEW_LINE> def __init__(self, model): <NEW_LINE> <INDENT> business_model.Model.__init__(self) <NEW_LINE> self.context['db_model'] = model <NEW_LINE> if model: <NEW_LINE> <INDENT> self._init_slot_from_model(model) <NEW_LINE> self.access_token = model.authorizer_access_token <NEW_LINE> self.weixin_api = get_weixin_api(self) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> @param_required(['user_id']) <NEW_LINE> def from_user_id(args): <NEW_LINE> <INDENT> user_id = args['user_id'] <NEW_LINE> db_model = weixin_models.ComponentAuthedAppid.select().dj_where(user_id=user_id).first() <NEW_LINE> if db_model: <NEW_LINE> <INDENT> weixin_service = WeixinService(db_model) <NEW_LINE> return weixin_service <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def send_template_message(self, message): <NEW_LINE> <INDENT> result = self.weixin_api.send_template_message(message) <NEW_LINE> watchdog.info(result)
Weixin (WeChat) service
6259904e7cff6e4e811b6ea3
class PyrUnit(nn.Layer): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, strides, bottleneck, data_format="channels_last", **kwargs): <NEW_LINE> <INDENT> super(PyrUnit, self).__init__(**kwargs) <NEW_LINE> assert (out_channels >= in_channels) <NEW_LINE> self.data_format = data_format <NEW_LINE> self.resize_identity = (strides != 1) <NEW_LINE> self.identity_pad_width = out_channels - in_channels <NEW_LINE> if bottleneck: <NEW_LINE> <INDENT> self.body = PyrBottleneck( in_channels=in_channels, out_channels=out_channels, strides=strides, data_format=data_format, name="body") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.body = PyrBlock( in_channels=in_channels, out_channels=out_channels, strides=strides, data_format=data_format, name="body") <NEW_LINE> <DEDENT> self.bn = BatchNorm( data_format=data_format, name="bn") <NEW_LINE> if self.resize_identity: <NEW_LINE> <INDENT> self.identity_pool = AvgPool2d( pool_size=2, strides=strides, ceil_mode=True, data_format=data_format, name="identity_pool") <NEW_LINE> <DEDENT> <DEDENT> def call(self, x, training=None): <NEW_LINE> <INDENT> identity = x <NEW_LINE> x = self.body(x, training=training) <NEW_LINE> x = self.bn(x, training=training) <NEW_LINE> if self.resize_identity: <NEW_LINE> <INDENT> identity = self.identity_pool(identity) <NEW_LINE> <DEDENT> if self.identity_pad_width > 0: <NEW_LINE> <INDENT> if is_channels_first(self.data_format): <NEW_LINE> <INDENT> paddings = [[0, 0], [0, self.identity_pad_width], [0, 0], [0, 0]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> paddings = [[0, 0], [0, 0], [0, 0], [0, self.identity_pad_width]] <NEW_LINE> <DEDENT> identity = tf.pad(identity, paddings=paddings) <NEW_LINE> <DEDENT> x = x + identity <NEW_LINE> return x
PyramidNet unit with residual connection. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. strides : int or tuple/list of 2 int Strides of the convolution. bottleneck : bool Whether to use a bottleneck or simple block in units. data_format : str, default 'channels_last' The ordering of the dimensions in tensors.
6259904e30dc7b76659a0c9d
class Session: <NEW_LINE> <INDENT> def __init__(self, conn): <NEW_LINE> <INDENT> self._cfg_ = ConfigObj("ipall.cfg") <NEW_LINE> self._url_ = self._cfg_['Server']['web_dir'] <NEW_LINE> self._conn_ = conn <NEW_LINE> <DEDENT> def check_user(self): <NEW_LINE> <INDENT> if os.environ.has_key('HTTP_COOKIE') and os.environ['HTTP_COOKIE'] != "": <NEW_LINE> <INDENT> C = SimpleCookie(os.environ['HTTP_COOKIE']) <NEW_LINE> phpsessid = str(C['PHPSESSID']).split("=")[1] <NEW_LINE> sql_session = """SELECT username, ip, starttime, endtime FROM php_session WHERE phpsessid='%s' """ % ( str(phpsessid) ) <NEW_LINE> session = self._conn_.get_data(sql_session) <NEW_LINE> if session != (): <NEW_LINE> <INDENT> user = session[0][0] <NEW_LINE> user_ip = session[0][1] <NEW_LINE> sess_start = session[0][2] <NEW_LINE> sess_end = session[0][3] <NEW_LINE> if os.environ.has_key('REMOTE_ADDR') and os.environ['REMOTE_ADDR'] != user_ip: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> y = zfill(str(localtime()[0]), 2) <NEW_LINE> m = zfill(str(localtime()[1]), 2) <NEW_LINE> d = zfill(str(localtime()[2]), 2) <NEW_LINE> h = zfill(str(localtime()[3]), 2) <NEW_LINE> i = zfill(str(localtime()[4]), 2) <NEW_LINE> s = zfill(str(localtime()[5]), 2) <NEW_LINE> now = y+m+d+h <NEW_LINE> end = sess_end[0:10] <NEW_LINE> if int(now) > int(end): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> summe = int(i) + 60 <NEW_LINE> min = summe % 60 <NEW_LINE> uebertrag = summe / 60 <NEW_LINE> std = int(h) + uebertrag <NEW_LINE> new_end = y+m+d+str(std)+str(min)+s <NEW_LINE> sql_upd_sess = """UPDATE php_session SET endtime='%s' WHERE phpsessid='%s' """ % ( new_end, phpsessid ) <NEW_LINE> upd = self._conn_.update_data(sql_upd_sess) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> return user <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> <DEDENT> def check_cookie(self): <NEW_LINE> <INDENT> HTML = HtmlContent() <NEW_LINE> if os.environ.has_key('HTTP_COOKIE') and os.environ.has_key('HTTP_REFERER'): <NEW_LINE> <INDENT> C = SimpleCookie(os.environ['HTTP_COOKIE']) <NEW_LINE> if C.has_key('session') and C['session'].value != "": <NEW_LINE> <INDENT> if C.has_key('company') and C['company'].value != "": <NEW_LINE> <INDENT> company = C['company'].value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> company = -1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> HTML.simple_redirect_header(self._url_) <NEW_LINE> return <NEW_LINE> <DEDENT> return company <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> HTML.simple_redirect_header(self._url_) <NEW_LINE> return
helper class for handling session cookies and users
6259904e3539df3088ecd70b
class Gist(msgs.Message): <NEW_LINE> <INDENT> id = msgs.IntegerField(1, default=0) <NEW_LINE> url = msgs.StringField(2) <NEW_LINE> description = msgs.StringField(3) <NEW_LINE> public = msgs.BooleanField(4, default=True) <NEW_LINE> user = msgs.MessageField('User', 5) <NEW_LINE> comments = msgs.IntegerField(6, default=0) <NEW_LINE> comments_url = msgs.StringField(7) <NEW_LINE> html_url = msgs.StringField(8) <NEW_LINE> git_pull_url = msgs.StringField(9) <NEW_LINE> git_push_url = msgs.StringField(10) <NEW_LINE> created_at = msgs.StringField(11) <NEW_LINE> forks = msgs.MessageField('Gist', 12, repeated=True)
{ "files": { "ring.erl": { "size": 932, "filename": "ring.erl", "raw_url": "https://gist.github.com/raw/365370/8c4d2d43d178df44f4c03a7f2ac0ff512853564e/ring.erl" } } "history": [ { "url": "https://api.github.com/gists/14a2302d4083e5331759", "version": "57a7f021a713b1c5a6a199b54cc514735d2d462f", "user": { "login": "octocat", "id": 1, "avatar_url": "https://github.com/images/error/octocat_happy.gif", "gravatar_id": "somehexcode", "url": "https://api.github.com/users/octocat" }, "change_status": { "deletions": 0, "additions": 180, "total": 180 }, "committed_at": "2010-04-14T02:15:15Z" } ]
6259904e55399d3f05627983
class Deprecated: <NEW_LINE> <INDENT> _warn = functools.partial( warnings.warn, "SelectableGroups dict interface is deprecated. Use select.", DeprecationWarning, stacklevel=2, ) <NEW_LINE> def __getitem__(self, name): <NEW_LINE> <INDENT> self._warn() <NEW_LINE> return super().__getitem__(name) <NEW_LINE> <DEDENT> def get(self, name, default=None): <NEW_LINE> <INDENT> flake8_bypass(self._warn)() <NEW_LINE> return super().get(name, default) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self._warn() <NEW_LINE> return super().__iter__() <NEW_LINE> <DEDENT> def __contains__(self, *args): <NEW_LINE> <INDENT> self._warn() <NEW_LINE> return super().__contains__(*args) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> self._warn() <NEW_LINE> return super().keys() <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> self._warn() <NEW_LINE> return super().values()
Compatibility add-in for mapping to indicate that mapping behavior is deprecated. >>> recwarn = getfixture('recwarn') >>> class DeprecatedDict(Deprecated, dict): pass >>> dd = DeprecatedDict(foo='bar') >>> dd.get('baz', None) >>> dd['foo'] 'bar' >>> list(dd) ['foo'] >>> list(dd.keys()) ['foo'] >>> 'foo' in dd True >>> list(dd.values()) ['bar'] >>> len(recwarn) 1
6259904e91af0d3eaad3b28d
class NumArray: <NEW_LINE> <INDENT> def __init__(self, nums: List[int]): <NEW_LINE> <INDENT> self.nums = nums <NEW_LINE> <DEDENT> def sumRange(self, left: int, right: int) -> int: <NEW_LINE> <INDENT> return sum(self.nums[left:right + 1])
15 / 15 test cases passed. Status: Accepted Runtime: 1100 ms Memory Usage: 17.7 MB
6259904ee64d504609df9e03
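A quick usage sketch for the NumArray record above; slicing and summing on every query is what makes the reported runtime relatively high. The numbers are arbitrary.
arr = NumArray([1, 2, 3, 4, 5])
print(arr.sumRange(1, 3))  # 2 + 3 + 4 = 9
print(arr.sumRange(0, 4))  # 15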
class recalculate_commision_wizard(orm.TransientModel): <NEW_LINE> <INDENT> _name = 'recalculate.commission.wizard' <NEW_LINE> _columns = { 'date_from': fields.date('From', required=True), 'date_to': fields.date('To', required=True), } <NEW_LINE> _defaults = { 'date_from': lambda *a: time.strftime('%Y-%m-01'), 'date_to': lambda *a: time.strftime('%Y-%m-%d'), } <NEW_LINE> def recalculate_exec(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> company_id = self.pool['res.users']._get_company(cr, uid, context) <NEW_LINE> invoice_obj = self.pool['account.invoice'] <NEW_LINE> for o in self.browse(cr, uid, ids, context=context): <NEW_LINE> <INDENT> invoice_ids = invoice_obj.search(cr, uid, [('date_invoice', '>=', o.date_from), ('date_invoice', '<=', o.date_to)]) <NEW_LINE> invoice_obj.invoice_set_agent(cr, uid, invoice_ids, context) <NEW_LINE> sql = 'SELECT invoice_line_agent.id FROM account_invoice_line ' 'INNER JOIN invoice_line_agent ON invoice_line_agent.invoice_line_id=account_invoice_line.id ' 'INNER JOIN account_invoice ON account_invoice_line.invoice_id = account_invoice.id ' 'WHERE invoice_line_agent.agent_id in (' + ",".join( map(str, context['active_ids'])) + ') AND invoice_line_agent.settled=False ' 'AND account_invoice.state not in (\'draft\',\'cancel\') AND account_invoice.type in (\'out_invoice\',\'out_refund\')' 'AND account_invoice.date_invoice >= \'' + o.date_from + '\' AND account_invoice.date_invoice <= \'' + o.date_to + '\'' ' AND account_invoice.company_id = ' + str(company_id) <NEW_LINE> cr.execute(sql) <NEW_LINE> res = cr.fetchall() <NEW_LINE> inv_line_agent_ids = [x[0] for x in res] <NEW_LINE> self.pool['invoice.line.agent'].calculate_commission(cr, uid, inv_line_agent_ids) <NEW_LINE> <DEDENT> return { 'type': 'ir.actions.act_window_close', } <NEW_LINE> <DEDENT> def action_cancel(self, cr, uid, ids, conect=None): <NEW_LINE> <INDENT> return { 'type': 'ir.actions.act_window_close', }
settled.wizard
6259904ed6c5a102081e3587
class GeoFeatureModelSerializerOptions(ModelSerializerOptions): <NEW_LINE> <INDENT> def __init__(self, meta): <NEW_LINE> <INDENT> super(GeoFeatureModelSerializerOptions, self).__init__(meta) <NEW_LINE> self.geo_field = getattr(meta, 'geo_field', None) <NEW_LINE> self.id_field = getattr(meta, 'id_field', meta.model._meta.pk.name)
Options for GeoFeatureModelSerializer
6259904e45492302aabfd93c
class IP3366Fetcher(BaseFetcher): <NEW_LINE> <INDENT> def fetch(self): <NEW_LINE> <INDENT> urls = [] <NEW_LINE> for stype in ['1', '2']: <NEW_LINE> <INDENT> for page in range(1, 6): <NEW_LINE> <INDENT> url = f'http://www.ip3366.net/free/?stype={stype}&page={page}' <NEW_LINE> urls.append(url) <NEW_LINE> <DEDENT> <DEDENT> proxies = [] <NEW_LINE> ip_regex = re.compile(r'^\d+\.\d+\.\d+\.\d+$') <NEW_LINE> port_regex = re.compile(r'^\d+$') <NEW_LINE> for url in urls: <NEW_LINE> <INDENT> headers = { 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9', 'Accept-Encoding': 'gzip, deflate', 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8', 'Cache-Control': 'no-cache', 'Connection': 'keep-alive', 'Pragma': 'no-cache', 'Upgrade-Insecure-Requests': '1', 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/79.0.3945.130 Chrome/79.0.3945.130 Safari/537.36' } <NEW_LINE> html = requests.get(url, headers=headers, timeout=10).text <NEW_LINE> doc = pq(html) <NEW_LINE> for line in doc('tr').items(): <NEW_LINE> <INDENT> tds = list(line('td').items()) <NEW_LINE> if len(tds) == 7: <NEW_LINE> <INDENT> ip = tds[0].text().strip() <NEW_LINE> port = tds[1].text().strip() <NEW_LINE> if re.match(ip_regex, ip) is not None and re.match(port_regex, port) is not None: <NEW_LINE> <INDENT> proxies.append(('http', ip, int(port))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return list(set(proxies))
http://www.ip3366.net/free/?stype=1
6259904ed53ae8145f9198cc
class _ConsoleWriter(object): <NEW_LINE> <INDENT> def __init__(self, logger, output_filter, stream_wrapper, always_flush=False): <NEW_LINE> <INDENT> self.__logger = logger <NEW_LINE> self.__filter = output_filter <NEW_LINE> self.__stream_wrapper = stream_wrapper <NEW_LINE> self.__always_flush = always_flush <NEW_LINE> <DEDENT> def Print(self, *msg): <NEW_LINE> <INDENT> msg = (console_attr.SafeText( x, encoding=LOG_FILE_ENCODING, escape=False) for x in msg) <NEW_LINE> message = ' '.join(msg) <NEW_LINE> self._Write(message + '\n') <NEW_LINE> <DEDENT> def GetConsoleWriterStream(self): <NEW_LINE> <INDENT> return self.__stream_wrapper.stream <NEW_LINE> <DEDENT> def _Write(self, msg): <NEW_LINE> <INDENT> self.__logger.info(msg) <NEW_LINE> if self.__filter.enabled: <NEW_LINE> <INDENT> stream_encoding = console_attr.GetConsoleAttr().GetEncoding() <NEW_LINE> stream_msg = console_attr.SafeText( msg, encoding=stream_encoding, escape=False) <NEW_LINE> if six.PY2: <NEW_LINE> <INDENT> stream_msg = msg.encode(stream_encoding or 'utf8', 'replace') <NEW_LINE> <DEDENT> self.__stream_wrapper.stream.write(stream_msg) <NEW_LINE> if self.__always_flush: <NEW_LINE> <INDENT> self.flush() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def write(self, msg): <NEW_LINE> <INDENT> self._Write( console_attr.SafeText(msg, encoding=LOG_FILE_ENCODING, escape=False)) <NEW_LINE> <DEDENT> def writelines(self, lines): <NEW_LINE> <INDENT> for line in lines: <NEW_LINE> <INDENT> self.write(line) <NEW_LINE> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> if self.__filter.enabled: <NEW_LINE> <INDENT> self.__stream_wrapper.stream.flush() <NEW_LINE> <DEDENT> <DEDENT> def isatty(self): <NEW_LINE> <INDENT> isatty = getattr(self.__stream_wrapper.stream, 'isatty', None) <NEW_LINE> return isatty() if isatty else False
A class that wraps stdout or stderr so we can control how it gets logged. This class is a stripped down file-like object that provides the basic writing methods. When you write to this stream, if it is enabled, it will be written to stdout. All strings will also be logged at DEBUG level so they can be captured by the log file.
6259904ea79ad1619776b4e9
class TestRecursionLimit(object): <NEW_LINE> <INDENT> def setup_method(self, method): <NEW_LINE> <INDENT> self._oldlimit = sys.getrecursionlimit() <NEW_LINE> sys.setrecursionlimit(100) <NEW_LINE> size = 10000 <NEW_LINE> self._make_data(size) <NEW_LINE> <DEDENT> def _make_data(self, size): <NEW_LINE> <INDENT> data1 = np.arange(size * 2) <NEW_LINE> data2 = np.arange(size * 2) <NEW_LINE> data2[::2] += 2 <NEW_LINE> data1[-1] = 0 <NEW_LINE> data = np.vstack((data1, data2)) <NEW_LINE> self.data = data <NEW_LINE> self.size = size <NEW_LINE> <DEDENT> def test_compute(self): <NEW_LINE> <INDENT> d = Dendrogram.compute(self.data) <NEW_LINE> assert len(d.leaves) == self.size, "We expect {n} leaves, not {a}.".format(n=self.size, a=len(d.leaves)) <NEW_LINE> <DEDENT> def test_computing_level(self): <NEW_LINE> <INDENT> d = Dendrogram.compute(self.data) <NEW_LINE> mid_structure = d.structure_at((0, self.size // 2)) <NEW_LINE> sys.setrecursionlimit(100000) <NEW_LINE> _ = mid_structure.level <NEW_LINE> for structure in d.all_structures: <NEW_LINE> <INDENT> if structure.parent is None: <NEW_LINE> <INDENT> assert structure.level == 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert structure.level == structure.parent.level + 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_plot(self): <NEW_LINE> <INDENT> sys.setrecursionlimit(self._oldlimit) <NEW_LINE> ax = plt.gca() <NEW_LINE> sys.setrecursionlimit(150) <NEW_LINE> d = Dendrogram.compute(self.data) <NEW_LINE> p = d.plotter() <NEW_LINE> p.plot_tree(ax) <NEW_LINE> <DEDENT> def teardown_method(self, method): <NEW_LINE> <INDENT> sys.setrecursionlimit(self._oldlimit)
Test that we can efficiently compute deep dendrogram trees without hitting the recursion limit. Note: plot() uses recursion but we should be able to *compute* dendrograms without using deep recursion, even if we aren't yet able to plot them without using recursion.
6259904e3617ad0b5ee075a9
class ShowLacpNeighbor(ShowLacpNeighbor_iosxe): <NEW_LINE> <INDENT> pass
Parser for : show lacp neighbor
6259904e16aa5153ce401957
class User(ModelBase): <NEW_LINE> <INDENT> __dump_attributes__ = ["name", "address", "send_newsletter", "language"] <NEW_LINE> user_id = None <NEW_LINE> name = None <NEW_LINE> email = None <NEW_LINE> address = None <NEW_LINE> verified_email = None <NEW_LINE> send_newsletter = None <NEW_LINE> language = None <NEW_LINE> premium = None <NEW_LINE> premium_expires_on = None <NEW_LINE> premium_subscription = None <NEW_LINE> join_date = None <NEW_LINE> def __init__(self, session, **kwargs): <NEW_LINE> <INDENT> super(User, self).__init__(session, **kwargs) <NEW_LINE> if self.join_date: <NEW_LINE> <INDENT> self.join_date = dateutil.parser.parse(self.join_date) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "User: %s (%s, %s)" % (self.name, self.user_id, self.email)
Object representing a user
6259904ed486a94d0ba2d42f
class KeepAliveRPCProvider(RPCProvider): <NEW_LINE> <INDENT> def __init__(self, host="127.0.0.1", port=8545, path="/", **kwargs): <NEW_LINE> <INDENT> super(KeepAliveRPCProvider, self).__init__( host, port, path, **kwargs )
Deprecated: Use HTTPProvider instead.
6259904e73bcbd0ca4bcb6f3
class StandardParser(BaseParser): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _parse_row(cls, row): <NEW_LINE> <INDENT> line_strip = row.split('=') <NEW_LINE> if not line_strip[0].startswith('#') and row.strip() != '': <NEW_LINE> <INDENT> return line_strip[0], cls._parse_value(line_strip[1].strip()) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _parse_value(cls, value): <NEW_LINE> <INDENT> if value in ('False', 'True', 'FALSE', 'TRUE', 'true', 'false'): <NEW_LINE> <INDENT> return eval(value) <NEW_LINE> <DEDENT> if value.isdigit(): <NEW_LINE> <INDENT> return int(value) <NEW_LINE> <DEDENT> return value
Default simple parser. Converts boolean values and numbers.
6259904e07f4c71912bb089f
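A small usage sketch for the StandardParser record above, assuming its BaseParser base class is importable; the config lines are invented. Note that _parse_row splits on '=' and keeps whatever whitespace surrounds the key.
print(StandardParser._parse_row("DEBUG = True"))      # ('DEBUG ', True)
print(StandardParser._parse_row("PORT = 8080"))       # ('PORT ', 8080)
print(StandardParser._parse_row("# just a comment"))  # None (comments are skipped)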
class InternalAPIError(WalkScoreError): <NEW_LINE> <INDENT> pass
Internal error within the WalkScore API itself. Inherits from :class:`WalkScoreError`.
6259904e8da39b475be0464f
class Rule(models.Model): <NEW_LINE> <INDENT> SKILL_RULE = 'SkillRule' <NEW_LINE> HEADER_RULE = 'HeaderRule' <NEW_LINE> ORIGIN_RULE = 'OriginRule' <NEW_LINE> GRANT_RULE = 'GrantRule' <NEW_LINE> RULE_REQUIREMENT_CHOICES = ( (SKILL_RULE, 'Skill Rule'), (HEADER_RULE, 'Header Rule'), (ORIGIN_RULE, 'Origin Rule'), (GRANT_RULE, 'Grant Rule') ) <NEW_LINE> name = models.CharField(max_length=100) <NEW_LINE> description = HTMLField(blank=True) <NEW_LINE> content_type = models.ForeignKey( ContentType, on_delete=models.CASCADE, limit_choices_to=models.Q( app_label='origins', model='Origin' ) | models.Q( app_label='skills', model='Header' ) | models.Q( app_label='skills', model='Skill' ) ) <NEW_LINE> object_id = models.PositiveIntegerField() <NEW_LINE> content_object = GenericForeignKey('content_type', 'object_id') <NEW_LINE> skill = models.ForeignKey( HeaderSkill, on_delete=models.CASCADE, blank=True, null=True ) <NEW_LINE> new_cost = models.PositiveIntegerField(default=0, blank=True, null=True) <NEW_LINE> free = models.BooleanField( default=False, blank=True, help_text=_("This is granted for free if the requirements are met.") ) <NEW_LINE> picks_remaining = models.PositiveIntegerField(null=True, blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{}".format( self.name ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> if self.free: <NEW_LINE> <INDENT> return self.GRANT_RULE <NEW_LINE> <DEDENT> return "{} Rule".format(self.content_type.name.capitalize())
Rules that change what skills/headers cost. Origins or other attributes may change what a skill costs. Rules track those changes and should be run when adding up character point changes/totals. Grant skills are skills that have the Boolean "free" field set to true: When a character fulfills the requirement, they get the skill automatically without having to buy it. Prerequisites are things that have to happen before something can be purchased and are attached to that thing.
6259904e009cb60464d029a3
class LogTransformer(BaseTransformer): <NEW_LINE> <INDENT> _tags = { "scitype:transform-input": "Series", "scitype:transform-output": "Series", "scitype:instancewise": True, "X_inner_mtype": "np.ndarray", "y_inner_mtype": "None", "transform-returns-same-time-index": True, "fit_is_empty": True, "univariate-only": False, "capability:inverse_transform": True, } <NEW_LINE> def _transform(self, X, y=None): <NEW_LINE> <INDENT> Xt = np.log(X) <NEW_LINE> return Xt <NEW_LINE> <DEDENT> def _inverse_transform(self, X, y=None): <NEW_LINE> <INDENT> Xt = np.exp(X) <NEW_LINE> return Xt
Natural logarithm transformation. The natural log transformation can used to make data more normally distributed and stabilize its variance. See Also -------- BoxCoxTransformer : Applies Box-Cox power transformation. Can help normalize data and compress variance of the series. sktime.transformations.series.exponent.ExponentTransformer : Transform input data by raising it to an exponent. Can help compress variance of series if a fractional exponent is supplied. sktime.transformations.series.exponent.SqrtTransformer : Transform input data by taking its square root. Can help compress variance of input series. Notes ----- The log transformation is applied as :math:`ln(y)`. Examples -------- >>> from sktime.transformations.series.boxcox import LogTransformer >>> from sktime.datasets import load_airline >>> y = load_airline() >>> transformer = LogTransformer() >>> y_hat = transformer.fit_transform(y)
6259904edc8b845886d54a28
class AbstractPlayer: <NEW_LINE> <INDENT> def __init__(self, setup_time, player_color, time_per_k_turns, k): <NEW_LINE> <INDENT> self.setup_time = setup_time <NEW_LINE> self.color = player_color <NEW_LINE> self.time_per_k_turns = time_per_k_turns <NEW_LINE> self.k = k <NEW_LINE> <DEDENT> def get_move(self, game_state, possible_moves): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.color
Your player must inherit from this class, and your player class name must be 'Player', as in the given examples. Like this: 'class Player(abstract.AbstractPlayer):'
6259904e3cc13d1c6d466ba4
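A minimal sketch of the subclassing convention described in the docstring above, assuming AbstractPlayer is imported; the random-move strategy is only a placeholder to show the required shape, not a serious player.
import random

class Player(AbstractPlayer):
    def get_move(self, game_state, possible_moves):
        # placeholder strategy: pick any legal move
        return random.choice(possible_moves)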
class ButlerClient(object): <NEW_LINE> <INDENT> def __init__(self, butler, url, *args, **kwargs): <NEW_LINE> <INDENT> self.url = url.rstrip('/') <NEW_LINE> self.butler = butler(*args, **kwargs) <NEW_LINE> self.functions = {} <NEW_LINE> self.session = requests.Session() <NEW_LINE> self.response = None <NEW_LINE> self.butler._init_client(*args, **kwargs) <NEW_LINE> <DEDENT> def _get_function(self, function_name): <NEW_LINE> <INDENT> for func in self.butler.functions: <NEW_LINE> <INDENT> if func.function_name == function_name: <NEW_LINE> <INDENT> return func <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> if name in self.functions: <NEW_LINE> <INDENT> return self.functions[name] <NEW_LINE> <DEDENT> func = self._get_function(name) <NEW_LINE> if not func: <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> def client_func(*args, **kwargs): <NEW_LINE> <INDENT> params = [] <NEW_LINE> args = list(args) <NEW_LINE> for arg in func.args: <NEW_LINE> <INDENT> if arg in kwargs or not args: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> params.append(kwargs.pop(arg)) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> params.append(func.get_default(arg)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> params.append(args.pop(0)) <NEW_LINE> <DEDENT> <DEDENT> url = self.url + func.get_url(params) <NEW_LINE> self.response = self.session.request(func.method, url, *args, **kwargs) <NEW_LINE> return self.response <NEW_LINE> <DEDENT> client_func.__name__ = name <NEW_LINE> client_func.__doc__ = func.obj.__doc__ <NEW_LINE> self.functions[name] = client_func <NEW_LINE> return client_func
ButlerClient is a function factory for a Butler server. For each function on the Butler server there is an equivalent function on the client with the same parameters, which sends the request to the Butler instance.
6259904ed6c5a102081e3588
class NaluTaskCLI: <NEW_LINE> <INDENT> def __init__(self, subparsers, subcmd_name="tasks"): <NEW_LINE> <INDENT> parser = subparsers.add_parser( subcmd_name, description="Run pre/post-processing tasks as defined in YAML file", help="run pre/post processing tasks", epilog=get_epilog()) <NEW_LINE> parser.add_argument( '-l', '--list-tasks', action='store_true', help="List available tasks and exit") <NEW_LINE> parser.add_argument( '-i', '--input-file', default='nalu_tasks.yaml', help="Input file describing pre/post tasks (nalu_tasks.yaml)") <NEW_LINE> parser.set_defaults(func=self) <NEW_LINE> self.parser = parser <NEW_LINE> <DEDENT> def run_tasks(self, args): <NEW_LINE> <INDENT> inpfile = args.input_file <NEW_LINE> fpath = Path(inpfile).resolve() <NEW_LINE> if not fpath.exists(): <NEW_LINE> <INDENT> _lgr.error("Input file not found: %s. Exiting!", inpfile) <NEW_LINE> self.parser.exit(1) <NEW_LINE> <DEDENT> opts = Struct.load_file(fpath) <NEW_LINE> if "nalu_tasks" not in opts: <NEW_LINE> <INDENT> _lgr.error("Cannot find nalu_tasks entry in input file") <NEW_LINE> self.parser.exit(1) <NEW_LINE> <DEDENT> _lgr.info("Executing nalu-wind tasks from file: %s", inpfile) <NEW_LINE> runner = NaluTaskRunner() <NEW_LINE> runner(opts.nalu_tasks) <NEW_LINE> _lgr.info("All tasks completed successfully") <NEW_LINE> <DEDENT> def __call__(self, args): <NEW_LINE> <INDENT> if args.list_tasks: <NEW_LINE> <INDENT> print("Available tasks: ") <NEW_LINE> for key, value in NaluTask.available_tasks().items(): <NEW_LINE> <INDENT> full_doc = value.__doc__ <NEW_LINE> doc = full_doc.split("\n")[0].strip() <NEW_LINE> print("- %s :: %s"%(key, doc)) <NEW_LINE> <DEDENT> print() <NEW_LINE> return <NEW_LINE> <DEDENT> self.run_tasks(args)
Nalu-Wind Tasks sub-command
6259904e3539df3088ecd70d
class BetaProtocolStub(object): <NEW_LINE> <INDENT> def Run(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): <NEW_LINE> <INDENT> pass <NEW_LINE> raise NotImplementedError()
The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This class was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.
6259904e82261d6c527308fc
class DatabaseManagementTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.arango = Arango() <NEW_LINE> self.db_name = generate_db_name(self.arango) <NEW_LINE> self.addCleanup(self.arango.delete_database, name=self.db_name, safe_delete=True) <NEW_LINE> <DEDENT> def test_database_create_and_delete(self): <NEW_LINE> <INDENT> self.arango.create_database(self.db_name) <NEW_LINE> self.assertIn(self.db_name, self.arango.databases["all"]) <NEW_LINE> self.assertEqual(self.arango.database(self.db_name).name, self.db_name) <NEW_LINE> self.assertEqual(self.arango.database(self.db_name).is_system, False) <NEW_LINE> self.arango.delete_database(self.db_name) <NEW_LINE> self.assertNotIn(self.db_name, self.arango.databases["all"]) <NEW_LINE> <DEDENT> def test_database_properties(self): <NEW_LINE> <INDENT> db = self.arango.database("_system") <NEW_LINE> self.assertEqual(db.name, "_system") <NEW_LINE> self.assertTrue(isinstance(db.properties, dict)) <NEW_LINE> self.assertTrue(is_string(db.id)) <NEW_LINE> self.assertTrue(is_string(db.file_path)) <NEW_LINE> self.assertEqual(db.is_system, True)
Tests for managing ArangoDB databases.
6259904e91af0d3eaad3b28f
class TestWords(unittest.TestCase): <NEW_LINE> <INDENT> def test_word_occurance1(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'word': 1}, words('word'), msg='should count one word' ) <NEW_LINE> <DEDENT> def test_word_occurance2(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'one': 1, 'of': 1, 'each': 1}, words("one of each"), msg='should count one of each' ) <NEW_LINE> <DEDENT> def test_word_occurance3(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'one': 1, 'fish': 4, 'two': 1, 'red': 1, 'blue': 1}, words("one fish two fish red fish blue fish"), msg='should count multiple occurrences' ) <NEW_LINE> <DEDENT> def test_word_occurance4(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'car': 1, ":": 2, 'carpet': 1, 'as': 1, 'java': 1, 'javascript!!&@$%^&': 1 }, words('car : carpet as java : javascript!!&@$%^&'), msg='should include punctuation' ) <NEW_LINE> <DEDENT> def test_word_occurance5(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'testing': 2, 1: 1, 2: 1}, words('testing 1 2 testing'), msg='should include numbers' ) <NEW_LINE> <DEDENT> def test_word_occurance6(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'go': 1, 'Go': 1, 'GO': 1}, words('go Go GO'), msg='should respect case' ) <NEW_LINE> <DEDENT> def test_word_occurance7(self): <NEW_LINE> <INDENT> self.assertDictEqual( {"¡Hola!": 1, "¿Qué": 1, "tal?": 1, "Привет!": 1}, words('¡Hola! ¿Qué tal? Привет!'), msg='should count international characters properly' ) <NEW_LINE> <DEDENT> def test_word_occurance8(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'hello': 1, 'world': 1}, words('hello\nworld'), msg='should not count multilines' ) <NEW_LINE> <DEDENT> def test_word_occurance9(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'hello': 1, 'world': 1}, words('hello\tworld'), msg='should not count tabs' ) <NEW_LINE> <DEDENT> def test_word_occurance0(self): <NEW_LINE> <INDENT> self.assertDictEqual( {'hello': 1, 'world': 1}, words('hello world'), msg='should count multiple spaces as one' )
Test cases for the words function
6259904eb830903b9686eeb0
class UploadImageForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = UserProfile <NEW_LINE> fields = ['image']
Form for updating the user's profile image
6259904e8e7ae83300eea4ff
class OaiSetMask(Transparent): <NEW_LINE> <INDENT> def __init__(self, setsMask, name=None): <NEW_LINE> <INDENT> Transparent.__init__(self, name=name) <NEW_LINE> self._setsMask = set(setsMask) <NEW_LINE> <DEDENT> def oaiSelect(self, setsMask=None, *args, **kwargs): <NEW_LINE> <INDENT> return self.call.oaiSelect(setsMask=self._combinedSetsMask(setsMask), *args, **kwargs) <NEW_LINE> <DEDENT> def getRecord(self, identifier, setsMask=None, **kwargs): <NEW_LINE> <INDENT> record = self.call.getRecord(identifier, **kwargs) <NEW_LINE> if self._combinedSetsMask(setsMask).issubset(record.sets): <NEW_LINE> <INDENT> return record <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _combinedSetsMask(self, setsMask): <NEW_LINE> <INDENT> return self._setsMask.union(setsMask or [])
A setsMask needs to be specified as a list or set of setSpecs. If more than one setSpec is specified (in a single instance or by chaining), the mask takes the form of the intersection of these setSpecs.
6259904ee64d504609df9e04
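The intersection semantics described in the docstring can be seen with plain sets, without any meresco machinery; the setSpec names below are made up for illustration.

outer_mask = {"history"}                    # mask of an outer OaiSetMask
inner_mask = {"history:war"}                # mask of a chained (inner) OaiSetMask
combined = outer_mask | inner_mask          # what _combinedSetsMask() produces

record_sets = {"history", "history:war", "oa"}
assert combined.issubset(record_sets)       # getRecord() would return this record
assert not combined.issubset({"history"})   # a record lacking "history:war" is masked out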
class TempsViewSet(ReadOnlyModelViewSetWithCountModified): <NEW_LINE> <INDENT> queryset = Temps.objects.all() <NEW_LINE> serializer_class = TempsSerializer
Example API Model View Set - Read Only. APIs included with the ReadOnlyModelViewSetWithCountModified base ViewSet: 1) '/api/api_example/temps/' - This is the base API that returns full detail. 2) '/api/api_example/temps/count/' - Returns the record count. 3) '/api/api_example/temps/modified/' - Returns only the PK and modified fields. 4) '/api/api_example/temps/{id}/' - Returns the record matching the given ID value.
6259904ed53ae8145f9198cd
class NLTKCollocations: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> nltk.data.find('corpora/brown') <NEW_LINE> <DEDENT> except LookupError: <NEW_LINE> <INDENT> if license_prompt('brown data set', 'http://www.nltk.org/nltk_data/') is False: <NEW_LINE> <INDENT> raise Exception("can't continue data prepare process " "without downloading brown dataset") <NEW_LINE> <DEDENT> nltk.download('brown') <NEW_LINE> <DEDENT> self.bigram_finder = nltk.collocations.BigramCollocationFinder.from_words( nltk.corpus.brown.words()) <NEW_LINE> self.bigram_messure = nltk.collocations.BigramAssocMeasures() <NEW_LINE> self.likelihood_ration_dict = self.build_bigram_score_dict( self.bigram_messure.likelihood_ratio) <NEW_LINE> self.chi_sq_dict = self.build_bigram_score_dict(self.bigram_messure.chi_sq) <NEW_LINE> self.pmi_dict = self.build_bigram_score_dict(self.bigram_messure.pmi) <NEW_LINE> <DEDENT> def build_bigram_score_dict(self, score): <NEW_LINE> <INDENT> bigram_dict = collections.defaultdict(list) <NEW_LINE> scored_bigrams = self.bigram_finder.score_ngrams(score) <NEW_LINE> for key, scores in scored_bigrams: <NEW_LINE> <INDENT> bigram_dict[key[0], key[1]].append(scores) <NEW_LINE> <DEDENT> return bigram_dict <NEW_LINE> <DEDENT> def get_pmi_score(self, phrase): <NEW_LINE> <INDENT> candidates = phrase.split(" ") <NEW_LINE> if len(candidates) < 2: <NEW_LINE> <INDENT> candidates.extend(candidates[0]) <NEW_LINE> <DEDENT> response_list = [] <NEW_LINE> try: <NEW_LINE> <INDENT> pmi_score = self.pmi_dict[tuple(candidates)] <NEW_LINE> if pmi_score: <NEW_LINE> <INDENT> response_list.append(pmi_score[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response_list.append(0) <NEW_LINE> <DEDENT> chi_sq_score = self.chi_sq_dict[tuple(candidates)] <NEW_LINE> if chi_sq_score: <NEW_LINE> <INDENT> response_list.append(chi_sq_score[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response_list.append(0) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> response_list.extend([0, 0]) <NEW_LINE> <DEDENT> return response_list
NLTKCollocations score using NLTK framework on Brown dataset
6259904e507cdc57c63a620b
class TestBug690154(unittest.TestCase): <NEW_LINE> <INDENT> def make_test(self): <NEW_LINE> <INDENT> test = """1 = foo""" <NEW_LINE> fd, path = tempfile.mkstemp() <NEW_LINE> os.write(fd, test) <NEW_LINE> os.close(fd) <NEW_LINE> return path <NEW_LINE> <DEDENT> def test_JSON_structure(self): <NEW_LINE> <INDENT> passes = 1 <NEW_LINE> path = self.make_test() <NEW_LINE> m = mozmill.MozMill.create() <NEW_LINE> results = m.run(dict(path=path)) <NEW_LINE> self.assertFalse(results.passes) <NEW_LINE> self.assertTrue(len(results.fails) == 1) <NEW_LINE> fails = results.fails[0] <NEW_LINE> self.assertFalse(fails['passes']) <NEW_LINE> self.assertTrue(len(fails['fails']) == 1) <NEW_LINE> failure = fails['fails'][0] <NEW_LINE> self.assertTrue('exception' in failure) <NEW_LINE> self.assertTrue(fails['name'] == '<TOP_LEVEL>')
JSON structure when test throws a global exception: https://bugzilla.mozilla.org/show_bug.cgi?id=690154
6259904e07f4c71912bb08a0
class CowrieDailyLogFile(logfile.DailyLogFile): <NEW_LINE> <INDENT> def suffix(self, tupledate): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return "{:02d}-{:02d}-{:02d}".format(tupledate[0], tupledate[1], tupledate[2]) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return '_'.join(map(str, self.toDate(tupledate)))
Overload the original Twisted DailyLogFile with improved date formatting
6259904fbaa26c4b54d50716
class TipsFromVenueResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
Retrieve the value for the "Response" output from this choreography execution. (The response from Foursquare. Corresponds to the ResponseFormat input. Defaults to JSON.)
6259904f4e696a045264e856
class CustomSources(CAMB_Structure): <NEW_LINE> <INDENT> _fields_ = [("num_custom_sources", c_int, "number of sources set"), ("c_source_func", c_void_p, "Don't directly change this"), ("custom_source_ell_scales", AllocatableArrayInt, "scaling in L for outputs")]
Structure containing symbolically-compiled custom CMB angular power spectrum source functions. Don't change this directly; instead call :meth:`.model.CAMBparams.set_custom_scalar_sources`.
6259904f23e79379d538d969
class Model: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.property_int = 1 <NEW_LINE> self.property_str = "value 1" <NEW_LINE> self.property_repr = mock.MagicMock( spec=["__repr__"], __repr__=lambda _: "open_alchemy.models.RefModel()" )
Model class for testing.
6259904fb57a9660fecd2ee8
class ImageBannerSet(BannerSet): <NEW_LINE> <INDENT> banners = models.ManyToManyField(ImageBanner, related_name='banner_sets')
Containing Model for Image Banners
6259904fd99f1b3c44d06b06
class ErrorDefinitionProperties(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'message': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'message': {'key': 'message', 'type': 'str'}, 'code': {'key': 'code', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ErrorDefinitionProperties, self).__init__(**kwargs) <NEW_LINE> self.message = None <NEW_LINE> self.code = kwargs.get('code', None)
Error description and code explaining why an operation failed. Variables are only populated by the server, and will be ignored when sending a request. :ivar message: Description of the error. :vartype message: str :param code: Error code of list gateway. :type code: str
6259904f009cb60464d029a5
class WebLogHelper(): <NEW_LINE> <INDENT> def __init__(self, filter_ip, log_file): <NEW_LINE> <INDENT> self.convert_ip_to_list(filter_ip) <NEW_LINE> self.set_log_file(log_file) <NEW_LINE> <DEDENT> def convert_ip_to_list(self, ip_or_cidr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.filter_ip_list = [str(ipaddress.ip_address(ip_or_cidr))] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.filter_ip_list = [str(ip) for ip in ipaddress.ip_network(ip_or_cidr)] <NEW_LINE> <DEDENT> <DEDENT> def set_log_file(self, log_file): <NEW_LINE> <INDENT> if log_file: <NEW_LINE> <INDENT> self.web_log_file = log_file <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise LogFileParameterException() <NEW_LINE> <DEDENT> <DEDENT> def run_filter(self): <NEW_LINE> <INDENT> with open(self.web_log_file) as log_content: <NEW_LINE> <INDENT> for line in log_content: <NEW_LINE> <INDENT> self.print_filtered_log_line(line) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def print_filtered_log_line(self, log_line): <NEW_LINE> <INDENT> ip_on_log = log_line.split(' ', 1)[0] <NEW_LINE> if ip_on_log in self.filter_ip_list: <NEW_LINE> <INDENT> print(log_line, end='')
Provides functionality to filter a given webserver logfile based on a given IP address or CIDR range. Note: This utility class works with common webserver log formats where the IP address is logged as the first string of the log line; it does not support log formats such as JSON or XML
6259904f7cff6e4e811b6ea7
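A hedged usage sketch, assuming the class above (and its LogFileParameterException) is importable in untokenized form; the /30 range below expands to four addresses via ipaddress.ip_network, exactly as convert_ip_to_list() does.

import tempfile

# Write a two-line access log: only the first line falls inside the filter range.
with tempfile.NamedTemporaryFile("w", suffix=".log", delete=False) as fh:
    fh.write('192.168.0.1 - - [01/Jan/2024:00:00:00] "GET / HTTP/1.1" 200 512\n')
    fh.write('10.0.0.5 - - [01/Jan/2024:00:00:01] "GET / HTTP/1.1" 200 512\n')
    log_path = fh.name

helper = WebLogHelper("192.168.0.0/30", log_path)   # /30 -> 192.168.0.0 .. 192.168.0.3
helper.run_filter()                                  # prints only the 192.168.0.1 line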
class TftpPacket(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.opcode = 0 <NEW_LINE> self.buffer = None <NEW_LINE> <DEDENT> def encode(self): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method") <NEW_LINE> <DEDENT> def decode(self): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method")
This class is the parent class of all tftp packet classes. It is an abstract class, providing an interface, and should not be instantiated directly.
6259904fcad5886f8bdc5ab4
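A sketch of one possible concrete subclass, only to show how the abstract encode/decode interface is meant to be filled in; it is deliberately simplified and is not the library's actual RRQ packet class (real implementations also carry TFTP options and stricter validation).

import struct

class ReadRequestPacket(TftpPacket):
    """Simplified TFTP RRQ: 2-byte opcode, then NUL-terminated filename and mode."""
    def __init__(self, filename="", mode="octet"):
        TftpPacket.__init__(self)
        self.opcode = 1                      # RRQ opcode per RFC 1350
        self.filename = filename
        self.mode = mode

    def encode(self):
        self.buffer = (struct.pack("!H", self.opcode)
                       + self.filename.encode() + b"\x00"
                       + self.mode.encode() + b"\x00")
        return self

    def decode(self):
        (self.opcode,) = struct.unpack("!H", self.buffer[:2])
        filename, mode, _rest = self.buffer[2:].split(b"\x00", 2)
        self.filename, self.mode = filename.decode(), mode.decode()
        return self

# pkt = ReadRequestPacket("firmware.bin").encode()
# pkt.buffer  ->  b'\x00\x01firmware.bin\x00octet\x00'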
class MLP(nn.Module): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super(MLP, self).__init__() <NEW_LINE> self.fc1 = nn.Linear(784, 256) <NEW_LINE> self.fc2 = nn.Linear(256, 128) <NEW_LINE> self.fc3 = nn.Linear(128, 64) <NEW_LINE> self.fc4 = nn.Linear(64, 10) <NEW_LINE> self.a1 = BSplineActivation(num_activations=256, mode='linear', device=config.device) <NEW_LINE> self.a2 = BSplineActivation(num_activations=128, mode='linear', device=config.device) <NEW_LINE> self.a3 = BSplineActivation(num_activations=64, mode='linear', device=config.device) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = x.view(x.shape[0], -1) <NEW_LINE> x = self.a1(self.fc1(x)) <NEW_LINE> x = self.a2(self.fc2(x)) <NEW_LINE> x = self.a3(self.fc3(x)) <NEW_LINE> x = F.log_softmax(self.fc4(x), dim=1) <NEW_LINE> return x
Simple fully-connected classifier model to demonstrate activation.
6259904fa8ecb0332587267f
class ArnetParseError(Exception): <NEW_LINE> <INDENT> pass
Raised if an error is encountered during parsing the arnet DBLP data
6259904f596a897236128fe5
class UpdateAgent(standard.ExecuteBinaryCommand): <NEW_LINE> <INDENT> def ProcessFile(self, path, args): <NEW_LINE> <INDENT> cmd = "/usr/sbin/installer" <NEW_LINE> cmd_args = ["-pkg", path, "-target", "/"] <NEW_LINE> time_limit = args.time_limit <NEW_LINE> res = client_utils_common.Execute( cmd, cmd_args, time_limit=time_limit, bypass_whitelist=True) <NEW_LINE> (stdout, stderr, status, time_used) = res <NEW_LINE> stdout = stdout[:10 * 1024 * 1024] <NEW_LINE> stderr = stderr[:10 * 1024 * 1024] <NEW_LINE> self.SendReply( rdf_client.ExecuteBinaryResponse( stdout=stdout, stderr=stderr, exit_status=status, time_used=int(1e6 * time_used)))
Updates the GRR agent to a new version.
6259904fe76e3b2f99fd9e6d
class StorageInterface(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def analysis_backlog(self): <NEW_LINE> <INDENT> return <NEW_LINE> yield <NEW_LINE> <DEDENT> def analysis_backlog_for_path(self, path=None): <NEW_LINE> <INDENT> return <NEW_LINE> yield <NEW_LINE> <DEDENT> def last_modified(self, path=None, recursive=False): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def file_in_path(self, path, filepath): <NEW_LINE> <INDENT> return NotImplementedError() <NEW_LINE> <DEDENT> def file_exists(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def folder_exists(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def list_files( self, path=None, filter=None, recursive=True, level=0, force_refresh=False ): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def add_folder(self, path, ignore_existing=True, display=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def remove_folder(self, path, recursive=True): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def copy_folder(self, source, destination): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def move_folder(self, source, destination): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def add_file( self, path, file_object, printer_profile=None, links=None, allow_overwrite=False, display=None, ): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def remove_file(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def copy_file(self, source, destination): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def move_file(self, source, destination): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def has_analysis(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_metadata(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def add_link(self, path, rel, data): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def remove_link(self, path, rel, data): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_additional_metadata(self, path, key): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def set_additional_metadata(self, path, key, data, overwrite=False, merge=False): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def remove_additional_metadata(self, path, key): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def canonicalize(self, path): <NEW_LINE> <INDENT> return self.sanitize(path) <NEW_LINE> <DEDENT> def sanitize(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def sanitize_path(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def sanitize_name(self, name): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def split_path(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def join_path(self, *path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def path_on_disk(self, path): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def path_in_storage(self, path): <NEW_LINE> <INDENT> raise NotImplementedError()
Interface of storage adapters for OctoPrint.
6259904f3c8af77a43b68974
class Repository(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=255) <NEW_LINE> campus = models.ManyToManyField(Campus, blank=True) <NEW_LINE> slug = AutoSlugField(max_length=50, populate_from=('name'), editable=True) <NEW_LINE> ark = models.CharField(max_length=255, blank=True) <NEW_LINE> aeon_prod = models.CharField(max_length=255, blank=True) <NEW_LINE> aeon_test = models.CharField(max_length=255, blank=True) <NEW_LINE> google_analytics_tracking_code = models.CharField( max_length=64, blank=True, help_text='Enable tracking of your digital assets hosted in the ' 'UCLDC by entering your Google Analytics tracking code.' ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = 'repositories' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> campuses = self.campus.all() <NEW_LINE> if campuses: <NEW_LINE> <INDENT> return '{0} {1}'.format(campuses[0].slug, self.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('repository_collections', kwargs = {'repoid': self.id, 'repo_slug': str(self.slug)}) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.name = self.name.strip() <NEW_LINE> if not self.id: <NEW_LINE> <INDENT> if self.ark: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> Repository.objects.get(ark=self.ark) <NEW_LINE> raise ValueError('Unit with ark ' + self.ark + ' already exists') <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return super(Repository, self).save(*args, **kwargs)
Representation of a holding "repository" for UCLDC
6259904f004d5f362081fa1f
class HeadshotForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = UserProfile <NEW_LINE> fields = ['headshot']
Form for updating the user's avatar (headshot) image
6259904fd53ae8145f9198d0
class HappyCat(Benchmark): <NEW_LINE> <INDENT> Name = ['HappyCat'] <NEW_LINE> def __init__(self, Lower=-100.0, Upper=100.0): <NEW_LINE> <INDENT> Benchmark.__init__(self, Lower, Upper) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def function(cls): <NEW_LINE> <INDENT> def evaluate(D, sol): <NEW_LINE> <INDENT> val1 = 0.0 <NEW_LINE> val2 = 0.0 <NEW_LINE> alpha = 0.125 <NEW_LINE> for i in range(D): <NEW_LINE> <INDENT> val1 += math.pow(abs(math.pow(sol[i], 2) - D), alpha) <NEW_LINE> val2 += (0.5 * math.pow(sol[i], 2) + sol[i]) / D <NEW_LINE> <DEDENT> return val1 + val2 + 0.5 <NEW_LINE> <DEDENT> return evaluate
Implementation of Happy cat function. Date: 2018 Author: Lucija Brezočnik License: MIT Function: **Happy cat function** :math:`f(\mathbf{x}) = {\left |\sum_{i = 1}^D {x_i}^2 - D \right|}^{1/4} + (0.5 \sum_{i = 1}^D {x_i}^2 + \sum_{i = 1}^D x_i) / D + 0.5` **Input domain:** The function can be defined on any input domain but it is usually evaluated on the hypercube :math:`x_i ∈ [-100, 100]`, for all :math:`i = 1, 2,..., D`. **Global minimum:** :math:`f(x^*) = 0`, at :math:`x^* = (-1,...,-1)` LaTeX formats: Inline: $f(\mathbf{x}) = {\left|\sum_{i = 1}^D {x_i}^2 - D \right|}^{1/4} + (0.5 \sum_{i = 1}^D {x_i}^2 + \sum_{i = 1}^D x_i) / D + 0.5$ Equation: \begin{equation} f(\mathbf{x}) = {\left| \sum_{i = 1}^D {x_i}^2 - D \right|}^{1/4} + (0.5 \sum_{i = 1}^D {x_i}^2 + \sum_{i = 1}^D x_i) / D + 0.5 \end{equation} Domain: $-100 \leq x_i \leq 100$ Reference: http://bee22.com/manual/tf_images/Liang%20CEC2014.pdf & Beyer, H. G., & Finck, S. (2012). HappyCat - A Simple Function Class Where Well-Known Direct Search Algorithms Do Fail. In International Conference on Parallel Problem Solving from Nature (pp. 367-376). Springer, Berlin, Heidelberg.
6259904fd7e4931a7ef3d4e5
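As a quick sanity check of the global minimum quoted in the docstring, the formula can be evaluated directly at x* = (-1, ..., -1) with D = 2 (this evaluates the docstring's formula, not the class above):

D = 2
x = [-1.0, -1.0]
sq = sum(v * v for v in x)                               # = D, so the first term vanishes
f = abs(sq - D) ** 0.25 + (0.5 * sq + sum(x)) / D + 0.5  # 0 + (-0.5) + 0.5
print(f)                                                 # 0.0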
class OptionChainData: <NEW_LINE> <INDENT> __slots__ = ['underline', 'open_spot_price', 'all_contracts', 'all_contracts_fetched'] <NEW_LINE> def __init__(self, underline: str): <NEW_LINE> <INDENT> self.underline = underline <NEW_LINE> self.open_spot_price = -1 <NEW_LINE> self.all_contracts = defaultdict(lambda: {}) <NEW_LINE> self.all_contracts_fetched = False <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.all_contracts = {}
When handling options data, we keep the entire options chain, with the contract for each specific option
6259904f99cbb53fe6832352
class VBScriptMode(FundamentalMode): <NEW_LINE> <INDENT> keyword = 'VBScript' <NEW_LINE> editra_synonym = 'VBScript' <NEW_LINE> stc_lexer_id = wx.stc.STC_LEX_VBSCRIPT <NEW_LINE> start_line_comment = u"'" <NEW_LINE> end_line_comment = '' <NEW_LINE> icon = 'icons/page_white.png' <NEW_LINE> default_classprefs = ( StrParam('extensions', 'dsm vbs', fullwidth=True), StrParam('keyword_set_0', unique_keywords[90], hidden=False, fullwidth=True), )
Stub major mode for editing VBScript files. This major mode has been automatically generated and is a boilerplate/ placeholder major mode. Enhancements to this mode are appreciated!
6259904f3617ad0b5ee075ad
class NotFound(ClientError): <NEW_LINE> <INDENT> status_code = 404
Exception mapping a ``404 Not Found`` response.
6259904f23e79379d538d96a
class LogisticRegression(object): <NEW_LINE> <INDENT> def __init__(self, _input, n_in, n_out): <NEW_LINE> <INDENT> self.W = shared(value=numpy.zeros((n_in, n_out), dtype=config.floatX), name='W', borrow=True) <NEW_LINE> self.b = shared(value=numpy.zeros((n_out,), dtype=config.floatX), name='b', borrow=True) <NEW_LINE> self.p_y_given_x = T.nnet.softmax(T.dot(_input, self.W) + self.b) <NEW_LINE> self.y_pred = T.argmax(self.p_y_given_x, axis=1) <NEW_LINE> self._params = [self.W, self.b] <NEW_LINE> <DEDENT> def encode(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def params(self): <NEW_LINE> <INDENT> return self._params <NEW_LINE> <DEDENT> @params.setter <NEW_LINE> def params(self, value): <NEW_LINE> <INDENT> self._params = value <NEW_LINE> <DEDENT> def negative_log_likelihood(self, y): <NEW_LINE> <INDENT> return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y]) <NEW_LINE> <DEDENT> def errors(self, y): <NEW_LINE> <INDENT> if y.ndim != self.y_pred.ndim: <NEW_LINE> <INDENT> raise TypeError('y should have the same shape as self.y_pred', ('y', y.type, 'y_pred', self.y_pred.type)) <NEW_LINE> <DEDENT> if y.dtype.startswith('int'): <NEW_LINE> <INDENT> return T.mean(T.neq(self.y_pred, y)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError()
Multi-class Logistic Regression Class The logistic regression is fully described by a weight matrix :math:`W` and bias vector :math:`b`. Classification is done by projecting data points onto a set of hyperplanes, the distance to which is used to determine a class membership probability.
6259904fd486a94d0ba2d433
class BankAccount: <NEW_LINE> <INDENT> def __init__(self, name, balance): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.balance = balance <NEW_LINE> <DEDENT> def withdraw(self, amount): <NEW_LINE> <INDENT> self.balance -= amount <NEW_LINE> <DEDENT> def deposit(self, amount): <NEW_LINE> <INDENT> self.balance += amount <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{}님의 계좌 예치금은 {}원입니다".format(self.name, self.balance)
Bank account class
6259904f3eb6a72ae038bac9
class LocationFieldFunctionalTestCase(ptc.FunctionalTestCase): <NEW_LINE> <INDENT> pass
Common functional test base class
6259904f7b25080760ed8714
class ConcatDataset(Dataset): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def cumsum(sequence): <NEW_LINE> <INDENT> r, s = [], 0 <NEW_LINE> for e in sequence: <NEW_LINE> <INDENT> l = len(e) <NEW_LINE> r.append(l + s) <NEW_LINE> s += l <NEW_LINE> <DEDENT> return r <NEW_LINE> <DEDENT> def __init__(self, datasets): <NEW_LINE> <INDENT> super(ConcatDataset, self).__init__() <NEW_LINE> assert len(datasets) > 0, 'datasets should not be an empty iterable' <NEW_LINE> self.datasets = list(datasets) <NEW_LINE> for d in self.datasets: <NEW_LINE> <INDENT> assert len(d) >= 0 <NEW_LINE> <DEDENT> self.cumulative_sizes = self.cumsum(self.datasets) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.cumulative_sizes[-1] <NEW_LINE> <DEDENT> def get_item(self, idx): <NEW_LINE> <INDENT> if idx < 0: <NEW_LINE> <INDENT> if -idx > len(self): <NEW_LINE> <INDENT> raise ValueError("absolute value of index should not exceed dataset length") <NEW_LINE> <DEDENT> idx = len(self) + idx <NEW_LINE> <DEDENT> dataset_idx = bisect.bisect_right(self.cumulative_sizes, idx) <NEW_LINE> if dataset_idx == 0: <NEW_LINE> <INDENT> sample_idx = idx <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sample_idx = idx - self.cumulative_sizes[dataset_idx - 1] <NEW_LINE> <DEDENT> return self.datasets[dataset_idx][sample_idx]
Dataset as a concatenation of multiple datasets. This class is useful to assemble different existing datasets. Arguments: datasets (sequence): List of datasets to be concatenated
6259904f6fece00bbaccce28
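A usage sketch, assuming the class above is importable and that the Dataset base class can be constructed without arguments; plain lists stand in for the member datasets, since get_item() only needs len() and indexing.

a = [10, 11, 12]      # indices 0..2 of the concatenation
b = [20, 21]          # indices 3..4

concat = ConcatDataset([a, b])
print(len(concat))           # 5   (cumulative_sizes == [3, 5])
print(concat.get_item(0))    # 10  -> first dataset, local index 0
print(concat.get_item(4))    # 21  -> second dataset, local index 1
print(concat.get_item(-1))   # 21  -> negative indices count from the end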
class SaleComplaintTestCase(ModuleTestCase): <NEW_LINE> <INDENT> module = 'sale_complaint'
Test Sale Complaint module
6259904f30c21e258be99c73
class MaterielViewTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> <DEDENT> def test_list_materiel(self): <NEW_LINE> <INDENT> url = reverse('app_name_materiel_list') <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_create_materiel(self): <NEW_LINE> <INDENT> url = reverse('app_name_materiel_create') <NEW_LINE> data = { "nom": "nom", "type": "type", "qteTotal": "qteTotal", "lieuStockage": "lieuStockage", } <NEW_LINE> response = self.client.post(url, data=data) <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> <DEDENT> def test_detail_materiel(self): <NEW_LINE> <INDENT> materiel = create_materiel() <NEW_LINE> url = reverse('app_name_materiel_detail', args=[materiel.pk,]) <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_update_materiel(self): <NEW_LINE> <INDENT> materiel = create_materiel() <NEW_LINE> data = { "nom": "nom", "type": "type", "qteTotal": "qteTotal", "lieuStockage": "lieuStockage", } <NEW_LINE> url = reverse('app_name_materiel_update', args=[materiel.pk,]) <NEW_LINE> response = self.client.post(url, data) <NEW_LINE> self.assertEqual(response.status_code, 302)
Tests for Materiel
6259904f8da39b475be04653
class KodiProtocol(asyncio.Protocol): <NEW_LINE> <INDENT> @asyncio.coroutine <NEW_LINE> def notify(self, notification, async_notifier): <NEW_LINE> <INDENT> request = _NotificationRequest(notification) <NEW_LINE> response = yield from self._send_request(request, self.target) <NEW_LINE> return response <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def _send_request(self, request, target): <NEW_LINE> <INDENT> request_data = request.marshal() <NEW_LINE> auth = (target.username, target.password) <NEW_LINE> client = aiohttp.HttpClient([(target.host, target.port)], method='POST', path='/jsonrpc') <NEW_LINE> headers = {'Content-Type': 'application/json'} <NEW_LINE> result = {} <NEW_LINE> try: <NEW_LINE> <INDENT> http_response = yield from client.request(headers=headers, data=request_data, auth=auth) <NEW_LINE> if http_response.status == 200: <NEW_LINE> <INDENT> response_data = yield from http_response.read() <NEW_LINE> http_response.close() <NEW_LINE> data = json.loads(response_data.decode('UTF-8')) <NEW_LINE> result['status'] = data['result'] <NEW_LINE> if data['result'] == 'OK': <NEW_LINE> <INDENT> result['status_code'] = 0 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> result['status'] = 'ERROR' <NEW_LINE> result['reason'] = http_response.reason <NEW_LINE> <DEDENT> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> result['status'] = 'ERROR' <NEW_LINE> result['reason'] = exc.args[0] <NEW_LINE> <DEDENT> result['target'] = str(self.target) <NEW_LINE> return result
Kodi JSON Protocol. Uses :mod:`aiohttp` to communicate with Kodi
6259904f15baa723494633fb
class TestTimestampedObject(test.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestTimestampedObject, self).setUp() <NEW_LINE> @base.VersionedObjectRegistry.register_if(False) <NEW_LINE> class MyTimestampedObject(base.VersionedObject, base.TimestampedObject): <NEW_LINE> <INDENT> fields = { 'field1': fields.Field(fields.String()), } <NEW_LINE> <DEDENT> self.myclass = MyTimestampedObject <NEW_LINE> self.my_object = self.myclass(field1='field1') <NEW_LINE> <DEDENT> def test_timestamped_has_fields(self): <NEW_LINE> <INDENT> self.assertEqual('field1', self.my_object.field1) <NEW_LINE> self.assertIn('updated_at', self.my_object.fields) <NEW_LINE> self.assertIn('created_at', self.my_object.fields) <NEW_LINE> <DEDENT> def test_timestamped_holds_timestamps(self): <NEW_LINE> <INDENT> now = timeutils.utcnow(with_timezone=True) <NEW_LINE> self.my_object.updated_at = now <NEW_LINE> self.my_object.created_at = now <NEW_LINE> self.assertEqual(now, self.my_object.updated_at) <NEW_LINE> self.assertEqual(now, self.my_object.created_at) <NEW_LINE> <DEDENT> def test_timestamped_rejects_not_timestamps(self): <NEW_LINE> <INDENT> with testtools.ExpectedException(ValueError, '.*parse date.*'): <NEW_LINE> <INDENT> self.my_object.updated_at = 'a string' <NEW_LINE> <DEDENT> with testtools.ExpectedException(ValueError, '.*parse date.*'): <NEW_LINE> <INDENT> self.my_object.created_at = 'a string'
Test TimestampedObject mixin. Do this by creating an object that uses the mixin and confirm that the added fields are there and in fact behaves as the DateTimeFields we desire.
6259904f009cb60464d029a7
class StringTable: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.table = [] <NEW_LINE> self.length = 0 <NEW_LINE> <DEDENT> def add(self, string): <NEW_LINE> <INDENT> for te in self.table: <NEW_LINE> <INDENT> if te[0].endswith(string): <NEW_LINE> <INDENT> idx = te[1] + len(te[0]) - len(string) <NEW_LINE> te[2].add(idx) <NEW_LINE> return idx <NEW_LINE> <DEDENT> <DEDENT> idx = self.length <NEW_LINE> self.table.append((string, idx, set((idx,)))) <NEW_LINE> self.length += len(string) + 1 <NEW_LINE> return idx <NEW_LINE> <DEDENT> def emit(self, filp, name, static=True): <NEW_LINE> <INDENT> fragments = [ '"%s\\0" /* %s */' % ( te[0].encode('unicode_escape').decode(), ', '.join(str(idx) for idx in sorted(te[2])) ) for te in self.table ] <NEW_LINE> filp.write('%sconst char %s[] =\n%s;\n' % ( 'static ' if static else '', name, '\n'.join('\t' + fragment for fragment in fragments) ))
A class for collecting multiple strings into a single larger string that is referenced by index offsets (to avoid relocations in the resulting binary)
6259904f30dc7b76659a0ca3
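A usage sketch (assuming the class above is importable) showing the suffix sharing: "screen" is a suffix of "touchscreen", so it reuses the tail of the existing entry instead of growing the table.

import sys

table = StringTable()
print(table.add("touchscreen"))   # 0   - new entry at offset 0, NUL-terminated
print(table.add("screen"))        # 5   - reuses the tail of "touchscreen"
print(table.add("keyboard"))      # 12  - new entry after "touchscreen\0"

table.emit(sys.stdout, "names")
# static const char names[] =
#     "touchscreen\0" /* 0, 5 */
#     "keyboard\0" /* 12 */;
# (each fragment line is tab-indented in the actual output)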
@pytest.mark.components <NEW_LINE> @pytest.allure.story('Clients') <NEW_LINE> @pytest.allure.feature('PATCH') <NEW_LINE> class Test_PFE_Components(object): <NEW_LINE> <INDENT> @pytest.allure.link('https://jira.qumu.com/browse/TC-43395') <NEW_LINE> @pytest.mark.Clients <NEW_LINE> @pytest.mark.PATCH <NEW_LINE> def test_TC_43395_PATCH_Clients_Width_Gt(self, context): <NEW_LINE> <INDENT> with pytest.allure.step("""Verify that user is able to modify source constraint rule with specific rule for parameter 'Width>GT(Greater Than) using request PATCH '/clients/'."""): <NEW_LINE> <INDENT> clientDetails = context.sc.ClientDetails( id=None, matchingRule={'operator': 'ALL', 'rules': [], 'groups': []}, name='PATCH: Client updated with Source Rule Width GT', sourceSelectionRule=[{ 'operator': 'ALL', 'rules': [{ 'expressionType': 'Single', 'contextField': 'widthPx', 'operator': 'GT', 'contextFieldType': 'String', 'matchValue': 1000, 'contextFieldKey': None }], 'groups': [] }]) <NEW_LINE> response = check( context.cl.Clients.updateEntity( body=clientDetails, id='clientUpdate' ) )
PFE Clients test cases.
6259904f7cff6e4e811b6eaa
class EmptyMailSubjectOrBody(ExceptionWhitoutTraceBack): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> msg = "Neither e-mail subject nor e-mail body can be left blank." <NEW_LINE> super().__init__(msg)
EmptyMailSubjectOrBody
6259904f4428ac0f6e6599a1
class MinuteQuery(QueryBase): <NEW_LINE> <INDENT> def __init__(self, symbol, start_time, end_time): <NEW_LINE> <INDENT> QueryBase.__init__(self, symbol) <NEW_LINE> if time_delta_seconds(start_time, end_time) <= 0: <NEW_LINE> <INDENT> raise RuntimeError("Supplied time range is negative.") <NEW_LINE> <DEDENT> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> starttime_str = self.start_time.strftime('%M%S') <NEW_LINE> endtime_str = self.end_time.strftime('%M%S') <NEW_LINE> subroot_str = "2-1-17-0-0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0," "0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0," "0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0," "0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0|0,0,0," "0,0|0,0,0,0,0|0,0,0,0,0|0,0,0,0,0-0" <NEW_LINE> postamble_str = '-BG=FFFFFF-BT=0-&WD=635-HT=395---XXCL-' <NEW_LINE> self.request_url = self._REQUEST_ROOT + subroot_str + self.start_time.strftime('%H%M') + self.end_time.strftime('%H%M') + "-03NA000000" + self.symbol + postamble_str <NEW_LINE> timestamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d') <NEW_LINE> self.output_filename = timestamp + "_" + self.start_time.strftime('%H%M') + self.end_time.strftime('%H%M') <NEW_LINE> self.download_data() <NEW_LINE> <DEDENT> def get_pandas_dataframe(self): <NEW_LINE> <INDENT> if not self.file_saved: <NEW_LINE> <INDENT> raise RuntimeError("No data yet acquired. Please acquire data first.") <NEW_LINE> <DEDENT> dataframe = pd.read_csv(self.file_buffer, parse_dates=['Time']) <NEW_LINE> dataframe = dataframe.reindex(index=dataframe.index[::-1]) <NEW_LINE> return dataframe
This returns 1-minute resolution data for a supplied symbol. The data should ideally be 1-minute resolution, but there may be missing minutes here and there. Returned are timestamp (HH:MM), Price, and Volume
6259904fa8ecb03325872681
class ShowPlatformFedActiveIfmMappingSchema(MetaParser): <NEW_LINE> <INDENT> schema = {'interface': {Any(): {'IF_ID': str, 'Inst': str, 'Asic': str, 'Core': str, 'IFG_ID': str, 'Port': str, 'SubPort': str, 'Mac': str, 'First_Serdes': str, 'Last_Serdes': str, 'Cntx': str, 'LPN': str, 'GPN': str, 'Type': str, 'Active': str, } }, }
Schema for show platform software fed active ifm mappings
6259904fe76e3b2f99fd9e6f
class DownloadPython(threading.Thread): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> log(INFO, 'Downloading embeddable Python...') <NEW_LINE> ver = '3.8.1' <NEW_LINE> arch = 'amd64' if platform.architecture()[0] == '64bit' else 'win32' <NEW_LINE> url = 'https://www.python.org/ftp/python/{ver}/python-{ver}-embed-{arch}.zip'.format( ver=ver, arch=arch, ) <NEW_LINE> if not os.path.exists(RESOURCES_FOLDER): <NEW_LINE> <INDENT> os.makedirs(RESOURCES_FOLDER) <NEW_LINE> <DEDENT> zip_file = os.path.join(RESOURCES_FOLDER, 'python.zip') <NEW_LINE> try: <NEW_LINE> <INDENT> urllib.urlretrieve(url, zip_file) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> urllib.request.urlretrieve(url, zip_file) <NEW_LINE> <DEDENT> log(INFO, 'Extracting Python...') <NEW_LINE> with contextlib.closing(ZipFile(zip_file)) as zf: <NEW_LINE> <INDENT> path = os.path.join(RESOURCES_FOLDER, 'python') <NEW_LINE> zf.extractall(path) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> os.remove(zip_file) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> log(INFO, 'Finished extracting Python.')
Non-blocking thread for extracting embeddable Python on Windows machines.
6259904f8da39b475be04654
class ExecutionError(Error): <NEW_LINE> <INDENT> def __init__(self, step, msg): <NEW_LINE> <INDENT> self.step = step <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'error in step {0} - {1}'.format( self.step , self.msg )
Exception raised for errors during execution. Attributes: step -- execution step in which the error occurred msg -- explanation of the error
6259904fa219f33f346c7c70
class TestError(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testError(self): <NEW_LINE> <INDENT> model = ProcessMaker_PMIO.models.error.Error()
Error unit test stubs
6259904fd53ae8145f9198d1
class RoleTestCase(UITestCase): <NEW_LINE> <INDENT> @tier1 <NEW_LINE> def test_positive_create_with_name(self): <NEW_LINE> <INDENT> with Session(self.browser) as session: <NEW_LINE> <INDENT> for name in generate_strings_list(length=10): <NEW_LINE> <INDENT> with self.subTest(name): <NEW_LINE> <INDENT> make_role(session, name=name) <NEW_LINE> self.assertIsNotNone(self.role.search(name)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @tier1 <NEW_LINE> def test_negative_create_with_invalid_name(self): <NEW_LINE> <INDENT> with Session(self.browser) as session: <NEW_LINE> <INDENT> for name in invalid_values_list(interface='ui'): <NEW_LINE> <INDENT> with self.subTest(name): <NEW_LINE> <INDENT> make_role(session, name=name) <NEW_LINE> self.assertIsNotNone(session.nav.wait_until_element( common_locators['name_haserror'])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @tier1 <NEW_LINE> def test_positive_delete(self): <NEW_LINE> <INDENT> with Session(self.browser) as session: <NEW_LINE> <INDENT> for name in generate_strings_list(length=10): <NEW_LINE> <INDENT> with self.subTest(name): <NEW_LINE> <INDENT> make_role(session, name=name) <NEW_LINE> self.role.delete(name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @tier1 <NEW_LINE> def test_positive_update_name(self): <NEW_LINE> <INDENT> name = gen_string('utf8') <NEW_LINE> with Session(self.browser) as session: <NEW_LINE> <INDENT> make_role(session, name=name) <NEW_LINE> self.assertIsNotNone(self.role.search(name)) <NEW_LINE> for new_name in generate_strings_list(length=10): <NEW_LINE> <INDENT> with self.subTest(new_name): <NEW_LINE> <INDENT> self.role.update(name, new_name) <NEW_LINE> self.assertIsNotNone(self.role.search(new_name)) <NEW_LINE> name = new_name <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @tier1 <NEW_LINE> def test_positive_update_permission(self): <NEW_LINE> <INDENT> name = gen_string('alpha') <NEW_LINE> with Session(self.browser) as session: <NEW_LINE> <INDENT> make_role(session, name=name) <NEW_LINE> self.assertIsNotNone(self.role.search(name)) <NEW_LINE> self.role.update( name, add_permission=True, resource_type='Architecture', permission_list=['view_architectures', 'create_architectures'], ) <NEW_LINE> <DEDENT> <DEDENT> @tier1 <NEW_LINE> def test_positive_update_org(self): <NEW_LINE> <INDENT> name = gen_string('alpha') <NEW_LINE> org = entities.Organization().create() <NEW_LINE> with Session(self.browser) as session: <NEW_LINE> <INDENT> make_role(session, name=name) <NEW_LINE> self.assertIsNotNone(self.role.search(name)) <NEW_LINE> self.role.update( name, add_permission=True, resource_type='Activation Keys', permission_list=['view_activation_keys'], organization=[org.name], )
Implements Roles tests from UI
6259904fe64d504609df9e06
class DefConfig(object): <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> LISTEN = '0.0.0.0' <NEW_LINE> SUTEKH_PREFS = prefs_dir("Sutekh") <NEW_LINE> DATABASE_URI = sqlite_uri(os.path.join(SUTEKH_PREFS, "sutekh.db")) <NEW_LINE> ICONS = False
Default config for the web app
6259904f379a373c97d9a49a
class SimpleLBP(nn.Module): <NEW_LINE> <INDENT> def __init__(self, n_classes=2): <NEW_LINE> <INDENT> super(SimpleLBP, self).__init__() <NEW_LINE> self.pool = nn.MaxPool3d(2, 2) <NEW_LINE> self.conv1 = ConvLBP(1, 8, 4) <NEW_LINE> self.conv2 = nn.Conv3d(8, 8, 5) <NEW_LINE> self.conv3 = ConvLBP(8, 16, 4) <NEW_LINE> self.conv4 = nn.Conv3d(16, 16, 5) <NEW_LINE> self.conv5 = ConvLBP(16, 32, 4) <NEW_LINE> self.conv6 = nn.Conv3d(32, 32, 5) <NEW_LINE> self.fc1 = nn.Linear(32 * 10 * 13 * 10, 5000) <NEW_LINE> self.fc2 = nn.Linear(5000, 800) <NEW_LINE> self.fc3 = nn.Linear(800, 100) <NEW_LINE> self.fc4 = nn.Linear(100, 10) <NEW_LINE> self.fc5 = nn.Linear(10, n_classes) <NEW_LINE> <DEDENT> def forward(self, x, train=False): <NEW_LINE> <INDENT> x = F.relu(self.conv1(x)) <NEW_LINE> x = F.relu(self.conv2(x)) <NEW_LINE> x = self.pool(x) <NEW_LINE> x = F.relu(self.conv3(x)) <NEW_LINE> x = F.relu(self.conv4(x)) <NEW_LINE> x = self.pool(x) <NEW_LINE> x = F.relu(self.conv5(x)) <NEW_LINE> x = F.relu(self.conv6(x)) <NEW_LINE> x = self.pool(x) <NEW_LINE> x = x.view(-1, 32 * 10 * 13 * 10) <NEW_LINE> x = F.relu(self.fc1(x)) <NEW_LINE> x = F.relu(self.fc2(x)) <NEW_LINE> x = F.relu(self.fc3(x)) <NEW_LINE> x = F.relu(self.fc4(x)) <NEW_LINE> x = self.fc5(x) <NEW_LINE> return x
Classifier for a binary classification task
6259904f462c4b4f79dbce6e
class GenericClass(): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass
Generic class for init of MambuMapObj
6259904f71ff763f4b5e8c17
class GetM3u8HlsJob(Queueable): <NEW_LINE> <INDENT> hls_id = None <NEW_LINE> def __init__(self, hls_id): <NEW_LINE> <INDENT> self.hls_id = hls_id <NEW_LINE> <DEDENT> @func_timeout.func_set_timeout(3) <NEW_LINE> def fetch(self, qiniu, url, key): <NEW_LINE> <INDENT> bucket = qiniu.bucket() <NEW_LINE> fetch, ret = bucket.fetch(url, qiniu.bucket_name, key) <NEW_LINE> print(fetch) <NEW_LINE> if ret.status_code == 200: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def handle(self): <NEW_LINE> <INDENT> hls_id = self.hls_id <NEW_LINE> print("收到 hls %d" % hls_id) <NEW_LINE> hls_info = M3u8Hls.find(hls_id) <NEW_LINE> if hls_info.status != M3u8Hls.STATUS_DEFAULT: <NEW_LINE> <INDENT> print("hls {} 状态错误".format(hls_id)) <NEW_LINE> return <NEW_LINE> <DEDENT> hls_info.update({ "status": M3u8Hls.STATUS_LOADING }) <NEW_LINE> key = 'hls/{}/{}.ts'.format(hls_info.m3u8_list_id, hls_id) <NEW_LINE> qiniu = container().make('Qiniu') <NEW_LINE> try: <NEW_LINE> <INDENT> fetch_status = self.fetch(qiniu, hls_info.url, key) <NEW_LINE> <DEDENT> except func_timeout.exceptions.FunctionTimedOut: <NEW_LINE> <INDENT> print("{} 超时 {}".format(hls_id, key)) <NEW_LINE> fetch_status = False <NEW_LINE> <DEDENT> if fetch_status: <NEW_LINE> <INDENT> hls_info.update({ "key": key, "status": M3u8Hls.STATUS_SUCCESS }) <NEW_LINE> print("{} 成功 {}".format(hls_id, key)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hls_info.update({ "status": M3u8Hls.STATUS_ERROR }) <NEW_LINE> print("{} 失败".format(hls_id))
A GetM3u8HlsJob Job.
6259904f0a366e3fb87dde55
class ParamAndHeaderTest(LimitTestBase): <NEW_LINE> <INDENT> handler_class = ParamAndHeaderHandler <NEW_LINE> def testHeaderAndParam(self): <NEW_LINE> <INDENT> os.environ['REMOTE_ADDR'] = '10.1.1.3' <NEW_LINE> for i in xrange(3): <NEW_LINE> <INDENT> self.handle('post', ('foo', 'meep')) <NEW_LINE> self.assertEquals(200, self.response_code()) <NEW_LINE> self.assertEquals('post success', self.response_body()) <NEW_LINE> <DEDENT> self.handle('post', ('foo', 'meep')) <NEW_LINE> self.assertEquals(503, self.response_code()) <NEW_LINE> self.handle('post', ('foo', 'stuff')) <NEW_LINE> self.assertEquals(200, self.response_code()) <NEW_LINE> os.environ['REMOTE_ADDR'] = '10.1.1.4' <NEW_LINE> self.handle('post', ('foo', 'meep')) <NEW_LINE> self.assertEquals(200, self.response_code()) <NEW_LINE> <DEDENT> def testHeaderMissing(self): <NEW_LINE> <INDENT> for i in xrange(4): <NEW_LINE> <INDENT> self.handle('post', ('foo', 'meep')) <NEW_LINE> self.assertEquals(200, self.response_code()) <NEW_LINE> self.assertEquals('post success', self.response_body()) <NEW_LINE> <DEDENT> <DEDENT> def testParamMissing(self): <NEW_LINE> <INDENT> os.environ['REMOTE_ADDR'] = '10.1.1.4' <NEW_LINE> for i in xrange(4): <NEW_LINE> <INDENT> self.handle('post') <NEW_LINE> self.assertEquals(200, self.response_code()) <NEW_LINE> self.assertEquals('post success', self.response_body()) <NEW_LINE> <DEDENT> <DEDENT> def testBothMissing(self): <NEW_LINE> <INDENT> for i in xrange(4): <NEW_LINE> <INDENT> self.handle('post') <NEW_LINE> self.assertEquals(200, self.response_code()) <NEW_LINE> self.assertEquals('post success', self.response_body())
Tests for limiting by parameters and headers.
6259904f07d97122c4218113
class ParamMeta(type): <NEW_LINE> <INDENT> def __new__(cls, classname, bases, classdict): <NEW_LINE> <INDENT> if '_params' not in classdict: <NEW_LINE> <INDENT> classdict['_params'] = {} <NEW_LINE> <DEDENT> params = classdict['_params'] <NEW_LINE> for base in bases: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> baseparams = base._params <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for param_name in baseparams: <NEW_LINE> <INDENT> if param_name in params: <NEW_LINE> <INDENT> if params[param_name].doc is None: <NEW_LINE> <INDENT> params[param_name].doc = baseparams[param_name].doc <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> params[param_name] = baseparams[param_name] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if '__doc__' not in classdict: <NEW_LINE> <INDENT> classdict['__doc__'] = '%s -- Class using Params' % classname <NEW_LINE> <DEDENT> docs = [('%s: %s (default=%s)' % (param_name, params[param_name].doc, params[param_name].default)) for param_name in params] <NEW_LINE> docs.sort() <NEW_LINE> classdict['__doc__'] = classdict['__doc__'] + '\n '.join(['\n%s Parameters:' % classname] + docs) <NEW_LINE> if '__init__' not in classdict: <NEW_LINE> <INDENT> def __init__(self, **kwds): <NEW_LINE> <INDENT> for key in kwds: <NEW_LINE> <INDENT> if key not in self._params: <NEW_LINE> <INDENT> raise ParamError(self.__class__.__name__, key) <NEW_LINE> <DEDENT> <DEDENT> for param_name in self._params: <NEW_LINE> <INDENT> if param_name in kwds: <NEW_LINE> <INDENT> value = kwds[param_name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = self._params[param_name].default <NEW_LINE> <DEDENT> setattr(self, param_name, value) <NEW_LINE> <DEDENT> <DEDENT> classdict['__init__'] = __init__ <NEW_LINE> <DEDENT> return type.__new__(cls, classname, bases, classdict)
A metaclass that lets you define params. When creating a new class of type ParamMeta, add a dictionary named params into the class namespace. Add Param objects to the dictionary with the key being the name of the parameter. Now, each object of the class will have an attribute with the appropriate name. The value will default to the default value in the Param object, but it can be overridden by name in __init__. Rather than using ParamMeta directly, we recommend that you subclass ParamObj, which will allow you to override __init__ as long as you call super's __init__.
6259904f8a43f66fc4bf3607
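A hedged usage sketch. The Param and ParamError classes referenced by the metaclass are not shown in this snippet, so a minimal stand-in Param (exposing just the .default and .doc attributes the metaclass reads) is defined here for illustration.

class Param:
    def __init__(self, default=None, doc=None):
        self.default = default
        self.doc = doc

class WordCounter(metaclass=ParamMeta):
    """Counts words."""
    _params = {
        'lowercase': Param(default=True, doc='fold case before counting'),
        'min_length': Param(default=1, doc='ignore shorter tokens'),
    }

wc = WordCounter(min_length=3)
print(wc.lowercase, wc.min_length)   # True 3
print(WordCounter.__doc__)           # original docstring plus the generated parameter list
# WordCounter(bogus=1) would raise ParamError, since 'bogus' is not a declared parameter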
class ActiveSecurityAdminRulesListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[ActiveBaseSecurityAdminRule]'}, 'skip_token': {'key': 'skipToken', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ActiveBaseSecurityAdminRule"]] = None, skip_token: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ActiveSecurityAdminRulesListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.skip_token = skip_token
Result of the request to list active security admin rules. It contains a list of active security admin rules and a skiptoken to get the next set of results. :param value: Gets a page of active security admin rules. :type value: list[~azure.mgmt.network.v2021_02_01_preview.models.ActiveBaseSecurityAdminRule] :param skip_token: When present, the value can be passed to a subsequent query call (together with the same query and scopes used in the current request) to retrieve the next page of data. :type skip_token: str
6259904fe76e3b2f99fd9e70
class NetworkDeviceResource(Resource): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @swag_from('../swagger/networkDevice/GET.yml') <NEW_LINE> def get(_id): <NEW_LINE> <INDENT> networkDevice = NetworkDeviceRepository.get(deviceId=deviceId) <NEW_LINE> return jsonify({'networkDevice': networkDevice.json}) <NEW_LINE> <DEDENT> @swag_from('../swagger/networkDevice/POST.yml') <NEW_LINE> def post(_id, name, v4addr, v6addr, netmask, hwaddr, deviceId): <NEW_LINE> <INDENT> networkDevice = NetworkDeviceRepository.create( _id=_id, name=name, v4addr=v4addr, v6addr=v6addr, netmask=netmask, hwaddr=hwaddr, deviceId=deviceId ) <NEW_LINE> return jsonify({'networkDevice': networkDevice.json})
Verbs relative to the users
6259904f76d4e153a661dcb0