code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars) |
---|---|---|
class TreatmentResource(ModelResource): <NEW_LINE> <INDENT> animals = fields.ToManyField('mousedb.animal.api.AnimalResource','animals',full=True, null=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> queryset = Treatment.objects.all() <NEW_LINE> fields = 'treatment' <NEW_LINE> resource_name = 'treatment' <NEW_LINE> include_resource_uri = False <NEW_LINE> list_allowed_methods = ['get'] <NEW_LINE> detail_allowed_methods = ['get'] <NEW_LINE> authentication = ApiKeyAuthentication() <NEW_LINE> filtering = {"treatment":ALL} | This generates the API resource for :class:`~mousedb.data.models.Treatment` objects.
This is a limited dataset for use in AnimalResource calls. | 625990714f6381625f19a110 |
class Router(Node): <NEW_LINE> <INDENT> def __init__(self, node_id): <NEW_LINE> <INDENT> super(Router, self).__init__(node_id) | Router class, subclass of Node. | 62599071ac7a0e7691f73db8 |
class Document(_MainResource, Base): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _collection_path(cls): <NEW_LINE> <INDENT> return 'documents' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _instance_path(cls, dispute_id, document_id): <NEW_LINE> <INDENT> return ('disputes', dispute_id, 'documents', document_id) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def retrieve(cls, dispute_id, document_id): <NEW_LINE> <INDENT> return _as_object(cls._request('get', cls._instance_path(dispute_id, document_id))) <NEW_LINE> <DEDENT> def reload(self): <NEW_LINE> <INDENT> return self._reload_data( self._request('get', self._attributes['location'])) <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> return self._reload_data( self._request('delete', self._attributes['location'])) <NEW_LINE> <DEDENT> @property <NEW_LINE> def destroyed(self): <NEW_LINE> <INDENT> return self._attributes.get('deleted', False) | API class representing a dispute document in an account.
This API class is used for managing dispute document files. Documents are
used to help resolve disputes. Supported file types include PNG, JPG, and
PDF.
Basic usage::
>>> import omise
>>> omise.api_secret = 'skey_test_4xs8breq3htbkj03d2x'
>>> dispute = omise.Dispute.retrieve('dspt_test_5mr4ox8e818viqtaqs1')
>>> document = dispute.documents.retrieve("docu_test_5mr4oyqphijal1ps9u6")
<Document id='docu_test_5mr4oyqphijal1ps9u6' at 0x7ffdbb90d410>
>>> document.filename
'evidence.png' | 62599071aad79263cf430085 |
class Storage: <NEW_LINE> <INDENT> def __init__(self, base_folder, filename): <NEW_LINE> <INDENT> self.filename = os.path.join(base_folder, filename) <NEW_LINE> self.last_value = None <NEW_LINE> self.last_read_time = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> if not self.exists: <NEW_LINE> <INDENT> fname = self.filename <NEW_LINE> print(f"Warning: Storage file \"{fname}\" does not exist") <NEW_LINE> return None <NEW_LINE> <DEDENT> tlread = self.last_read_time <NEW_LINE> tlmod = self.last_modified_time <NEW_LINE> if tlread is None or tlmod > tlread: <NEW_LINE> <INDENT> self.last_value = self.load() <NEW_LINE> self.last_read_time = tlmod <NEW_LINE> <DEDENT> return self.last_value <NEW_LINE> <DEDENT> @property <NEW_LINE> def exists(self): <NEW_LINE> <INDENT> return os.path.isfile(self.filename) <NEW_LINE> <DEDENT> @property <NEW_LINE> def last_modified_time(self): <NEW_LINE> <INDENT> if self.exists: <NEW_LINE> <INDENT> return os.stat(self.filename).st_mtime <NEW_LINE> <DEDENT> <DEDENT> def store(self, value): <NEW_LINE> <INDENT> timestamp = asctime() <NEW_LINE> with open(self.filename, "w") as f: <NEW_LINE> <INDENT> f.write(f"# {timestamp}\n") <NEW_LINE> f.write(f"{value}\n") <NEW_LINE> <DEDENT> <DEDENT> def load(self): <NEW_LINE> <INDENT> value = np.loadtxt(self.filename) <NEW_LINE> return float(value) | Read/write a value from/to a file | 62599071f548e778e596ce5c |
class Reminders: <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> self.timers = bot.get_cog('Timers') <NEW_LINE> <DEDENT> async def on_message(self, msg): <NEW_LINE> <INDENT> if msg.channel.id != 269910005837332480: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if msg.content.lower().startswith('remind '): <NEW_LINE> <INDENT> reminder = msg.content[7:] <NEW_LINE> if reminder.lower().startswith('me '): <NEW_LINE> <INDENT> reminder = reminder[3:] <NEW_LINE> <DEDENT> if reminder.lower().startswith('to '): <NEW_LINE> <INDENT> reminder = reminder[3:] <NEW_LINE> <DEDENT> time = get_date(reminder) <NEW_LINE> if not time: <NEW_LINE> <INDENT> await msg.channel.send("When?") <NEW_LINE> msg = await self.bot.wait_for('message', check=lambda m: m.author == msg.author and m.channel == msg.channel) <NEW_LINE> time = get_date(msg.content) <NEW_LINE> <DEDENT> if time: <NEW_LINE> <INDENT> await self.timers.create_timer('reminder', time, [msg.author.id, msg.channel.id, reminder]) <NEW_LINE> await msg.channel.send(f'I\'ll remind you then!') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await msg.channel.send(f"Idk when you want me to remind you") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> async def on_reminder_event(self, author_id, destination_id, msg): <NEW_LINE> <INDENT> author = self.bot.get_user(author_id) <NEW_LINE> if author is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> channel = self.bot.get_channel(destination_id) <NEW_LINE> if channel is None: <NEW_LINE> <INDENT> author = self.bot.get_user(author_id) <NEW_LINE> try: <NEW_LINE> <INDENT> channel = await author.dm_channel() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> await channel.send(f'{author.mention}\n{msg}') | Tools for reminding | 625990717d43ff248742807a |
class IPermission(Interface): <NEW_LINE> <INDENT> id = TextLine( title=u"Id", description=u"Id as which this permission will be known and used.", readonly=True, required=True) <NEW_LINE> title = TextLine( title=u"Title", description=u"Provides a title for the permission.", required=True) <NEW_LINE> description = Text( title=u"Description", description=u"Provides a description for the permission.", required=False) | A permission object. | 625990710a50d4780f706a29 |
class PluginReportStateAPI(object): <NEW_LINE> <INDENT> def __init__(self, topic): <NEW_LINE> <INDENT> target = oslo_messaging.Target(topic=topic, version='1.0', namespace=n_const.RPC_NAMESPACE_STATE) <NEW_LINE> self.client = n_rpc.get_client(target) <NEW_LINE> <DEDENT> def report_state(self, context, agent_state, use_call=False): <NEW_LINE> <INDENT> cctxt = self.client.prepare( timeout=n_rpc.TRANSPORT.conf.rpc_response_timeout) <NEW_LINE> agent_state['uuid'] = uuidutils.generate_uuid() <NEW_LINE> kwargs = { 'agent_state': {'agent_state': agent_state}, 'time': datetime.utcnow().strftime(constants.ISO8601_TIME_FORMAT), } <NEW_LINE> method = cctxt.call if use_call else cctxt.cast <NEW_LINE> return method(context, 'report_state', **kwargs) | RPC client used to report state back to plugin.
This class implements the client side of an rpc interface. The server side
can be found in neutron.db.agents_db.AgentExtRpcCallback. For more
information on changing rpc interfaces, see
doc/source/contributor/internals/rpc_api.rst. | 6259907166673b3332c31cce |
class DrawingHandler: <NEW_LINE> <INDENT> @Route.get( r"/projects/{project_id}/drawings", parameters={ "project_id": "Project UUID" }, status_codes={ 200: "List of drawings returned", }, description="List drawings of a project") <NEW_LINE> async def list_drawings(request, response): <NEW_LINE> <INDENT> project = await Controller.instance().get_loaded_project(request.match_info["project_id"]) <NEW_LINE> response.json([v for v in project.drawings.values()]) <NEW_LINE> <DEDENT> @Route.post( r"/projects/{project_id}/drawings", parameters={ "project_id": "Project UUID" }, status_codes={ 201: "Drawing created", 400: "Invalid request" }, description="Create a new drawing instance", input=DRAWING_OBJECT_SCHEMA, output=DRAWING_OBJECT_SCHEMA) <NEW_LINE> async def create(request, response): <NEW_LINE> <INDENT> project = await Controller.instance().get_loaded_project(request.match_info["project_id"]) <NEW_LINE> drawing = await project.add_drawing(**request.json) <NEW_LINE> response.set_status(201) <NEW_LINE> response.json(drawing) <NEW_LINE> <DEDENT> @Route.get( r"/projects/{project_id}/drawings/{drawing_id}", parameters={ "project_id": "Project UUID", "drawing_id": "Drawing UUID" }, status_codes={ 200: "Drawing found", 400: "Invalid request", 404: "Drawing doesn't exist" }, description="Get a drawing instance", output=DRAWING_OBJECT_SCHEMA) <NEW_LINE> async def get_drawing(request, response): <NEW_LINE> <INDENT> project = await Controller.instance().get_loaded_project(request.match_info["project_id"]) <NEW_LINE> drawing = project.get_drawing(request.match_info["drawing_id"]) <NEW_LINE> response.set_status(200) <NEW_LINE> response.json(drawing) <NEW_LINE> <DEDENT> @Route.put( r"/projects/{project_id}/drawings/{drawing_id}", parameters={ "project_id": "Project UUID", "drawing_id": "Drawing UUID" }, status_codes={ 201: "Drawing updated", 400: "Invalid request" }, description="Update a drawing instance", input=DRAWING_OBJECT_SCHEMA, output=DRAWING_OBJECT_SCHEMA) <NEW_LINE> async def update(request, response): <NEW_LINE> <INDENT> project = await Controller.instance().get_loaded_project(request.match_info["project_id"]) <NEW_LINE> drawing = project.get_drawing(request.match_info["drawing_id"]) <NEW_LINE> await drawing.update(**request.json) <NEW_LINE> response.set_status(201) <NEW_LINE> response.json(drawing) <NEW_LINE> <DEDENT> @Route.delete( r"/projects/{project_id}/drawings/{drawing_id}", parameters={ "project_id": "Project UUID", "drawing_id": "Drawing UUID" }, status_codes={ 204: "Drawing deleted", 400: "Invalid request" }, description="Delete a drawing instance") <NEW_LINE> async def delete(request, response): <NEW_LINE> <INDENT> project = await Controller.instance().get_loaded_project(request.match_info["project_id"]) <NEW_LINE> await project.delete_drawing(request.match_info["drawing_id"]) <NEW_LINE> response.set_status(204) | API entry point for Drawing | 62599071fff4ab517ebcf0ea |
class Core(object): <NEW_LINE> <INDENT> def __init__(self, logger, capability_repository, nodes_repository, valmanager, inter_node_orchestrator, communication_manager, capability_engine, as_daemon=True): <NEW_LINE> <INDENT> self.as_daemon = as_daemon <NEW_LINE> self.stopped = False <NEW_LINE> self.daemon = None <NEW_LINE> self.logger = logger <NEW_LINE> self.logger.info("App started") <NEW_LINE> self.communication_manager = communication_manager <NEW_LINE> self.capability_repository = capability_repository <NEW_LINE> self.nodes_repository = nodes_repository <NEW_LINE> self.valmanager = valmanager <NEW_LINE> self.inter_node_orchestrator = inter_node_orchestrator <NEW_LINE> self.capability_engine = capability_engine <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.startup_clean() <NEW_LINE> if self.as_daemon: <NEW_LINE> <INDENT> self.daemon = Daemonize(app=config['GENERAL']['app_name'], pid=config['GENERAL']['pid'], action=self.run) <NEW_LINE> self.daemon.start() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.run() <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.logger.info('Core started') <NEW_LINE> self.communication_manager.start() <NEW_LINE> self.capability_engine.start() <NEW_LINE> self.valmanager.start() <NEW_LINE> while not self.stopped: <NEW_LINE> <INDENT> sleep(.1) <NEW_LINE> <DEDENT> <DEDENT> def restart(self): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.stopped = True <NEW_LINE> self.valmanager.close() <NEW_LINE> self.capability_engine.stop() <NEW_LINE> self.communication_manager.stop() <NEW_LINE> if self.daemon: <NEW_LINE> <INDENT> self.daemon.exit() <NEW_LINE> <DEDENT> self.logger.info('Core stopped') <NEW_LINE> <DEDENT> def startup_clean(self): <NEW_LINE> <INDENT> self.capability_repository.clear() <NEW_LINE> self.nodes_repository.clear() | This module provides the core functionality of Motey.
It can be executed as a daemon service or can be executed in foreground.
It will start an API webserver and a MQTTServer which can be configured via the config.ini file.
The core will also start all the necessary components like the VALManager, the InterNodeOrchestrator and the
HardwareEventEngine.
After it is started via self.start() it will be executed until self.stop() is executed. | 62599071091ae35668706506 |
class Dconv_shuffle_depthwise(nn.Module): <NEW_LINE> <INDENT> def __init__(self, inplane, outplane, kernel_size, stride, padding): <NEW_LINE> <INDENT> super(Dconv_shuffle_depthwise, self).__init__() <NEW_LINE> print('cifar Dconv_shuffle_depthwise is used') <NEW_LINE> self.dilated_conv = nn.Conv2d(inplane, outplane, kernel_size=kernel_size, stride=stride, padding=padding, groups=inplane, bias=False) <NEW_LINE> self.indices = None <NEW_LINE> <DEDENT> def _setup(self, inplane, spatial_size): <NEW_LINE> <INDENT> self.indices = np.empty((inplane, spatial_size), dtype=np.int64) <NEW_LINE> for i in range(inplane): <NEW_LINE> <INDENT> self.indices[i, :] = np.arange(self.indices.shape[1]) + i * self.indices.shape[1] <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x_shape = x.size() <NEW_LINE> x = x.view(x_shape[0], -1) <NEW_LINE> if self.indices is None: <NEW_LINE> <INDENT> self._setup(x_shape[1], x_shape[2] * x_shape[3]) <NEW_LINE> <DEDENT> for i in range(x_shape[1]): <NEW_LINE> <INDENT> np.random.shuffle(self.indices[i]) <NEW_LINE> <DEDENT> x = x[:, torch.from_numpy(self.indices)].view(x_shape) <NEW_LINE> return self.dilated_conv(x) | Deformable convolution with random shuffling of the feature map.
Random shuffling only happened within each page independently.
The sampling locations are generated for each forward pass during the training. | 625990714e4d562566373cd6 |
class BasicUserTestCase(StaticLiveServerTestCase): <NEW_LINE> <INDENT> def test_user_input(self): <NEW_LINE> <INDENT> client = requests.session() <NEW_LINE> r = client.get(self.live_server_url) <NEW_LINE> csrftoken = client.cookies['csrftoken'] <NEW_LINE> page = html.parse(StringIO(r.text)).getroot() <NEW_LINE> form_textarea = page.cssselect('textarea[id="id_test_text_input"]') <NEW_LINE> self.assertEqual(r.status_code, 200, r.text) <NEW_LINE> self.assertEqual(form_textarea[0].attrib.get('placeholder'), 'Test Text') <NEW_LINE> r = client.post(self.live_server_url, data={'csrfmiddlewaretoken': csrftoken, 'test_text_input': 'Testing 1234'}) <NEW_LINE> self.assertEqual(r.status_code, 200, r.text) <NEW_LINE> self.assertTrue('Testing 1234' in r.text) | Tests basic user interaction with the app | 62599071bf627c535bcb2d9b |
class SetOutBandVpnAuthPasswordResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId") | SetOutBandVpnAuthPassword response structure
| 62599071e5267d203ee6d025 |
class ProbLogObject(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def create_from(cls, obj, **kwdargs): <NEW_LINE> <INDENT> return ProbLog.convert(obj, cls, **kwdargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def createFrom(cls, obj, **kwdargs): <NEW_LINE> <INDENT> return cls.create_from(obj, **kwdargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_from_default_action(cls, src): <NEW_LINE> <INDENT> raise ProbLogError("No default conversion strategy defined.") | Root class for all convertible objects in the ProbLog system. | 62599071ad47b63b2c5a911d |
class Type(BaseType): <NEW_LINE> <INDENT> def __init__(self, field): <NEW_LINE> <INDENT> super(Type, self).__init__([field]) | Base type | 6259907192d797404e3897c3 |
class FileStream: <NEW_LINE> <INDENT> def __init__(self, content): <NEW_LINE> <INDENT> self._content = content <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> content = self._content <NEW_LINE> self._content = '' <NEW_LINE> return content | A class that wraps a file stream, but adds extra attributes to it.
Attributes:
content: str. The content of the file snapshot. | 62599071d268445f2663a7c5 |
class MaspyJsonEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if hasattr(obj, '_reprJSON'): <NEW_LINE> <INDENT> return obj._reprJSON() <NEW_LINE> <DEDENT> return json.JSONEncoder.default(self, obj) | Extension of the json.JSONEncoder to serialize MasPy classes.
Maspy classes need to define a _reprJSON() method, which returns a json
serializable object. | 6259907132920d7e50bc7917 |
class UICustomContextMenu(QMenu): <NEW_LINE> <INDENT> def __init__(self, sender, point, actions): <NEW_LINE> <INDENT> super(UICustomContextMenu, self).__init__() <NEW_LINE> for text, tip in actions: <NEW_LINE> <INDENT> act = self.addAction(self.tr(text)) <NEW_LINE> act.setStatusTip(self.tr(tip)) <NEW_LINE> <DEDENT> self.exec_(sender.mapToGlobal(point)) | By setting a QWidget's contextMenuPolicy to CustomContextMenu, you can use
this class to make the context menu creation more simple. For example:
@pyqtSlot(QPoint)
def on_myWidget_customContextMenuRequested(self, point):
UICustomContextMenu(self.myWidget, point, (
('Action Text 1', 'Status Tip 1'),
('Action Text 2', 'Status Tip 2')
))
TODO: Add the click command too. | 62599071aad79263cf430086 |
class RedFlagModel(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.db = redflag_incidents <NEW_LINE> <DEDENT> def store(self, redflag_incidents): <NEW_LINE> <INDENT> self.db.append(redflag_incidents) <NEW_LINE> <DEDENT> def get_flags(self): <NEW_LINE> <INDENT> return self.db | This class contains methods to manipulate the redflag_incident
variable used to simulate a database | 62599071796e427e53850049 |
class AnnotationDot(Dot): <NEW_LINE> <INDENT> def __init__( self, radius=DEFAULT_DOT_RADIUS * 1.3, stroke_width=5, stroke_color=WHITE, fill_color=BLUE, **kwargs ): <NEW_LINE> <INDENT> Dot.__init__( self, radius=radius, stroke_width=stroke_width, stroke_color=stroke_color, fill_color=fill_color, **kwargs ) | A dot with bigger radius and bold stroke to annotate scenes. | 62599071a8370b77170f1c9a |
class StringTable: <NEW_LINE> <INDENT> def __init__(s): <NEW_LINE> <INDENT> s.bytecounter = 0 <NEW_LINE> s.entry = [] <NEW_LINE> s.__map_stoo = {} <NEW_LINE> s.__map_otos = {} <NEW_LINE> s.__getitem__('<NULL>') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, data): <NEW_LINE> <INDENT> s = cls(); <NEW_LINE> init = data.strip('\x00').split('\x00') <NEW_LINE> for entry in init: <NEW_LINE> <INDENT> s.__getitem__(entry) <NEW_LINE> <DEDENT> return s; <NEW_LINE> <DEDENT> def __getitem__(s, key): <NEW_LINE> <INDENT> if type(key) == str: <NEW_LINE> <INDENT> if s.__map_stoo.has_key(key): <NEW_LINE> <INDENT> return s.__map_stoo[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s.entry.append(key); <NEW_LINE> s.__map_otos[s.bytecounter] = key <NEW_LINE> s.__map_stoo[key] = s.bytecounter <NEW_LINE> s.bytecounter += len(key) + 1 <NEW_LINE> return s[key]; <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if s.__map_otos.has_key(key): <NEW_LINE> <INDENT> return s.__map_otos[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Cannot find string entry at 0x%x" % (key)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def dump(s, io): <NEW_LINE> <INDENT> return io.write('\x00'.join(s.entry) + '\x00') | @UTF Table String Table | 62599071a17c0f6771d5d813 |
class PrintOption(Option): <NEW_LINE> <INDENT> def __init__(self, key, value): <NEW_LINE> <INDENT> self.need_stat = False <NEW_LINE> self.print_title = False <NEW_LINE> self.fmt = [] <NEW_LINE> for arg in value.replace(',', ' ').split(): <NEW_LINE> <INDENT> self.fmt.append(arg) <NEW_LINE> if arg not in ['name', 'path']: <NEW_LINE> <INDENT> self.need_stat = True <NEW_LINE> <DEDENT> <DEDENT> if len(self.fmt) == 0: <NEW_LINE> <INDENT> self.fmt.append('path') <NEW_LINE> <DEDENT> <DEDENT> def requires(self): <NEW_LINE> <INDENT> return _REQUIRES_STAT if self.need_stat else _REQUIRES_PATH <NEW_LINE> <DEDENT> def execute(self, fullpath, fstat): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for arg in self.fmt: <NEW_LINE> <INDENT> if arg == 'path': <NEW_LINE> <INDENT> result.append(fullpath) <NEW_LINE> <DEDENT> elif arg == 'name': <NEW_LINE> <INDENT> result.append(os.path.basename(fullpath)) <NEW_LINE> <DEDENT> elif arg == 'size': <NEW_LINE> <INDENT> result.append(fstat[stat.ST_SIZE]) <NEW_LINE> <DEDENT> elif arg == 'type': <NEW_LINE> <INDENT> result.append(_FILE_TYPES.get(stat.S_IFMT(fstat[stat.ST_MODE]), '?')) <NEW_LINE> <DEDENT> elif arg == 'mode': <NEW_LINE> <INDENT> result.append(fstat[stat.ST_MODE]) <NEW_LINE> <DEDENT> elif arg == 'mtime': <NEW_LINE> <INDENT> result.append(fstat[stat.ST_MTIME]) <NEW_LINE> <DEDENT> elif arg == 'user': <NEW_LINE> <INDENT> uid = fstat[stat.ST_UID] <NEW_LINE> try: <NEW_LINE> <INDENT> result.append(pwd.getpwuid(uid).pw_name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> result.append(uid) <NEW_LINE> <DEDENT> <DEDENT> elif arg == 'group': <NEW_LINE> <INDENT> gid = fstat[stat.ST_GID] <NEW_LINE> try: <NEW_LINE> <INDENT> result.append(grp.getgrgid(gid).gr_name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> result.append(gid) <NEW_LINE> <DEDENT> <DEDENT> elif arg == 'md5': <NEW_LINE> <INDENT> if stat.S_ISREG(fstat[stat.ST_MODE]): <NEW_LINE> <INDENT> with open(fullpath, 'rb') as f: <NEW_LINE> <INDENT> buf = f.read(8192) <NEW_LINE> h = hashlib.md5() <NEW_LINE> while buf: <NEW_LINE> <INDENT> h.update(buf) <NEW_LINE> buf = f.read(8192) <NEW_LINE> <DEDENT> <DEDENT> result.append(h.hexdigest()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append('') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(result) == 1: <NEW_LINE> <INDENT> return result[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return result | Return information about a matched file.
Print options are specified as a comma and/or space separated list of
one or more of the following:
group = group name
md5 = MD5 digest of file contents
mode = file mode (as integer)
mtime = last modification time (as time_t)
name = file basename
path = file absolute path
size = file size in bytes
type = file type
user = user name | 62599071a8370b77170f1c9b |
class SoftLayerTransactionStatus(BaseSoftLayerObject): <NEW_LINE> <INDENT> def __init__(self, obj): <NEW_LINE> <INDENT> super(SoftLayerTransactionStatus, self).__init__(obj) <NEW_LINE> <DEDENT> @softlayer_property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.data['friendlyName'] if 'friendlyName' in self.data else self.data['name'] | SoftLayer_Provisioning_Version1_Transaction_Status | 6259907132920d7e50bc7918 |
class SingleDataset(BaseDataset): <NEW_LINE> <INDENT> def __init__(self, opt): <NEW_LINE> <INDENT> BaseDataset.__init__(self, opt) <NEW_LINE> self.A_paths = sorted(make_dataset(opt.dataroot, opt.max_dataset_size)) <NEW_LINE> input_nc = self.opt.output_nc if self.opt.direction == 'BtoA' else self.opt.input_nc <NEW_LINE> self.transform = get_transform(opt, grayscale=(input_nc == 1)) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> A_path = self.A_paths[index] <NEW_LINE> A_img = Image.open(A_path).convert('RGB') <NEW_LINE> A = self.transform(A_img) <NEW_LINE> return {'A': A, 'A_paths': A_path} <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.A_paths) | This dataset class can load a set of images specified by the path --dataroot /path/to/data.
It can be used for generating CycleGAN results only for one side with the model option '-model test'. | 62599071091ae35668706508 |
class BrianObjectException(Exception): <NEW_LINE> <INDENT> def __init__(self, message, brianobj, original_exception): <NEW_LINE> <INDENT> self._brian_message = message <NEW_LINE> self._brian_objname = brianobj.name <NEW_LINE> self._brian_origexc = '\n'.join(traceback.format_exception_only(type(original_exception), original_exception)) <NEW_LINE> self._brian_origtb = traceback.format_exc() <NEW_LINE> self._brian_objcreate = brianobj._creation_stack <NEW_LINE> logger.diagnostic('Error was encountered with object "{objname}":\n{fullstack}'.format( objname=self._brian_objname, fullstack=brianobj._full_creation_stack)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('Original error and traceback:\n{origtb}\n' 'Error encountered with object named "{objname}".\n' '{objcreate}\n\n' '{message} {origexc}' '(See above for original error message and traceback.)' ).format(origtb=self._brian_origtb, origexc=self._brian_origexc, objname=self._brian_objname, message=self._brian_message, objcreate=self._brian_objcreate) | High level exception that adds extra Brian-specific information to exceptions
This exception should only be raised at a fairly high level in Brian code to
pass information back to the user. It adds extra information about where a
`BrianObject` was defined to better enable users to locate the source of
problems.
You should use the `brian_object_exception` function to raise this, and
it should only be raised in an ``except`` block handling a prior
exception.
Parameters
----------
message : str
Additional error information to add to the original exception.
brianobj : BrianObject
The object that caused the error to happen.
original_exception : Exception
The original exception that was raised. | 625990717b25080760ed894c |
class Test_FileConfigFiller: <NEW_LINE> <INDENT> pass | >>> try_parse('key = value', 'Found config option outside of config section')
caught
>>> try_parse(' key = value', 'Invalid indentation')
caught
>>> try_parse('[section', 'Unable to parse config section')
caught
>>> try_parse('[section]\nkey', 'Please use "key = value" syntax or indent values!')
caught | 6259907155399d3f05627dea |
class PgDB(BaseDB): <NEW_LINE> <INDENT> def __init__(self, host, user, password, db): <NEW_LINE> <INDENT> BaseDB.__init__(self) <NEW_LINE> self._verify_connection(host, user, password, db) <NEW_LINE> <DEDENT> def _verify_connection(self, host, user, password, db=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.connection = psycopg2.connect("dbname=%(db)s user=%(user)s " "password=%(password)s " "host=%(host)s" % {'db': db, 'user': user, 'password': password, 'host': host}) <NEW_LINE> self.host = host <NEW_LINE> self.user = user <NEW_LINE> self.password = password <NEW_LINE> self.db = db <NEW_LINE> self.is_valid = True <NEW_LINE> <DEDENT> except psycopg2.DatabaseError: <NEW_LINE> <INDENT> self.is_valid = False <NEW_LINE> <DEDENT> except Exception as ee: <NEW_LINE> <INDENT> print(ee) <NEW_LINE> self.is_valid = False <NEW_LINE> <DEDENT> <DEDENT> def _verify_field(self, field): <NEW_LINE> <INDENT> if isinstance(field, memoryview): <NEW_LINE> <INDENT> return field.tobytes() <NEW_LINE> <DEDENT> return field <NEW_LINE> <DEDENT> def query(self, query, params=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._query(query, params) <NEW_LINE> <DEDENT> except psycopg2.DatabaseError as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> raise e <NEW_LINE> <DEDENT> except Exception as ee: <NEW_LINE> <INDENT> print(ee) <NEW_LINE> raise ee <NEW_LINE> <DEDENT> <DEDENT> def query_with_columns(self, query, params=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._query_with_columns(query, params) <NEW_LINE> <DEDENT> except psycopg2.DatabaseError as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> raise e <NEW_LINE> <DEDENT> except Exception as ee: <NEW_LINE> <INDENT> print(ee) <NEW_LINE> raise ee <NEW_LINE> <DEDENT> <DEDENT> def command(self, command, params=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._command(command, params) <NEW_LINE> <DEDENT> except psycopg2.IntegrityError as e: <NEW_LINE> <INDENT> se = str(e) <NEW_LINE> if str(e).startswith('duplicate key value ' 'violates unique constraint'): <NEW_LINE> <INDENT> raise BaseDBException('DUPLICATE KEY\n' + se) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise psycopg2.IntegrityError(se) <NEW_LINE> <DEDENT> <DEDENT> except psycopg2.DatabaseError as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> raise e <NEW_LINE> <DEDENT> except Exception as ee: <NEW_LINE> <INDENT> print(ee) <NEW_LINE> raise ee | Class for PostgreSQL database.
:param host: database host address.
:param user: database username.
:param password: database password.
:param db: database name.
| 6259907171ff763f4b5e907a |
class FinancialCodeAssignmentFormTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.codes = create_financial_codes() <NEW_LINE> <DEDENT> def test_expense_code_select(self): <NEW_LINE> <INDENT> transaction_type = self.codes[0].financial_code_group.type <NEW_LINE> system_id = self.codes[0].financial_code_group.budget_year.financial_code_system.id <NEW_LINE> form = FinancialCodeAssignmentForm(transaction_type=transaction_type, system=system_id) <NEW_LINE> code_select = form.fields["code"] <NEW_LINE> self.assertEqual(len(code_select.choices), 2) <NEW_LINE> for option_grouping in code_select.choices: <NEW_LINE> <INDENT> if option_grouping[0]: <NEW_LINE> <INDENT> for code in option_grouping[1]: <NEW_LINE> <INDENT> code_instance = FinancialCode.objects.get(id=code[0]) <NEW_LINE> self.assertEqual( code_instance.financial_code_group.type, "e" ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def test_revenue_code_select(self): <NEW_LINE> <INDENT> transaction_type = self.codes[1].financial_code_group.type <NEW_LINE> system_id = self.codes[1].financial_code_group.budget_year.financial_code_system.id <NEW_LINE> form = FinancialCodeAssignmentForm(transaction_type=transaction_type, system=system_id) <NEW_LINE> code_select = form.fields["code"] <NEW_LINE> self.assertEqual(len(code_select.choices), 2) <NEW_LINE> for option_grouping in code_select.choices: <NEW_LINE> <INDENT> if option_grouping[0]: <NEW_LINE> <INDENT> for code in option_grouping[1]: <NEW_LINE> <INDENT> code_instance = FinancialCode.objects.get(id=code[0]) <NEW_LINE> self.assertEqual( code_instance.financial_code_group.type, "r" ) | Test functions for the FinancialCode model | 625990717c178a314d78e854 |
class TestPluginApi: <NEW_LINE> <INDENT> config = 'tasks: {}' <NEW_LINE> def test_unknown_plugin(self): <NEW_LINE> <INDENT> with pytest.raises(plugin.DependencyError): <NEW_LINE> <INDENT> plugin.get_plugin_by_name('nonexisting_plugin') <NEW_LINE> <DEDENT> <DEDENT> def test_unknown_plugin(self): <NEW_LINE> <INDENT> with pytest.raises(plugin.DependencyError): <NEW_LINE> <INDENT> plugin.get('nonexisting_plugin', 'test') <NEW_LINE> <DEDENT> <DEDENT> def test_no_dupes(self): <NEW_LINE> <INDENT> plugin.load_plugins() <NEW_LINE> assert plugin.PluginInfo.dupe_counter == 0, "Duplicate plugin names, see log" <NEW_LINE> <DEDENT> def test_load(self): <NEW_LINE> <INDENT> plugin.load_plugins() <NEW_LINE> plugin_path = os.path.dirname(plugins.__file__) <NEW_LINE> plugin_modules = set( os.path.basename(i) for k in ("/*.py", "/*/*.py") for i in glob.glob(plugin_path + k) ) <NEW_LINE> assert len(plugin_modules) >= 10, "Less than 10 plugin modules looks fishy" <NEW_LINE> <DEDENT> def test_register_by_class(self, execute_task): <NEW_LINE> <INDENT> class TestPlugin: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class Oneword: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class TestHTML: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> assert 'test_plugin' not in plugin.plugins <NEW_LINE> @event('plugin.register') <NEW_LINE> def rp(): <NEW_LINE> <INDENT> plugin.register(TestPlugin, api_ver=2) <NEW_LINE> plugin.register(Oneword, api_ver=2) <NEW_LINE> plugin.register(TestHTML, api_ver=2) <NEW_LINE> <DEDENT> plugin.load_plugins() <NEW_LINE> assert 'test_plugin' in plugin.plugins <NEW_LINE> assert 'oneword' in plugin.plugins <NEW_LINE> assert 'test_html' in plugin.plugins | Contains plugin api related tests | 625990717047854f46340c89 |
class Writable(object): <NEW_LINE> <INDENT> def __init__(self, fname): <NEW_LINE> <INDENT> self.orig_mode = os.stat(fname).st_mode <NEW_LINE> self.change_needed = ~(self.orig_mode & stat.S_IWUSR) <NEW_LINE> self.fname = fname <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.change_needed: <NEW_LINE> <INDENT> os.chmod(self.fname, self.orig_mode | stat.S_IWUSR) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> if self.change_needed: <NEW_LINE> <INDENT> os.chmod(self.fname, self.orig_mode) | Context manager making file writable.
It's not safe to use it concurrently on the same file, but nesting is ok. | 62599071baa26c4b54d50b7d |
class Event(object): <NEW_LINE> <INDENT> def __init__(self, event_type=None): <NEW_LINE> <INDENT> self.event_type = event_type <NEW_LINE> self.event_data_dict = {} | Event object | 6259907126068e7796d4e20d |
class Scan(models.Model): <NEW_LINE> <INDENT> pdf = models.FileField(upload_to=TMP_UPLOAD, blank=True) <NEW_LINE> author = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="scans_authored", blank=True, null=True) <NEW_LINE> uploader = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="scans_uploaded") <NEW_LINE> org = models.ForeignKey('profiles.Organization', null=True) <NEW_LINE> processing_complete = models.BooleanField(default=False) <NEW_LINE> under_construction = models.BooleanField(default=False) <NEW_LINE> created = models.DateTimeField(default=datetime.datetime.now) <NEW_LINE> modified = models.DateTimeField(default=datetime.datetime.now) <NEW_LINE> source_id = models.CharField(blank=True, max_length=100) <NEW_LINE> objects = OrgManager() <NEW_LINE> class QuerySet(OrgQuerySet): <NEW_LINE> <INDENT> orgs = ["author__organization", "org"] <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.modified = datetime.datetime.now() <NEW_LINE> super(Scan, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> lock = self.editlock_set.all()[0].to_dict() <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> lock = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> code = self.pendingscan.code <NEW_LINE> <DEDENT> except PendingScan.DoesNotExist: <NEW_LINE> <INDENT> code = None <NEW_LINE> <DEDENT> return { 'id': self.pk, 'pdf': self.pdf.url, 'author': self.author.profile.to_dict() if self.author else None, 'uploader': self.uploader.profile.to_dict(), 'processing_complete': self.processing_complete, 'under_construction': self.under_construction, 'created': self.created.isoformat() if self.created else None, 'modified': self.modified.isoformat() if self.modified else None, 'pages': [p.to_dict() for p in self.scanpage_set.all()], 'url': self.get_absolute_url(), 'edit_url': self.get_edit_url(), 'lock': lock, 'pendingscan_code': code, } <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse("moderation.home") + "#/process/scan/{0}".format(self.pk) <NEW_LINE> <DEDENT> def get_edit_url(self): <NEW_LINE> <INDENT> return self.get_absolute_url() <NEW_LINE> <DEDENT> def full_delete(self, filesonly=False): <NEW_LINE> <INDENT> for doc in self.document_set.all(): <NEW_LINE> <INDENT> doc.full_delete(filesonly) <NEW_LINE> <DEDENT> for page in self.scanpage_set.all(): <NEW_LINE> <INDENT> page.full_delete(filesonly) <NEW_LINE> <DEDENT> if self.pdf: <NEW_LINE> <INDENT> self.pdf.delete() <NEW_LINE> <DEDENT> if not filesonly: <NEW_LINE> <INDENT> self.delete() <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['created'] <NEW_LINE> permissions = ( ('view_raw_scans', 'View raw scans'), ) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "(%s) %s" % (self.pk or "", self.pdf.name) | This is the raw scan with envelope and all. | 625990718a43f66fc4bf3a66 |
class PycodestyleTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._style = pycodestyle.StyleGuide( paths=[os.path.join(ROOT_DIR, 'testsuite')], select='E,W', quiet=True) <NEW_LINE> <DEDENT> def test_doctest(self): <NEW_LINE> <INDENT> import doctest <NEW_LINE> fail_d, done_d = doctest.testmod( pycodestyle, verbose=False, report=False ) <NEW_LINE> self.assertTrue(done_d, msg='tests not found') <NEW_LINE> self.assertFalse(fail_d, msg='%s failure(s)' % fail_d) <NEW_LINE> <DEDENT> def test_selftest(self): <NEW_LINE> <INDENT> fail_s, done_s = selftest(self._style.options) <NEW_LINE> self.assertTrue(done_s, msg='tests not found') <NEW_LINE> self.assertFalse(fail_s, msg='%s failure(s)' % fail_s) <NEW_LINE> <DEDENT> def test_checkers_testsuite(self): <NEW_LINE> <INDENT> init_tests(self._style) <NEW_LINE> report = self._style.check_files() <NEW_LINE> self.assertFalse(report.total_errors, msg='%s failure(s)' % report.total_errors) <NEW_LINE> <DEDENT> def test_own_dog_food(self): <NEW_LINE> <INDENT> files = [pycodestyle.__file__.rstrip('oc'), __file__.rstrip('oc'), os.path.join(ROOT_DIR, 'setup.py')] <NEW_LINE> report = self._style.init_report(pycodestyle.StandardReport) <NEW_LINE> report = self._style.check_files(files) <NEW_LINE> self.assertEqual(list(report.messages.keys()), ['W504'], msg='Failures: %s' % report.messages) | Test the standard errors and warnings (E and W). | 625990712c8b7c6e89bd50b9 |
class SasAuthentication(Authentication): <NEW_LINE> <INDENT> _validation = { 'type': {'required': True}, } <NEW_LINE> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'sas_uri': {'key': 'properties.sasUri', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, sas_uri: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(SasAuthentication, self).__init__(**kwargs) <NEW_LINE> self.type = 'Sas' <NEW_LINE> self.sas_uri = sas_uri | Defines the properties to access the artifacts using an Azure Storage SAS URI.
All required parameters must be populated in order to send to Azure.
:param type: Required. The authentication type.Constant filled by server.
:type type: str
:param sas_uri: The SAS URI to the Azure Storage blob container. Any offset from the root of
the container to where the artifacts are located can be defined in the artifactRoot.
:type sas_uri: str | 62599071283ffb24f3cf517a |
class ApiAuthSvcDeleteApiKeyFailedError(ApiAuthSvcRequestError): <NEW_LINE> <INDENT> def __init__(self, identity=None) -> None: <NEW_LINE> <INDENT> message = "API key delete failed" <NEW_LINE> super().__init__(message, identity) | Key delete failed | 625990715fdd1c0f98e5f85a |
class Offer: <NEW_LINE> <INDENT> address_owner = "" <NEW_LINE> offer_id = 0 <NEW_LINE> item_id = 0 <NEW_LINE> price = 0 | A container object for storing the details of an offer. | 62599071d486a94d0ba2d891 |
class BinarySearchTreeNode: <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> self._left = None <NEW_LINE> self._right = None <NEW_LINE> <DEDENT> def add_child(self, data): <NEW_LINE> <INDENT> if data == self._data: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if data < self._data: <NEW_LINE> <INDENT> if self._left: <NEW_LINE> <INDENT> self._left.add_child(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._left = BinarySearchTreeNode(data) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self._right: <NEW_LINE> <INDENT> self._right.add_child(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._right = BinarySearchTreeNode(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def in_order_traversal(self): <NEW_LINE> <INDENT> elements = [] <NEW_LINE> if self._left: <NEW_LINE> <INDENT> elements += self._left.in_order_traversal() <NEW_LINE> <DEDENT> elements.append(self._data) <NEW_LINE> if self._right: <NEW_LINE> <INDENT> elements += self._right.in_order_traversal() <NEW_LINE> <DEDENT> return elements <NEW_LINE> <DEDENT> def search(self, value): <NEW_LINE> <INDENT> if value == self._data: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if value < self._data: <NEW_LINE> <INDENT> if self._left: <NEW_LINE> <INDENT> return self._left.search(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self._right: <NEW_LINE> <INDENT> return self._right.search(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def find_max(self): <NEW_LINE> <INDENT> if self._right is None: <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> return self._right.find_max() <NEW_LINE> <DEDENT> def find_min(self): <NEW_LINE> <INDENT> if self._left is None: <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> return self._left.find_min() <NEW_LINE> <DEDENT> def delete(self, value): <NEW_LINE> <INDENT> if value < self._data: <NEW_LINE> <INDENT> if self._left: <NEW_LINE> <INDENT> self._left = self._left.delete(value) <NEW_LINE> <DEDENT> <DEDENT> elif value > self._data: <NEW_LINE> <INDENT> if self._right: <NEW_LINE> <INDENT> self._right = self._right.delete(value) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self._left is None and self._right is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self._left is None: <NEW_LINE> <INDENT> return self._right <NEW_LINE> <DEDENT> if self._right is None: <NEW_LINE> <INDENT> return self._left <NEW_LINE> <DEDENT> min_value_of_right = self._right.find_min() <NEW_LINE> self._data = min_value_of_right <NEW_LINE> self._right = self._right.delete(min_value_of_right) <NEW_LINE> <DEDENT> return self | Binary Search Tree (BST)
It can be used for sorting arrays and remove duplicates
https://www.youtube.com/watch?v=lFq5mYUWEBk&list=PLeo1K3hjS3uu_n_a__MI_KktGTLYopZ12&index=10
https://www.youtube.com/watch?v=JnrbMQyGLiU&list=PLeo1K3hjS3uu_n_a__MI_KktGTLYopZ12&index=11 | 6259907160cbc95b063659d7 |
class Segment(object): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return '[profLength: %s,profHeight: %s,profPush: %s,profCode: %s,profSpeed: %s,profNoiseID: %s]' % (self.profLength,self.profHeight,self.profPush,self.profCode,self.profSpeed,self.profNoiseID) <NEW_LINE> <DEDENT> def __init__(self,profLength,profHeight,profPush,profCode,profSpeed,profNoiseID): <NEW_LINE> <INDENT> self.profLength = int(float(profLength)) <NEW_LINE> self.profHeight = int(float(profHeight)) <NEW_LINE> self.profPush = float(profPush) <NEW_LINE> self.profCode = profCode <NEW_LINE> self.profSpeed = int(float(profSpeed)) <NEW_LINE> self.profNoiseID = profNoiseID | classdocs | 625990714f6381625f19a112 |
class Registers(Enum): <NEW_LINE> <INDENT> zero = 0 <NEW_LINE> at = 1 <NEW_LINE> v0 = 2 <NEW_LINE> v1 = 3 <NEW_LINE> a0 = 4 <NEW_LINE> a1 = 5 <NEW_LINE> a2 = 6 <NEW_LINE> a3 = 7 <NEW_LINE> t0 = 8 <NEW_LINE> t1 = 9 <NEW_LINE> t2 = 10 <NEW_LINE> t3 = 11 <NEW_LINE> t4 = 12 <NEW_LINE> t5 = 13 <NEW_LINE> t6 = 14 <NEW_LINE> t7 = 15 <NEW_LINE> s0 = 16 <NEW_LINE> s1 = 17 <NEW_LINE> s2 = 18 <NEW_LINE> s3 = 19 <NEW_LINE> s4 = 20 <NEW_LINE> s5 = 21 <NEW_LINE> s6 = 22 <NEW_LINE> s7 = 23 <NEW_LINE> t8 = 24 <NEW_LINE> t9 = 25 <NEW_LINE> gp = 28 <NEW_LINE> sp = 29 <NEW_LINE> fp = 30 <NEW_LINE> ra = 31 <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> return '$' + self.name <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def register_num(register_name: str) -> int: <NEW_LINE> <INDENT> if register_name.startswith('$'): <NEW_LINE> <INDENT> register_name = register_name[1:] <NEW_LINE> <DEDENT> if register_name.isdigit(): <NEW_LINE> <INDENT> return Registers(int(register_name)).value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Registers[register_name].value | An enum representing the different register names/numbers in MIPS | 6259907123849d37ff852989 |
class HostsOrderByCommand(SubCommand): <NEW_LINE> <INDENT> name = "hosts:order-by" <NEW_LINE> def add_arguments(self, parser: ArgumentParser) -> None: <NEW_LINE> <INDENT> parser.add_argument( "properties", nargs="*", help="items will be sorted in order" ) <NEW_LINE> parser.add_argument( "--reverse", action="store_true", help="reverse the order after sort" ) <NEW_LINE> parser.add_argument("-i", "--indent", type=int, default=None) <NEW_LINE> <DEDENT> def __call__(self, args: Namespace) -> int: <NEW_LINE> <INDENT> print( Hosts.from_json(input()) .order_by(*args.properties, reverse=args.reverse) .to_json(indent=args.indent) ) <NEW_LINE> return 0 | [Hosts -> Hosts] sort the hosts by the given properties | 625990714e4d562566373cd9 |
class Omegabase: <NEW_LINE> <INDENT> def __init__(self, conn_string, max_records=20): <NEW_LINE> <INDENT> self.engine = create_engine(conn_string, convert_unicode=True) <NEW_LINE> self.connection_string = conn_string <NEW_LINE> self.max_records = max_records <NEW_LINE> <DEDENT> def start_call(self, url, session_id): <NEW_LINE> <INDENT> return run_transaction( sessionmaker(bind=self.engine), lambda session: start_call_txn(session, url, session_id)) <NEW_LINE> <DEDENT> def join_call(self, url): <NEW_LINE> <INDENT> return run_transaction( sessionmaker(bind=self.engine), lambda session: join_call_txn(session, url)) <NEW_LINE> <DEDENT> def leave_call(self, url): <NEW_LINE> <INDENT> return run_transaction( sessionmaker(bind=self.engine), lambda session: leave_call_txn(session, url)) <NEW_LINE> <DEDENT> def end_call(self, url): <NEW_LINE> <INDENT> return run_transaction( sessionmaker(bind=self.engine), lambda session: end_ride_txn(session, url)) <NEW_LINE> <DEDENT> def remove_view(self, url): <NEW_LINE> <INDENT> return run_transaction( sessionmaker(bind=self.engine), lambda session: remove_view_txn(session, url)) <NEW_LINE> <DEDENT> def add_view(self, url): <NEW_LINE> <INDENT> return run_transaction(sessionmaker(bind=self.engine), lambda session: add_view_txn(session, url)) <NEW_LINE> <DEDENT> def get_view(self, url): <NEW_LINE> <INDENT> return run_transaction(sessionmaker(bind=self.engine), lambda session: get_view_txn(session, url)) <NEW_LINE> <DEDENT> def show_tables(self): <NEW_LINE> <INDENT> return self.engine.table_names() | Wraps the database connection. The class methods wrap transactions. | 625990715fc7496912d48ed2 |
class UnhandledError(Exception): <NEW_LINE> <INDENT> pass | Raise for unhandled errors | 62599071f9cc0f698b1c5f34 |
class Command(BaseCommand): <NEW_LINE> <INDENT> def parse_date_from_path(self, path): <NEW_LINE> <INDENT> path_parts = path.split('/') <NEW_LINE> date = path_parts[-1] <NEW_LINE> m = re.match(r'^(\d\d\d\d)-(\d\d)-(\d\d)', date) <NEW_LINE> d = datetime.date( int(m.group(1)), int(m.group(2)), int(m.group(3)), ) <NEW_LINE> return d <NEW_LINE> <DEDENT> def handle(self, *args, **kwargs): <NEW_LINE> <INDENT> meeting_pages = Page.objects.filter(path__startswith='psf/records/board/minutes/') <NEW_LINE> for p in meeting_pages: <NEW_LINE> <INDENT> date = self.parse_date_from_path(p.path) <NEW_LINE> try: <NEW_LINE> <INDENT> m = Minutes.objects.get(date=date) <NEW_LINE> <DEDENT> except Minutes.DoesNotExist: <NEW_LINE> <INDENT> m = Minutes(date=date) <NEW_LINE> <DEDENT> m.content = p.content <NEW_LINE> m.content_markup_type = p.content_markup_type <NEW_LINE> m.is_published = True <NEW_LINE> m.save() <NEW_LINE> p.delete() | Move meeting notes from Pages to Minutes app | 625990714a966d76dd5f07bd |
class Ogr2OgrExecOutput(ExecOutput): <NEW_LINE> <INDENT> @Config(ptype=str, default=None, required=True) <NEW_LINE> def dest_data_source(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def dest_format(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def lco(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def spatial_extent(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def gfs_template(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=bool, default=False, required=False) <NEW_LINE> def cleanup_input(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=bool, default=False, required=False) <NEW_LINE> def always_apply_lco(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, configdict, section): <NEW_LINE> <INDENT> ExecOutput.__init__(self, configdict, section, consumes=[FORMAT.string, FORMAT.gdal_vsi_path]) <NEW_LINE> self.regex_vsi_filter = re.compile("^/vsi[a-z0-9_]+/.*", re.I) <NEW_LINE> self.ogr2ogr_cmd = 'ogr2ogr -f ' + self.dest_format + ' ' + self.dest_data_source <NEW_LINE> if self.spatial_extent: <NEW_LINE> <INDENT> self.ogr2ogr_cmd += ' -spat ' + self.spatial_extent <NEW_LINE> <DEDENT> if self.options: <NEW_LINE> <INDENT> self.ogr2ogr_cmd += ' ' + self.options <NEW_LINE> <DEDENT> self.first_run = True <NEW_LINE> <DEDENT> def write(self, packet): <NEW_LINE> <INDENT> if packet.data is None: <NEW_LINE> <INDENT> return packet <NEW_LINE> <DEDENT> ogr2ogr_cmd = self.ogr2ogr_cmd <NEW_LINE> if self.lco and (self.first_run or self.always_apply_lco): <NEW_LINE> <INDENT> ogr2ogr_cmd += ' ' + self.lco <NEW_LINE> self.first_run = False <NEW_LINE> <DEDENT> if type(packet.data) is list: <NEW_LINE> <INDENT> for item in packet.data: <NEW_LINE> <INDENT> self.execute(ogr2ogr_cmd, item) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.execute(ogr2ogr_cmd, packet.data) <NEW_LINE> <DEDENT> return packet <NEW_LINE> <DEDENT> def execute(self, ogr2ogr_cmd, file_path): <NEW_LINE> <INDENT> file_ext = os.path.splitext(file_path) <NEW_LINE> gfs_path = file_ext[0] + '.gfs' <NEW_LINE> if self.gfs_template and not self.regex_vsi_filter.match(file_path): <NEW_LINE> <INDENT> shutil.copy(self.gfs_template, gfs_path) <NEW_LINE> <DEDENT> self.execute_cmd(ogr2ogr_cmd + ' ' + file_path) <NEW_LINE> if self.cleanup_input and not self.regex_vsi_filter.match(file_path): <NEW_LINE> <INDENT> os.remove(file_path) <NEW_LINE> if gfs_path and os.path.exists(gfs_path): <NEW_LINE> <INDENT> os.remove(gfs_path) | Executes an Ogr2Ogr command.
Input is a file name to be processed.
Output by calling Ogr2Ogr command.
consumes=FORMAT.string | 6259907155399d3f05627deb |
class LevelShutdown(_ListenerManager): <NEW_LINE> <INDENT> manager = level_shutdown_listener_manager | Register/unregister a LevelShutdown listener. | 62599071aad79263cf430089 |
class HttpBinInitValidator(BaseValidator, ValidatorRedisSpider): <NEW_LINE> <INDENT> name = 'init' <NEW_LINE> urls = [ 'http://httpbin.org/ip', 'https://httpbin.org/ip', ] <NEW_LINE> use_set = False <NEW_LINE> task_queue = INIT_HTTP_QUEUE <NEW_LINE> https_tasks = ['https', 'weibo', 'zhihu'] <NEW_LINE> http_tasks = ['http'] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.origin_ip = requests.get(self.urls[1]).json().get('origin') <NEW_LINE> <DEDENT> def is_transparent(self, response): <NEW_LINE> <INDENT> if not response.body_as_unicode(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ip = json.loads(response.body_as_unicode()).get('origin') <NEW_LINE> if self.origin_ip in ip: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> except (AttributeError, JSONDecodeError): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def set_item_queue(self, url, proxy, score, incr, speed=0): <NEW_LINE> <INDENT> items = list() <NEW_LINE> tasks = self.https_tasks if 'https' in url else self.http_tasks <NEW_LINE> for task in tasks: <NEW_LINE> <INDENT> score_item = ProxyScoreItem(url=proxy, score=score, incr=incr) <NEW_LINE> ttl_item = ProxyVerifiedTimeItem(url=proxy, verified_time=int(time.time()), incr=incr) <NEW_LINE> speed_item = ProxySpeedItem(url=proxy, response_time=speed, incr=incr) <NEW_LINE> score_item['queue'] = SCORE_MAPS.get(task) <NEW_LINE> ttl_item['queue'] = TTL_MAPS.get(task) <NEW_LINE> speed_item['queue'] = SPEED_MAPS.get(task) <NEW_LINE> items.append(score_item) <NEW_LINE> items.append(ttl_item) <NEW_LINE> items.append(speed_item) <NEW_LINE> <DEDENT> return items | This validator does initial work for ip resources | 625990711f037a2d8b9e54d4 |
class Cart(object): <NEW_LINE> <INDENT> def __init__(self, session_storage=None): <NEW_LINE> <INDENT> self.storage = OrderedDict({}) <NEW_LINE> self.session_storage = session_storage <NEW_LINE> pids = self.session_storage.keys() <NEW_LINE> products = Product.objects.filter(pk__in=pids) <NEW_LINE> self.queryset = products <NEW_LINE> self.storage = OrderedDict({p: session_storage["%s"%(p.pk)] for p in products}) <NEW_LINE> <DEDENT> def add(self, product, quantity): <NEW_LINE> <INDENT> if not product.enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if quantity == 0: <NEW_LINE> <INDENT> self.remove(product) <NEW_LINE> return <NEW_LINE> <DEDENT> if product in self.storage: <NEW_LINE> <INDENT> self.storage[product] += quantity <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.storage[product] = quantity <NEW_LINE> <DEDENT> if self.session_storage is not None: <NEW_LINE> <INDENT> if u"%s"%(product.pk) in self.session_storage: <NEW_LINE> <INDENT> self.session_storage[u"%s"%(product.pk)] += quantity <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.session_storage[u"%s"%(product.pk)] = quantity <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update(self, product, quantity): <NEW_LINE> <INDENT> if not product.enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if quantity == 0: <NEW_LINE> <INDENT> self.remove(product) <NEW_LINE> return <NEW_LINE> <DEDENT> self.storage[product] = quantity <NEW_LINE> if self.session_storage is not None: <NEW_LINE> <INDENT> self.session_storage[u"%s"%(product.pk)] = quantity <NEW_LINE> <DEDENT> <DEDENT> def remove(self, product): <NEW_LINE> <INDENT> if product in self.storage: <NEW_LINE> <INDENT> del self.storage[product] <NEW_LINE> <DEDENT> if self.session_storage is not None and u"%s"%(product.pk) in self.session_storage: <NEW_LINE> <INDENT> del self.session_storage[u"%s"%(product.pk)] <NEW_LINE> <DEDENT> <DEDENT> def remove_all(self): <NEW_LINE> <INDENT> self.storage = {} <NEW_LINE> self.session_storage = {} <NEW_LINE> <DEDENT> def to_session_storage(self): <NEW_LINE> <INDENT> return {u"%s"%(k.pk): v for k, v in self.storage.iteritems()} <NEW_LINE> <DEDENT> def get_total(self): <NEW_LINE> <INDENT> return sum([p.get_price() * q for p, q in self.storage.iteritems()]) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s"%(self.storage) | _cart_instance = None
def __new__(cls, *args, **kwargs):
if not cls._cart_instance:
cls._cart_instance = super(Cart, cls).__new__(
cls, *args, **kwargs)
return cls._cart_instance | 625990713317a56b869bf1ae |
class Service(_messages.Message): <NEW_LINE> <INDENT> displayName = _messages.StringField(1) <NEW_LINE> name = _messages.StringField(2) <NEW_LINE> serviceId = _messages.StringField(3) | Encapsulates a single service in Google Cloud Platform.
Fields:
displayName: A human readable display name for this service.
name: The resource name for the service. Example:
"services/DA34-426B-A397"
serviceId: The identifier for the service. Example: "DA34-426B-A397" | 6259907126068e7796d4e20f |
class MonitoringVMForm(forms.Form): <NEW_LINE> <INDENT> vm_id = forms.CharField(widget=forms.HiddenInput()) <NEW_LINE> time = forms.CharField(widget=forms.HiddenInput()) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(MonitoringVMForm, self).__init__(*args, **kwargs) <NEW_LINE> self.fields['stat_name'] = forms.MultipleChoiceField(choices=stat_names_reversed.items(), widget=My2CheckboxSelectMultiple, label=_('Statistic'), initial=['0']) <NEW_LINE> self.fields['resolution'] = forms.ChoiceField(choices=stat_resolutions_reversed.items(), label=_('Time range')) <NEW_LINE> self.fields['resolution'].widget.attrs['class'] = 'small' <NEW_LINE> self.fields['stat_range'] = forms.ChoiceField(choices=stat_ranges_reversed.items(), label=_('Period')) <NEW_LINE> self.fields['stat_range'].widget.attrs['class'] = 'small' <NEW_LINE> <DEDENT> def clean_vm_id(self): <NEW_LINE> <INDENT> return int(self.cleaned_data['vm_id']) <NEW_LINE> <DEDENT> def clean_stat_name(self): <NEW_LINE> <INDENT> return [int(i) for i in self.cleaned_data['stat_name']] <NEW_LINE> <DEDENT> def clean_resolution(self): <NEW_LINE> <INDENT> return int(self.cleaned_data['resolution']) <NEW_LINE> <DEDENT> def clean_stat_range(self): <NEW_LINE> <INDENT> return int(self.cleaned_data['stat_range']) | Form for fetching the monitoring data for a selected virtual machine. | 62599071b7558d5895464b9d |
class HashDirectory(object): <NEW_LINE> <INDENT> def __init__(self, directory, exclude=None): <NEW_LINE> <INDENT> self.directory = directory <NEW_LINE> self.hashes = {} <NEW_LINE> self.exclude = [] if exclude is None else exclude <NEW_LINE> <DEDENT> def hash_files(self): <NEW_LINE> <INDENT> files_to_hash = [] <NEW_LINE> num_processes = multiprocessing.cpu_count() * 2 <NEW_LINE> logging.info('Discovering files in %s', self.directory) <NEW_LINE> for root, _, files in os.walk(self.directory): <NEW_LINE> <INDENT> for found_file in files: <NEW_LINE> <INDENT> full_path = os.path.join(root, found_file) <NEW_LINE> relative_path = os.path.relpath(full_path, self.directory) <NEW_LINE> if self.excluded(relative_path): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> logging.debug('Found %s', relative_path) <NEW_LINE> files_to_hash.append(relative_path) <NEW_LINE> <DEDENT> <DEDENT> self.hashes = {} <NEW_LINE> logging.debug('Switching current working directory to %s', self.directory) <NEW_LINE> old_cwd = os.getcwd() <NEW_LINE> os.chdir(self.directory) <NEW_LINE> logging.debug('Starting %s hash worker processes', num_processes) <NEW_LINE> terminating = multiprocessing.Event() <NEW_LINE> pool = multiprocessing.Pool(initializer=initializer, initargs=(terminating, ), processes=num_processes) <NEW_LINE> logging.info('Hashing %s files', len(files_to_hash)) <NEW_LINE> try: <NEW_LINE> <INDENT> results = [] <NEW_LINE> results = pool.map(hash_task, files_to_hash, num_processes*2) <NEW_LINE> logging.debug('Stopping hashing processes') <NEW_LINE> pool.close() <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> pool.terminate() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> logging.debug('Waiting for processes to stop') <NEW_LINE> pool.close() <NEW_LINE> pool.join() <NEW_LINE> logging.debug('Processes stopped') <NEW_LINE> <DEDENT> logging.debug('Switching current working directory back to %s', old_cwd) <NEW_LINE> os.chdir(old_cwd) <NEW_LINE> for item in results: <NEW_LINE> <INDENT> self.hashes[list(item.keys())[0]] = list(item.values())[0] <NEW_LINE> <DEDENT> return self.hashes <NEW_LINE> <DEDENT> def excluded(self, file_path): <NEW_LINE> <INDENT> for pattern in self.exclude: <NEW_LINE> <INDENT> if fnmatch.fnmatchcase(file_path, pattern): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False | Object to manage hashing files in a directory.
This object is responsible for walking the directory tree and
adding each file to a list. Once the directory walk has completed, each
file path is passed to hash_task(). After each file has been
hashed, this object will then create a Python dictionary of files with
their associated hash.
:param directory: Path to directory containing files
:type directory: str
:param exclude: list containing patterns to use to exclude files from
the manifest.
:type exclude: list
:return: Dictionary of file:hash
:rtype: dict | 625990715166f23b2e244ca7
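A minimal usage sketch for the HashDirectory record above, assuming the class is importable from its module; the directory path and exclude pattern are illustrative assumptions only.

hasher = HashDirectory('/data/photos', exclude=['*.tmp'])   # hypothetical path and pattern
hashes = hasher.hash_files()            # {'relative/path.jpg': '<digest>', ...}
for path, digest in sorted(hashes.items()):
    print(path, digest)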
@dataclass(frozen=True) <NEW_LINE> class Action: <NEW_LINE> <INDENT> cyclist: "Cyclist" | an action in Flamme Rouge | 6259907199cbb53fe68327bd |
class DescribeClusterNodesRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.InstanceId = None <NEW_LINE> self.NodeFlag = None <NEW_LINE> self.Offset = None <NEW_LINE> self.Limit = None <NEW_LINE> self.HardwareResourceType = None <NEW_LINE> self.SearchFields = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.InstanceId = params.get("InstanceId") <NEW_LINE> self.NodeFlag = params.get("NodeFlag") <NEW_LINE> self.Offset = params.get("Offset") <NEW_LINE> self.Limit = params.get("Limit") <NEW_LINE> self.HardwareResourceType = params.get("HardwareResourceType") <NEW_LINE> if params.get("SearchFields") is not None: <NEW_LINE> <INDENT> self.SearchFields = [] <NEW_LINE> for item in params.get("SearchFields"): <NEW_LINE> <INDENT> obj = SearchItem() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.SearchFields.append(obj) <NEW_LINE> <DEDENT> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | DescribeClusterNodes request parameter structure
| 62599071283ffb24f3cf517c |
class TestSquareMatrixMethods(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.squarematrix1 = SquareMatrix(2) <NEW_LINE> self.squarematrix2 = SquareMatrix(2, [1, 2, 3, 4]) <NEW_LINE> self.squarematrix3 = SquareMatrix(2, ['a', 2, 3, 'b']) <NEW_LINE> self.squarematrix4 = SquareMatrix(3) <NEW_LINE> <DEDENT> def test_square_matrix_init(self): <NEW_LINE> <INDENT> actual_result1 = str(self.squarematrix1) <NEW_LINE> expected_result1 = '[0, 0]\n[0, 0]' <NEW_LINE> self.assertEqual(actual_result1, expected_result1, 'Test if zero square matrix is created') <NEW_LINE> actual_result2 = str(self.squarematrix2) <NEW_LINE> expected_result2 = '[1, 2]\n[3, 4]' <NEW_LINE> self.assertEqual(actual_result2, expected_result2, 'Test if filled square matrix is created') <NEW_LINE> with self.assertRaises(MatrixError): <NEW_LINE> <INDENT> SquareMatrix(2, [2, 'a', 'b']) <NEW_LINE> <DEDENT> <DEDENT> def test_square_matrix_get_diagonal(self): <NEW_LINE> <INDENT> actual_result1 = self.squarematrix1.get_diagonal() <NEW_LINE> expected_result1 = [0, 0] <NEW_LINE> self.assertEqual(actual_result1, expected_result1, 'Test get diagonal for zero square matrix') <NEW_LINE> actual_result2 = self.squarematrix2.get_diagonal() <NEW_LINE> expected_result2 = [1, 4] <NEW_LINE> self.assertEqual(actual_result2, expected_result2, 'Test get diagonal for filled square matrix') <NEW_LINE> actual_result3 = self.squarematrix3.get_diagonal() <NEW_LINE> expected_result3 = ['a', 'b'] <NEW_LINE> self.assertEqual(actual_result3, expected_result3, 'Test get diagonal for square matrix with strings') <NEW_LINE> <DEDENT> def test_square_matrix_set_diagonal(self): <NEW_LINE> <INDENT> self.squarematrix1.set_diagonal([1, 2]) <NEW_LINE> actual_result1 = self.squarematrix1.get_diagonal() <NEW_LINE> expected_result1 = [1, 2] <NEW_LINE> self.assertEqual(actual_result1, expected_result1, 'Test set diagonal for zero square matrix') <NEW_LINE> self.squarematrix2.set_diagonal([5, 5]) <NEW_LINE> actual_result2 = self.squarematrix2.get_diagonal() <NEW_LINE> expected_result2 = [5, 5] <NEW_LINE> self.assertEqual(actual_result2, expected_result2, 'Test set diagonal for filled square matrix') <NEW_LINE> with self.assertRaises(MatrixError): <NEW_LINE> <INDENT> self.squarematrix1.set_diagonal([1, 2, 3]) <NEW_LINE> <DEDENT> <DEDENT> def test_square_matrix_determinant(self): <NEW_LINE> <INDENT> actual_result1 = self.squarematrix2.determinant() <NEW_LINE> expected_result1 = -2 <NEW_LINE> self.assertEqual(actual_result1, expected_result1, 'Test determinant') <NEW_LINE> with self.assertRaises(MatrixError): <NEW_LINE> <INDENT> self.squarematrix3.determinant() <NEW_LINE> <DEDENT> with self.assertRaises(MatrixError): <NEW_LINE> <INDENT> self.squarematrix4.determinant() <NEW_LINE> <DEDENT> <DEDENT> def test_square_matrix_transpose(self): <NEW_LINE> <INDENT> actual_result = self.squarematrix2.transpose() <NEW_LINE> expected_result = SquareMatrix(2, [1, 3, 2, 4]) <NEW_LINE> self.assertEqual(actual_result, expected_result, 'Test square matrix transpose') | Unittests for square matrix class methods | 62599071435de62698e9d6da |
class EncryptedDisk(RawDisk): <NEW_LINE> <INDENT> POSSIBLE_STATES = [] <NEW_LINE> def __init__( self, encryption_type=None, encryption_key=None, unencrypted_path=None, *args, **kwargs): <NEW_LINE> <INDENT> self.encryption_type = encryption_type <NEW_LINE> self.encryption_key = encryption_key <NEW_LINE> self.unencrypted_path = unencrypted_path <NEW_LINE> super(EncryptedDisk, self).__init__(*args, **kwargs) | Encrypted disk file evidence.
Attributes:
encryption_type: The type of encryption used, e.g. FileVault or Bitlocker.
encryption_key: A string of the encryption key used for this disk.
unencrypted_path: A string to the unencrypted local path | 62599071ad47b63b2c5a9121 |
class GroovyLexer(RegexLexer): <NEW_LINE> <INDENT> name = 'Groovy' <NEW_LINE> aliases = ['groovy'] <NEW_LINE> filenames = ['*.groovy'] <NEW_LINE> mimetypes = ['text/x-groovy'] <NEW_LINE> flags = re.MULTILINE | re.DOTALL <NEW_LINE> tokens = { 'root': [ (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' r'([a-zA-Z_][a-zA-Z0-9_]*)' r'(\s*)(\()', bygroups(using(this), Name.Function, Text, Operator)), (r'[^\S\n]+', Text), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator), (r'(assert|break|case|catch|continue|default|do|else|finally|for|' r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b', Keyword), (r'(abstract|const|enum|extends|final|implements|native|private|' r'protected|public|static|strictfp|super|synchronized|throws|' r'transient|volatile)\b', Keyword.Declaration), (r'(def|boolean|byte|char|double|float|int|long|short|void)\b', Keyword.Type), (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)), (r'(true|false|null)\b', Keyword.Constant), (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'), (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), (r'\$/((?!/\$).)*/\$', String), (r'/(\\\\|\\"|[^/])*/', String), (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char), (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)), (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label), (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name), (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-fA-F]+', Number.Hex), (r'[0-9]+L?', Number.Integer), (r'\n', Text) ], 'class': [ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop') ], 'import': [ (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop') ], } | For `Groovy <http://groovy.codehaus.org/>`_ source code.
*New in Pygments 1.5.* | 6259907192d797404e3897c5 |
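As a rough usage sketch, a Pygments lexer like the GroovyLexer above is normally passed to pygments.highlight together with a formatter; the snippet of Groovy source is an illustrative assumption.

from pygments import highlight
from pygments.formatters import TerminalFormatter

groovy_src = 'def greet(name) { println "Hello, $name" }'   # hypothetical input
print(highlight(groovy_src, GroovyLexer(), TerminalFormatter()))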
class WTChooseAnswerPage1(AllBasePage): <NEW_LINE> <INDENT> appPackage = 'com.langlib.ncee' <NEW_LINE> page_title_id = (By.ID, "{}:id/title_iframe_title_tv".format(appPackage)) <NEW_LINE> word_trans_list_num_id = (By.ID, "{}:id/fragment_word_trans_index_tv".format(appPackage)) <NEW_LINE> audio_icon_id = (By.ID, "{}:id/fragment_word_trans_detail_play_imagebtn".format(appPackage)) <NEW_LINE> answer_a_id = (By.ID, "{}:id/fragment_word_trans_detail_answer_a".format(appPackage)) <NEW_LINE> answer_b_id = (By.ID, "{}:id/fragment_word_trans_detail_answer_b".format(appPackage)) <NEW_LINE> answer_c_id = (By.ID, "{}:id/fragment_word_trans_detail_answer_c".format(appPackage)) <NEW_LINE> answer_d_id = (By.ID, "{}:id/fragment_word_trans_detail_answer_d".format(appPackage)) <NEW_LINE> i_dont_know_id = (By.ID, "{}:id/fragment_word_trans_detail_answer_e".format(appPackage)) <NEW_LINE> next_button_ele_id = "{}:id/fragment_word_trans_next_tv".format(appPackage) <NEW_LINE> word_trans_next_button_id = (By.ID, "{}:id/fragment_word_trans_next_tv".format(appPackage)) <NEW_LINE> word_trans_finish_btn_id = (By.ID, "{}:id/fragment_word_trans_done_tv".format(appPackage)) <NEW_LINE> def get_trans_list_text(self): <NEW_LINE> <INDENT> return self.getText(self.find_element(*self.word_trans_list_num_id)) <NEW_LINE> <DEDENT> def get_words_trans_lists_nums(self): <NEW_LINE> <INDENT> text = self.getText(self.find_element(*self.word_trans_list_num_id)) <NEW_LINE> text_regx = re.compile(r'.*\((\d+)\/(\d+)') <NEW_LINE> result = text_regx.search(text).groups() <NEW_LINE> current_num = result[0] <NEW_LINE> total_num = result[1] <NEW_LINE> return current_num, total_num <NEW_LINE> <DEDENT> def click_trans_audio_button(self): <NEW_LINE> <INDENT> self.find_element(*self.audio_icon_id).click() <NEW_LINE> sleep(0.5) <NEW_LINE> <DEDENT> def choose_answer(self, answers=None): <NEW_LINE> <INDENT> if answers.lower() == 'a': <NEW_LINE> <INDENT> self.find_element(*self.answer_a_id).click() <NEW_LINE> <DEDENT> if answers.lower() == 'b': <NEW_LINE> <INDENT> self.find_element(*self.answer_b_id).click() <NEW_LINE> <DEDENT> if answers.lower() == 'c': <NEW_LINE> <INDENT> self.find_element(*self.answer_c_id).click() <NEW_LINE> <DEDENT> if answers.lower() == 'd': <NEW_LINE> <INDENT> self.find_element(*self.answer_d_id).click() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.find_element(*self.i_dont_know_id).click() <NEW_LINE> <DEDENT> <DEDENT> def click_word_trans_next_button(self): <NEW_LINE> <INDENT> self.find_element(*self.word_trans_next_button_id).click() <NEW_LINE> sleep(2) <NEW_LINE> <DEDENT> def click_word_trans_finish_btn(self): <NEW_LINE> <INDENT> self.find_element(*self.word_trans_finish_btn_id).click() | Answer page for word listening-translation questions | 625990717047854f46340c8b
class StatsWrapper(gym.Wrapper): <NEW_LINE> <INDENT> def __init__(self, env, window=100): <NEW_LINE> <INDENT> super().__init__(env) <NEW_LINE> self.reward_hist = deque([0], maxlen=window + 1) <NEW_LINE> self.done_hist = deque([1], maxlen=window + 1) <NEW_LINE> self.total_steps = 0 <NEW_LINE> <DEDENT> def reset(self, **kwargs): <NEW_LINE> <INDENT> return super().reset(**kwargs) <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> m, r, d, info = super().step(action) <NEW_LINE> self.reward_hist.append(r) <NEW_LINE> self.done_hist.append(d) <NEW_LINE> self.total_steps += 1 <NEW_LINE> return m, r, d, info <NEW_LINE> <DEDENT> def stats(self): <NEW_LINE> <INDENT> returns = [0] <NEW_LINE> steps = [0] <NEW_LINE> for reward, done in zip(self.reward_hist, self.done_hist): <NEW_LINE> <INDENT> returns[-1] += reward <NEW_LINE> steps[-1] += 1 <NEW_LINE> if done: <NEW_LINE> <INDENT> returns.append(0) <NEW_LINE> steps.append(0) <NEW_LINE> <DEDENT> <DEDENT> returns = returns[1:-1] <NEW_LINE> steps = steps[1:-1] <NEW_LINE> return dict( episodes=len(returns), episode_length=np.mean(steps) if len(steps) else np.nan, returns=np.mean(returns) if len(returns) else np.nan, average_reward=np.mean(tuple(self.reward_hist)[1:]), ) | Compute running statistics (return, number of episodes, etc.) over a certain time window. | 62599071cc0a2c111447c73b |
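A short sketch of wrapping an environment with the StatsWrapper above; it assumes the classic 4-tuple gym step API that the wrapper itself expects, and the environment id is an illustrative assumption.

import gym

env = StatsWrapper(gym.make("CartPole-v1"), window=100)
obs = env.reset()
for _ in range(1000):
    obs, reward, done, info = env.step(env.action_space.sample())
    if done:
        obs = env.reset()
print(env.stats())   # episodes, episode_length, returns, average_reward over the window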
class AKFloatingRoundedAppbarAvatarItem(AKFloatingRoundedAppbarItemBase): <NEW_LINE> <INDENT> source = StringProperty() <NEW_LINE> text = StringProperty() <NEW_LINE> text_color = ListProperty() | This class is used to create a button with an image to be placed inside the app bar | 6259907144b2445a339b75c8 |
class ScaleAction(Model): <NEW_LINE> <INDENT> _validation = { 'direction': {'required': True}, 'type': {'required': True}, 'cooldown': {'required': True}, } <NEW_LINE> _attribute_map = { 'direction': {'key': 'direction', 'type': 'ScaleDirection'}, 'type': {'key': 'type', 'type': 'ScaleType'}, 'value': {'key': 'value', 'type': 'str'}, 'cooldown': {'key': 'cooldown', 'type': 'duration'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(ScaleAction, self).__init__(**kwargs) <NEW_LINE> self.direction = kwargs.get('direction', None) <NEW_LINE> self.type = kwargs.get('type', None) <NEW_LINE> self.value = kwargs.get('value', "1") <NEW_LINE> self.cooldown = kwargs.get('cooldown', None) | The parameters for the scaling action.
All required parameters must be populated in order to send to Azure.
:param direction: Required. the scale direction. Whether the scaling
action increases or decreases the number of instances. Possible values
include: 'None', 'Increase', 'Decrease'
:type direction: str or ~azure.mgmt.monitor.models.ScaleDirection
:param type: Required. the type of action that should occur when the scale
rule fires. Possible values include: 'ChangeCount', 'PercentChangeCount',
'ExactCount'
:type type: str or ~azure.mgmt.monitor.models.ScaleType
:param value: the number of instances that are involved in the scaling
action. This value must be 1 or greater. The default value is 1. Default
value: "1" .
:type value: str
:param cooldown: Required. the amount of time to wait since the last
scaling action before this action occurs. It must be between 1 week and 1
minute in ISO 8601 format.
:type cooldown: timedelta | 62599071be8e80087fbc0964 |
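A hedged construction example for the ScaleAction model above; the enum values are passed as plain strings here (the keyword-argument constructor stores whatever is given), and the surrounding autoscale rule wiring is omitted.

from datetime import timedelta

action = ScaleAction(
    direction="Increase",           # ScaleDirection value
    type="ChangeCount",             # ScaleType value
    value="2",                      # add two instances
    cooldown=timedelta(minutes=5),  # wait 5 minutes between scaling actions
)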
class RequestForbidden(RestClientError): <NEW_LINE> <INDENT> pass | Error indicating a 403 was raised on the server | 6259907160cbc95b063659d8 |
class MicrowaveRabiFloppingClock(QsimExperiment): <NEW_LINE> <INDENT> name = 'Microwave Rabi Flopping Clock' <NEW_LINE> exp_parameters = [] <NEW_LINE> exp_parameters.append(('RabiFlopping', 'scan')) <NEW_LINE> exp_parameters.append(('DopplerCooling', 'detuning')) <NEW_LINE> exp_parameters.append(('Transitions', 'main_cooling_369')) <NEW_LINE> exp_parameters.append(('Modes', 'state_detection_mode')) <NEW_LINE> exp_parameters.append(('StandardStateDetection', 'repetitions')) <NEW_LINE> exp_parameters.append(('StandardStateDetection', 'points_per_histogram')) <NEW_LINE> exp_parameters.append(('StandardStateDetection', 'state_readout_threshold')) <NEW_LINE> exp_parameters.extend(sequence.all_required_parameters()) <NEW_LINE> exp_parameters.remove(('MicrowaveInterrogation', 'duration')) <NEW_LINE> def initialize(self, cxn, context, ident): <NEW_LINE> <INDENT> self.ident = ident <NEW_LINE> self.pulser = cxn.pulser <NEW_LINE> <DEDENT> def run(self, cxn, context): <NEW_LINE> <INDENT> self.set_default_parameters() <NEW_LINE> self.setup_datavault('time', 'probability') <NEW_LINE> self.setup_grapher('Rabi Flopping qubit_0') <NEW_LINE> self.times = np.arange(0.1, 6.1 * self.pi_time, 3.2) <NEW_LINE> probs, times = [], [] <NEW_LINE> for i, duration in enumerate(self.times): <NEW_LINE> <INDENT> should_break = self.update_progress(i/float(len(self.times))) <NEW_LINE> if should_break: <NEW_LINE> <INDENT> self.pulser.line_trigger_state(False) <NEW_LINE> break <NEW_LINE> <DEDENT> self.p['MicrowaveInterrogation.duration'] = U(duration, 'us') <NEW_LINE> self.program_pulser(sequence) <NEW_LINE> [counts] = self.run_sequence() <NEW_LINE> if i % self.p.StandardStateDetection.points_per_histogram == 0: <NEW_LINE> <INDENT> hist = self.process_data(counts) <NEW_LINE> self.plot_hist(hist) <NEW_LINE> <DEDENT> pop = self.get_pop(counts) <NEW_LINE> self.dv.add(duration, pop) <NEW_LINE> probs.append(pop) <NEW_LINE> times.append(duration) <NEW_LINE> <DEDENT> popt, pcov = fit(self.rabi, times, probs, p0=[1.0, self.pi_time, 0.0, 0.0], bounds=(0.0, [1.0, 200.0, 3.14, 1.0])) <NEW_LINE> self.pv.set_parameter(('Pi_times', 'qubit_0', U(popt[1], 'us'))) <NEW_LINE> print('Updated qubit_0 pi_time to ' + str(popt[1])[:8] + ' microseconds') <NEW_LINE> return popt[1] <NEW_LINE> <DEDENT> def set_default_parameters(self): <NEW_LINE> <INDENT> self.p['OpticalPumping.method'] = 'Standard' <NEW_LINE> self.p['StandardStateDetection.repetitions'] = 400.0 <NEW_LINE> self.p['Line_Selection.qubit'] = 'qubit_0' <NEW_LINE> self.pi_time = self.pv.get_parameter('Pi_times', 'qubit_0')['us'] <NEW_LINE> <DEDENT> def rabi(self, t, A, pi_time, phase, offset): <NEW_LINE> <INDENT> return A * np.sin(np.pi * t /(2.0 * pi_time) + phase)**2 + offset <NEW_LINE> <DEDENT> def finalize(self, cxn, context): <NEW_LINE> <INDENT> pass | repeatedly prepare the |0> state, interrogate with resonant microwaves for
a variable time t and measure the population in the bright state | 62599071ac7a0e7691f73dbe |
class idgenerator(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "node.idgenerator" <NEW_LINE> bl_label = "ID Color Node Generator" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> bpy.ops.view3d.assign_material() <NEW_LINE> bpy.context.object.active_material.use_nodes = True <NEW_LINE> ACTOBJ=bpy.context.active_object <NEW_LINE> ACTMAT=ACTOBJ.material_slots[bpy.context.object.active_material_index].material <NEW_LINE> NODE=ACTMAT.node_tree.nodes.new(type='ShaderNodeValToRGB') <NEW_LINE> COLORS=30 <NEW_LINE> CHUNK=1/COLORS <NEW_LINE> I=0 <NEW_LINE> for ELEMENT in range(COLORS): <NEW_LINE> <INDENT> NODE.color_ramp.interpolation="CONSTANT" <NEW_LINE> ELEMENTO=NODE.color_ramp.elements.new(I) <NEW_LINE> ELEMENTO.color=(random.uniform(0,1),random.uniform(0,1),random.uniform(0,1),1) <NEW_LINE> I+=CHUNK <NEW_LINE> <DEDENT> return {'FINISHED'} | add a id colorramp node to node editor | 6259907101c39578d7f1439f |
class MLIODependenciesNotSatisfied(RuntimeError): <NEW_LINE> <INDENT> pass | Exception raised if a model cannot be loaded because dependencies are not satisfied | 6259907132920d7e50bc791c |
class PersonQuerySet(models.query.QuerySet): <NEW_LINE> <INDENT> def have_skills(self, skills): <NEW_LINE> <INDENT> for s in skills: <NEW_LINE> <INDENT> self = self.filter(qualification__skill=s) <NEW_LINE> <DEDENT> return self | Handles finding past, ongoing and upcoming events | 62599071a8370b77170f1c9f |
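A sketch of how the have_skills helper defined above could be used; it assumes Person.objects is backed by this queryset (e.g. via PersonQuerySet.as_manager()) and that Person and Skill models exist, both of which are assumptions for illustration.

required = Skill.objects.filter(name__in=["Git", "Python"])   # hypothetical Skill model
qualified = Person.objects.all().have_skills(required)        # persons qualified in every listed skill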
class GBM_By_Euler(GeometricBrownianMotion): <NEW_LINE> <INDENT> def __init__(self, f_s0, f_mu, f_sigma, f_time, f_steps, b_random_state=True): <NEW_LINE> <INDENT> super(GBM_By_Euler, self).__init__(f_s0=f_s0, f_mu=f_mu, f_sigma=f_sigma, f_time=f_time, f_steps=f_steps) <NEW_LINE> self.sigma_sqrt_t_eps = WienerProcess(f_sigma, f_time, f_steps) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> self.current_step += 1 <NEW_LINE> if self.current_step > 0: <NEW_LINE> <INDENT> dt = self.f_time/self.f_steps <NEW_LINE> mu_St_dt = self.f_mu * self.f_St * dt <NEW_LINE> sigma_St_sqrt_t_eps = self.f_St * self.sigma_sqrt_t_eps() <NEW_LINE> self.f_St += mu_St_dt + sigma_St_sqrt_t_eps <NEW_LINE> <DEDENT> return self.f_St | A representation of the Geometric Brownian Motion (GBM) simulated using
the Euler method. Besides the variance, this process might present a drift | 62599071aad79263cf43008b
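The class above applies the Euler update S(t+dt) = S(t) + mu*S(t)*dt + S(t)*sigma*sqrt(dt)*eps on each call; a usage sketch with purely illustrative parameters, assuming the parent GeometricBrownianMotion and WienerProcess classes from the same module are available.

gbm = GBM_By_Euler(f_s0=100.0, f_mu=0.05, f_sigma=0.2, f_time=1.0, f_steps=252)
path = [gbm() for _ in range(252)]   # one simulated daily price path over a year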
class Sort: <NEW_LINE> <INDENT> def __init__(self, data=None, size=15): <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> self._data = np.random.randint(0, 100, size) <NEW_LINE> self._size = size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._data = np.array(data) <NEW_LINE> self._size = len(data) <NEW_LINE> <DEDENT> <DEDENT> def is_sorted(self, left_i=None, right_i=None): <NEW_LINE> <INDENT> if left_i is None and right_i is None: <NEW_LINE> <INDENT> return np.all(self._data[:-1] <= self._data[1:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> array = self._data[left_i:right_i] <NEW_LINE> return np.all(array[:-1] <= array[1:]) <NEW_LINE> <DEDENT> <DEDENT> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return self._size | Contains an array of numbers and methods to retrieve information on array | 625990711b99ca40022901a0 |
class myClass(): <NEW_LINE> <INDENT> def __init__(self,name,age): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.age = age <NEW_LINE> global COUNT <NEW_LINE> COUNT +=1 <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def getAge(self): <NEW_LINE> <INDENT> return self.age <NEW_LINE> <DEDENT> def printDetails(self): <NEW_LINE> <INDENT> print("Name: " + str(self.getName()) + "\n" + "Age: " + str(self.getAge()) + "\n" + "Count: " + str(COUNT)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "This class is great!" | myClass allows you to create student objects that have both an age
and a name. The COUNT variable is incremented to maintain a running
total of students. | 625990711f037a2d8b9e54d5 |
class AvailabilitySymlinks(object): <NEW_LINE> <INDENT> def __init__(self, dir_a, dir_e, supports_activation, ignore_ext=[]): <NEW_LINE> <INDENT> self.dir_a, self.dir_e, self.ignore_ext = dir_a, dir_e, ignore_ext <NEW_LINE> self.supports_activation = supports_activation <NEW_LINE> <DEDENT> def list_available(self): <NEW_LINE> <INDENT> return [x for x in sorted(os.listdir(self.dir_a)) if not os.path.isdir(os.path.join(self.dir_a, x)) and not os.path.splitext(os.path.join(self.dir_a, x))[-1] in self.ignore_ext] <NEW_LINE> <DEDENT> def is_enabled(self, entry): <NEW_LINE> <INDENT> if not self.supports_activation: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.find_link(entry) is not None <NEW_LINE> <DEDENT> def get_path(self, entry): <NEW_LINE> <INDENT> return os.path.abspath(os.path.join(self.dir_a, entry)) <NEW_LINE> <DEDENT> def find_link(self, entry): <NEW_LINE> <INDENT> path = self.get_path(entry) <NEW_LINE> for e in os.listdir(self.dir_e): <NEW_LINE> <INDENT> if os.path.abspath(os.path.realpath(os.path.join(self.dir_e, e))) == path: <NEW_LINE> <INDENT> return e <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def enable(self, entry): <NEW_LINE> <INDENT> if not self.supports_activation: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> e = self.find_link(entry) <NEW_LINE> if not e: <NEW_LINE> <INDENT> link_path = os.path.join(self.dir_e, entry) <NEW_LINE> if not os.path.exists(link_path): <NEW_LINE> <INDENT> os.symlink(self.get_path(entry), link_path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def disable(self, entry): <NEW_LINE> <INDENT> if not self.supports_activation: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> e = self.find_link(entry) <NEW_LINE> if e: <NEW_LINE> <INDENT> os.unlink(os.path.join(self.dir_e, e)) <NEW_LINE> <DEDENT> <DEDENT> def rename(self, old, new): <NEW_LINE> <INDENT> on = self.is_enabled(old) <NEW_LINE> self.disable(old) <NEW_LINE> os.rename(self.get_path(old), self.get_path(new)) <NEW_LINE> if on: <NEW_LINE> <INDENT> self.enable(new) <NEW_LINE> <DEDENT> <DEDENT> def delete(self, entry): <NEW_LINE> <INDENT> self.disable(entry) <NEW_LINE> os.unlink(self.get_path(entry)) <NEW_LINE> <DEDENT> def open(self, entry, mode='r'): <NEW_LINE> <INDENT> return open(os.path.join(self.dir_a, entry), mode) <NEW_LINE> <DEDENT> def exists(self): <NEW_LINE> <INDENT> return os.path.exists(self.dir_a) and os.path.exists(self.dir_e) | Manage directories of following style::
--sites.available
|-a.site
--b.site
--sites.enabled
--a.site -> ../sites.available/a.site | 6259907191f36d47f2231af9 |
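A usage sketch for the AvailabilitySymlinks record above; the nginx-style available/enabled directory paths are an assumption chosen only to make the example concrete.

sites = AvailabilitySymlinks('/etc/nginx/sites-available',
                             '/etc/nginx/sites-enabled',
                             supports_activation=True)
for entry in sites.list_available():
    state = 'enabled' if sites.is_enabled(entry) else 'disabled'
    print(entry, state)
sites.enable('a.site')    # creates the symlink in sites-enabled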
class JobTrialSpace1D(JobTrialSpace2D): <NEW_LINE> <INDENT> def __init__(self, shape, rootDir, **kwargs): <NEW_LINE> <INDENT> if shape is not None: <NEW_LINE> <INDENT> raise TypeError('shape must be None.') <NEW_LINE> <DEDENT> super(JobTrialSpace1D, self).__init__(None, rootDir, **kwargs) <NEW_LINE> <DEDENT> def _determine_shape(self, custom_shape): <NEW_LINE> <INDENT> if custom_shape is not None: <NEW_LINE> <INDENT> return custom_shape <NEW_LINE> <DEDENT> dims = self._meta_file['dimensions'] <NEW_LINE> if len(dims) != 1: <NEW_LINE> <INDENT> raise TypeError('You are trying to open a 1D parameter space, ' 'but the actual data contains more than 1 ' 'dimension (%dD).' % len(dims)) <NEW_LINE> <DEDENT> return (1, dims[0]) <NEW_LINE> <DEDENT> def _checkIteratedParameters(self, paramStr, toCheck): <NEW_LINE> <INDENT> raise RuntimeError('This method cannot be called from within a 1D ' 'dataset.') <NEW_LINE> <DEDENT> def get_iteration_labels(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> labels = self._meta_file['dimension_labels'] <NEW_LINE> return (None, labels[0]) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise LookupError('Could not retrieve the iteration labels of ' 'this space from metadata. You are probably ' 'using an older version of data, in which case ' 'you cannot use this method.') | A 1D parameter sweep space with a number of trials per job.
.. note::
This parameter space behaves like the :class:`~JobTrialSpace2D`, but the
number of rows will be forced to be strictly 1. Thus when indexing, it
is necessary to use notation ``space[0][idx]``.
.. note::
Unlike :class:`~JobTrialSpace2D`, the shape here will be determined
automatically from the metadata file.
.. todo::
If this class is going to be used extensively, it is necessary to write
unit tests!
Parameters
----------
shape : None
Must be set to ``None``. For backward compatibility only.
rootDir : str
Root directory for the space.
kwargs : keyword arguments
Keyword arguments passed on to :class:`~JobTrialSpace2D`. | 6259907167a9b606de54770e |
class NewTweetDeleteRequest(UpdateTweet): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated, IsAdminUser) <NEW_LINE> def delete(self, request, *args, tweet_id, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> tweet_mod_request = TweetModRequest.new_delete_request( admin_user=request.user, tweet_id=tweet_id) <NEW_LINE> <DEDENT> except Tweet.DoesNotExist: <NEW_LINE> <INDENT> raise drf_exceptions.NotFound('Invalid tweet id', 'not_found') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error(str(e)) <NEW_LINE> raise drf_exceptions.APIException('Internal server error', 'error') <NEW_LINE> <DEDENT> serialized_data = TweetModRequestSerializer(tweet_mod_request).data <NEW_LINE> return Response(serialized_data, status=201) | Adds a Tweet `Delete` request
| 625990714c3428357761bb8a |
class SrvVnetDir(SrvDir): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> SrvDir.__init__(self) <NEW_LINE> <DEDENT> def vnet(self, x): <NEW_LINE> <INDENT> val = None <NEW_LINE> vnetinfo = self.xvnet.vnet_get(x) <NEW_LINE> if not vnetinfo: <NEW_LINE> <INDENT> raise BNError('No such vnet ' + str(x)) <NEW_LINE> <DEDENT> val = SrvVnet(vnetinfo) <NEW_LINE> return val <NEW_LINE> <DEDENT> def get(self, x): <NEW_LINE> <INDENT> v = SrvDir.get(self, x) <NEW_LINE> if v is not None: <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> v = self.vnet(x) <NEW_LINE> return v <NEW_LINE> <DEDENT> def op_create(self, op, req): <NEW_LINE> <INDENT> fn = FormFn(self.xvnet.vnet_create, [['config', 'sxpr']]) <NEW_LINE> val = fn(req.args, {}) <NEW_LINE> return val <NEW_LINE> <DEDENT> def render_POST(self, req): <NEW_LINE> <INDENT> return self.perform(req) <NEW_LINE> <DEDENT> def render_GET(self, req): <NEW_LINE> <INDENT> if self.use_sxp(req): <NEW_LINE> <INDENT> req.setHeader("Content-Type", sxp.mime_type) <NEW_LINE> self.ls_vnet(req, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> req.write("<html><head></head><body>") <NEW_LINE> self.print_path(req) <NEW_LINE> self.ls(req) <NEW_LINE> self.ls_vnet(req) <NEW_LINE> self.form(req) <NEW_LINE> req.write("</body></html>") <NEW_LINE> <DEDENT> <DEDENT> def ls_vnet(self, req, use_sxp=0): <NEW_LINE> <INDENT> url = req.prePathURL() <NEW_LINE> if not url.endswith('/'): <NEW_LINE> <INDENT> url += '/' <NEW_LINE> <DEDENT> if use_sxp: <NEW_LINE> <INDENT> vnets = self.xvnet.vnet_ls() <NEW_LINE> sxp.show(vnets, out=req) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vnets = self.xvnet.vnets() <NEW_LINE> vnets.sort(lambda x, y: cmp(x.id, y.id)) <NEW_LINE> req.write('<ul>') <NEW_LINE> for v in vnets: <NEW_LINE> <INDENT> req.write('<li><a href="%s%s"> Vnet %s</a>' % (url, v.id, v.id)) <NEW_LINE> req.write('</li>') <NEW_LINE> <DEDENT> req.write('</ul>') <NEW_LINE> <DEDENT> <DEDENT> def form(self, req): <NEW_LINE> <INDENT> req.write('<form method="post" action="%s" enctype="multipart/form-data">' % req.prePathURL()) <NEW_LINE> req.write('<button type="submit" name="op" value="create">Create Vnet</button>') <NEW_LINE> req.write('Config <input type="file" name="config"><br>') <NEW_LINE> req.write('</form>') | Vnet directory.
| 6259907199cbb53fe68327bf |
class Element(object): <NEW_LINE> <INDENT> source = None <NEW_LINE> def __init__(self, comments=None): <NEW_LINE> <INDENT> self.comments = comments if comments is not None else [] <NEW_LINE> <DEDENT> def _to_string(self, string=""): <NEW_LINE> <INDENT> if self.source: <NEW_LINE> <INDENT> string = self.source <NEW_LINE> <DEDENT> if self.comments: <NEW_LINE> <INDENT> return _add_comments_to_string(string, self.comments) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return string <NEW_LINE> <DEDENT> <DEDENT> def to_node(self): <NEW_LINE> <INDENT> raise NotImplementedError("Should be implemented by subclasses.") | A parsed element. | 6259907138b623060ffaa4bf |
@provides(IToggleField) <NEW_LINE> class ToggleField(MToggleField, Field): <NEW_LINE> <INDENT> def _get_control_value(self): <NEW_LINE> <INDENT> return self.control.isChecked() <NEW_LINE> <DEDENT> def _get_control_text(self): <NEW_LINE> <INDENT> return self.control.text() <NEW_LINE> <DEDENT> def _set_control_value(self, value): <NEW_LINE> <INDENT> return self.control.setChecked(value) <NEW_LINE> <DEDENT> def _set_control_text(self, text): <NEW_LINE> <INDENT> return self.control.setText(text) <NEW_LINE> <DEDENT> def _set_control_icon(self, icon): <NEW_LINE> <INDENT> if icon is not None: <NEW_LINE> <INDENT> self.control.setIcon(icon.create_icon()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.control.setIcon(QIcon()) <NEW_LINE> <DEDENT> <DEDENT> def _observe_control_value(self, remove=False): <NEW_LINE> <INDENT> if remove: <NEW_LINE> <INDENT> self.control.toggled.disconnect(self._update_value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.control.toggled.connect(self._update_value) | The Qt-specific implementation of the toggle field class | 62599071ad47b63b2c5a9123 |
class Entity(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'text': {'required': True}, 'category': {'required': True}, 'offset': {'required': True}, 'length': {'required': True}, 'confidence_score': {'required': True}, } <NEW_LINE> _attribute_map = { 'text': {'key': 'text', 'type': 'str'}, 'category': {'key': 'category', 'type': 'str'}, 'subcategory': {'key': 'subcategory', 'type': 'str'}, 'offset': {'key': 'offset', 'type': 'int'}, 'length': {'key': 'length', 'type': 'int'}, 'confidence_score': {'key': 'confidenceScore', 'type': 'float'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(Entity, self).__init__(**kwargs) <NEW_LINE> self.text = kwargs['text'] <NEW_LINE> self.category = kwargs['category'] <NEW_LINE> self.subcategory = kwargs.get('subcategory', None) <NEW_LINE> self.offset = kwargs['offset'] <NEW_LINE> self.length = kwargs['length'] <NEW_LINE> self.confidence_score = kwargs['confidence_score'] | Entity.
All required parameters must be populated in order to send to Azure.
:ivar text: Required. Entity text as appears in the request.
:vartype text: str
:ivar category: Required. Entity type.
:vartype category: str
:ivar subcategory: (Optional) Entity sub type.
:vartype subcategory: str
:ivar offset: Required. Start position for the entity text. Use of different 'stringIndexType'
values can affect the offset returned.
:vartype offset: int
:ivar length: Required. Length for the entity text. Use of different 'stringIndexType' values
can affect the length returned.
:vartype length: int
:ivar confidence_score: Required. Confidence score between 0 and 1 of the extracted entity.
:vartype confidence_score: float | 62599071ec188e330fdfa179 |
class BaseInlineFormSetMixin(BaseFormSetMixin): <NEW_LINE> <INDENT> model = None <NEW_LINE> inline_model = None <NEW_LINE> fk_name = None <NEW_LINE> formset_class = BaseInlineFormSet <NEW_LINE> exclude = None <NEW_LINE> fields = None <NEW_LINE> formfield_callback = None <NEW_LINE> can_delete = True <NEW_LINE> save_as_new = False <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = {} <NEW_LINE> if self.object: <NEW_LINE> <INDENT> context['object'] = self.object <NEW_LINE> context_object_name = self.get_context_object_name(self.object) <NEW_LINE> if context_object_name: <NEW_LINE> <INDENT> context[context_object_name] = self.object <NEW_LINE> <DEDENT> <DEDENT> context.update(kwargs) <NEW_LINE> return super(BaseInlineFormSetMixin, self).get_context_data(**context) <NEW_LINE> <DEDENT> def get_inline_model(self): <NEW_LINE> <INDENT> return self.inline_model <NEW_LINE> <DEDENT> def get_formset_kwargs(self): <NEW_LINE> <INDENT> kwargs = super(BaseInlineFormSetMixin, self).get_formset_kwargs() <NEW_LINE> kwargs['save_as_new'] = self.save_as_new <NEW_LINE> kwargs['instance'] = self.object <NEW_LINE> return kwargs <NEW_LINE> <DEDENT> def get_factory_kwargs(self): <NEW_LINE> <INDENT> kwargs = super(BaseInlineFormSetMixin, self).get_factory_kwargs() <NEW_LINE> if django.VERSION >= (1, 6) and self.fields is None: <NEW_LINE> <INDENT> self.fields = '__all__' <NEW_LINE> <DEDENT> kwargs.update({ 'exclude': self.exclude, 'fields': self.fields, 'formfield_callback': self.formfield_callback, 'fk_name': self.fk_name, }) <NEW_LINE> if self.get_form_class(): <NEW_LINE> <INDENT> kwargs['form'] = self.get_form_class() <NEW_LINE> <DEDENT> if self.get_formset_class(): <NEW_LINE> <INDENT> kwargs['formset'] = self.get_formset_class() <NEW_LINE> <DEDENT> return kwargs <NEW_LINE> <DEDENT> def get_formset(self): <NEW_LINE> <INDENT> return inlineformset_factory(self.model, self.get_inline_model(), **self.get_factory_kwargs()) | Base class for constructing an inline formSet within a view | 625990718e7ae83300eea966 |
class I3BarOutput(object): <NEW_LINE> <INDENT> def __init__(self, theme): <NEW_LINE> <INDENT> self._theme = theme <NEW_LINE> self._widgets = [] <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> sys.stdout.write(json.dumps({"version": 1, "click_events": True}) + "[\n") <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> sys.stdout.write("]\n") <NEW_LINE> <DEDENT> def draw(self, widget, module=None, engine=None): <NEW_LINE> <INDENT> full_text = widget.full_text() <NEW_LINE> padding = self._theme.padding(widget) <NEW_LINE> prefix = self._theme.prefix(widget, padding) <NEW_LINE> suffix = self._theme.suffix(widget, padding) <NEW_LINE> if prefix: <NEW_LINE> <INDENT> full_text = u"{}{}".format(prefix, full_text) <NEW_LINE> <DEDENT> if suffix: <NEW_LINE> <INDENT> full_text = u"{}{}".format(full_text, suffix) <NEW_LINE> <DEDENT> separator = self._theme.separator(widget) <NEW_LINE> if separator: <NEW_LINE> <INDENT> self._widgets.append({ u"full_text": separator, "separator": False, "color": self._theme.separator_fg(widget), "background": self._theme.separator_bg(widget), "separator_block_width": self._theme.separator_block_width(widget), }) <NEW_LINE> <DEDENT> self._widgets.append({ u"full_text": full_text, "color": self._theme.fg(widget), "background": self._theme.bg(widget), "separator_block_width": self._theme.separator_block_width(widget), "separator": True if separator is None else False, "instance": widget.id, "name": module.id, }) <NEW_LINE> <DEDENT> def begin(self): <NEW_LINE> <INDENT> self._widgets = [] <NEW_LINE> self._theme.reset() <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> sys.stdout.write(json.dumps(self._widgets)) <NEW_LINE> <DEDENT> def end(self): <NEW_LINE> <INDENT> sys.stdout.write(",\n") <NEW_LINE> sys.stdout.flush() | Manage output according to the i3bar protocol | 62599071e5267d203ee6d028 |
class SetPreset(MethodView): <NEW_LINE> <INDENT> def get(self, cam_name): <NEW_LINE> <INDENT> ctrl = control_for_camera(cam_name) <NEW_LINE> preset_num = int(request.args['preset']) <NEW_LINE> logger.info( 'Request to set camera %s preset %d', cam_name, preset_num ) <NEW_LINE> ctrl.set_preset_to_current(preset_num) <NEW_LINE> return jsonify({'success': True}) | Render the GET /api/control/<string:cam_name>/set_preset API response.
Query Parameters:
preset - int | 625990714428ac0f6e659e0a |
class DeployMapValueError(DeployMapError, ValueError): <NEW_LINE> <INDENT> pass | A value is not the required type. | 62599071aad79263cf43008c |
class BpeResults(Frozen, metaclass=SaveAndLoad): <NEW_LINE> <INDENT> save: Callable[[BpeResults, Optional[Path], DefaultNamedArg(bool, 'use_hdf5')], None] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.param_names = None <NEW_LINE> self.begin_params = None <NEW_LINE> self.begin_innovs = None <NEW_LINE> self.begin_cost = None <NEW_LINE> self.num_evals = 0 <NEW_LINE> self.num_iters = 0 <NEW_LINE> self.costs = [] <NEW_LINE> self.correlation = None <NEW_LINE> self.info_svd = None <NEW_LINE> self.covariance = None <NEW_LINE> self.final_params = None <NEW_LINE> self.final_innovs = None <NEW_LINE> self.final_cost = None <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> keys = [ 'begin_params', 'begin_cost', 'num_evals', 'num_iters', 'final_params', 'final_cost', 'correlation', 'info_svd', 'covariance', 'costs', ] <NEW_LINE> dct = {key: getattr(self, key) for key in keys} <NEW_LINE> name = ' BpeResults' <NEW_LINE> text = pprint_dict(dct, name=name, indent=2, align=True, disp=False) <NEW_LINE> return text <NEW_LINE> <DEDENT> def pprint(self) -> None: <NEW_LINE> <INDENT> if self.param_names is None or self.begin_params is None or self.final_params is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> names = [name.decode('utf-8') for name in self.param_names] <NEW_LINE> dct1 = {name.replace('param.', 'param.ix(c).'): self.begin_params[i] for (i, name) in enumerate(names)} <NEW_LINE> dct2 = {name.replace('param.', 'param.ix(c).'): self.final_params[i] for (i, name) in enumerate(names)} <NEW_LINE> print('Initial cost: {}'.format(self.begin_cost)) <NEW_LINE> pprint_dict(dct1, name='Initial parameters:', indent=8) <NEW_LINE> print('Final cost: {}'.format(self.final_cost)) <NEW_LINE> pprint_dict(dct2, name='Final parameters:', indent=8) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, filename: Path = None, use_hdf5: bool = True) -> BpeResults: <NEW_LINE> <INDENT> out: BpeResults = load_method(cls, filename=filename, use_hdf5=use_hdf5) <NEW_LINE> out.num_evals = int(out.num_evals) <NEW_LINE> out.num_iters = int(out.num_iters) <NEW_LINE> out.costs = [c for c in out.costs] <NEW_LINE> return out | Batch Parameter Estimator Results.
Examples
--------
>>> from dstauffman.estimation import BpeResults
>>> bpe_results = BpeResults() | 625990718e7ae83300eea967 |
class TestWordBreak(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.test_object = [{ 'test_s': 'ha', 'test_dict': ['h', 'a'], 'test_output': True, }, { 'test_s': 'ha', 'test_dict': ['h', 'b'], 'test_output': False, }, { 'test_s': 'ha', 'test_dict': ['a', 'h'], 'test_output': True, }, { 'test_s': 'leetcode', 'test_dict': ['leet', 'code'], 'test_output': True, }, { 'test_s': 'leetcode', 'test_dict': ['h', 'leet', 'co', 'df', 'de'], 'test_output': True, }] <NEW_LINE> <DEDENT> def test_result(self): <NEW_LINE> <INDENT> obj = WordBreak() <NEW_LINE> for test_case in self.test_object: <NEW_LINE> <INDENT> answer = obj.wordBreak(test_case['test_s'], test_case['test_dict']) <NEW_LINE> self.assertEqual(answer, test_case['test_output']) | Regtest | 6259907144b2445a339b75c9 |
class TestGetFileMTime(TestCaseWithWorkingTree): <NEW_LINE> <INDENT> def make_basic_tree(self): <NEW_LINE> <INDENT> tree = self.make_branch_and_tree('tree') <NEW_LINE> self.build_tree(['tree/one']) <NEW_LINE> tree.add(['one'], ['one-id']) <NEW_LINE> return tree <NEW_LINE> <DEDENT> def test_get_file_mtime(self): <NEW_LINE> <INDENT> tree = self.make_basic_tree() <NEW_LINE> st = os.lstat('tree/one') <NEW_LINE> tree.lock_read() <NEW_LINE> try: <NEW_LINE> <INDENT> mtime_file_id = tree.get_file_mtime(file_id='one-id') <NEW_LINE> self.assertIsInstance(mtime_file_id, (float, int)) <NEW_LINE> self.assertAlmostEqual(st.st_mtime, mtime_file_id) <NEW_LINE> mtime_path = tree.get_file_mtime(file_id='one-id', path='one') <NEW_LINE> self.assertAlmostEqual(mtime_file_id, mtime_path) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> tree.unlock() <NEW_LINE> <DEDENT> <DEDENT> def test_after_commit(self): <NEW_LINE> <INDENT> tree = self.make_basic_tree() <NEW_LINE> st = os.lstat('tree/one') <NEW_LINE> tree.commit('one', rev_id='rev-1') <NEW_LINE> tree.lock_read() <NEW_LINE> try: <NEW_LINE> <INDENT> mtime = tree.get_file_mtime(file_id='one-id') <NEW_LINE> self.assertAlmostEqual(st.st_mtime, mtime) <NEW_LINE> mtime = tree.get_file_mtime(file_id='one-id', path='one') <NEW_LINE> self.assertAlmostEqual(st.st_mtime, mtime) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> tree.unlock() <NEW_LINE> <DEDENT> <DEDENT> def test_get_renamed_time(self): <NEW_LINE> <INDENT> tree = self.make_basic_tree() <NEW_LINE> tree.rename_one('one', 'two') <NEW_LINE> st = os.lstat('tree/two') <NEW_LINE> tree.lock_read() <NEW_LINE> try: <NEW_LINE> <INDENT> mtime = tree.get_file_mtime(file_id='one-id') <NEW_LINE> self.assertAlmostEqual(st.st_mtime, mtime) <NEW_LINE> mtime = tree.get_file_mtime(file_id='one-id', path='two') <NEW_LINE> self.assertAlmostEqual(st.st_mtime, mtime) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> tree.unlock() <NEW_LINE> <DEDENT> <DEDENT> def test_get_renamed_in_subdir_time(self): <NEW_LINE> <INDENT> tree = self.make_branch_and_tree('tree') <NEW_LINE> self.build_tree(['tree/d/', 'tree/d/a']) <NEW_LINE> tree.add(['d', 'd/a'], ['d-id', 'a-id']) <NEW_LINE> tree.commit('1', rev_id='rev-1') <NEW_LINE> tree.rename_one('d', 'e') <NEW_LINE> st = os.lstat('tree/e/a') <NEW_LINE> tree.lock_read() <NEW_LINE> try: <NEW_LINE> <INDENT> mtime = tree.get_file_mtime(file_id='a-id') <NEW_LINE> self.assertAlmostEqual(st.st_mtime, mtime) <NEW_LINE> mtime = tree.get_file_mtime(file_id='a-id', path='e/a') <NEW_LINE> self.assertAlmostEqual(st.st_mtime, mtime) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> tree.unlock() <NEW_LINE> <DEDENT> <DEDENT> def test_missing(self): <NEW_LINE> <INDENT> tree = self.make_basic_tree() <NEW_LINE> os.remove('tree/one') <NEW_LINE> tree.lock_read() <NEW_LINE> try: <NEW_LINE> <INDENT> self.assertRaises(errors.FileTimestampUnavailable, tree.get_file_mtime, file_id='one-id') <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> tree.unlock() | Test WorkingTree.get_file_mtime.
These are more involved because we need to handle files which have been
renamed, etc. | 6259907160cbc95b063659d9 |
class GeojsonGeometry(ga.Geometry): <NEW_LINE> <INDENT> from_text = "ST_GeomFromGeoJSON" <NEW_LINE> def result_processor(self, dialect: str, coltype): <NEW_LINE> <INDENT> def process(value: Optional[bytes]): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> geom = ga.shape.to_shape( ga.elements.WKBElement( value, srid=self.srid, extended=self.extended ) ) <NEW_LINE> return json.loads(json.dumps(geom.__geo_interface__)) <NEW_LINE> <DEDENT> <DEDENT> return process | Custom geoalchemy type which returns GeoJSON. | 625990713d592f4c4edbc7b8 |
class Null(Singleton): <NEW_LINE> <INDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def __eq__(self, obj): <NEW_LINE> <INDENT> return isinstance(obj, Null) <NEW_LINE> <DEDENT> def __ne__(self, obj): <NEW_LINE> <INDENT> return not self.__eq__(obj) <NEW_LINE> <DEDENT> def __cmp__(self, obj): <NEW_LINE> <INDENT> if self.__eq__(obj): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 | Object representing null value.
In Daphne it is primarily used for subclassing; the subclasses are used for
blank form field values (so that we can preserve the type information).
Doctest:
>>> a = Null()
>>> b = Null()
>>> a is b
True | 6259907121bff66bcd72453e |
@interface.implementer(interfaces.IParticipation) <NEW_LINE> class Participation(object): <NEW_LINE> <INDENT> def __init__(self, principal): <NEW_LINE> <INDENT> self.principal = principal <NEW_LINE> self.interaction = None | A trivial implementation of :class:`zope.security.interfaces.IParticipation`. | 62599071a17c0f6771d5d816 |
class myThread4 (threading.Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.flag=0 <NEW_LINE> self.t = 0 <NEW_LINE> self.applist = [] <NEW_LINE> self.winlist = [] <NEW_LINE> <DEDENT> def set(self): <NEW_LINE> <INDENT> self.flag=1 <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> time.sleep(period) <NEW_LINE> if self.flag==1: <NEW_LINE> <INDENT> plot() <NEW_LINE> break <NEW_LINE> <DEDENT> frpid = get(["xdotool", "getactivewindow", "getwindowpid"]) <NEW_LINE> frname = get(["xdotool", "getactivewindow", "getwindowname"]) <NEW_LINE> app = get(["ps", "-p", frpid, "-o", "comm="]) if frpid != None else "Unknown" <NEW_LINE> if "gnome-terminal" in app: <NEW_LINE> <INDENT> app = "gnome-terminal" <NEW_LINE> <DEDENT> elif app == "soffice.bin": <NEW_LINE> <INDENT> app = "libreoffice" <NEW_LINE> <DEDENT> if not app in self.applist: <NEW_LINE> <INDENT> self.applist.append(app) <NEW_LINE> <DEDENT> checklist = [item[1] for item in self.winlist] <NEW_LINE> if not frname in checklist: <NEW_LINE> <INDENT> self.winlist.append([app, frname, 1*period]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.winlist[checklist.index(frname)][2] = self.winlist[checklist.index(frname)][2]+1*period <NEW_LINE> <DEDENT> if self.t == 60/period: <NEW_LINE> <INDENT> summarize(self.t,self.winlist,self.applist) <NEW_LINE> self.t = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.t += 1 | It is a threading class which is controlled by our main application file | 62599071091ae3566870650e |
class TestDownloadHelper(object): <NEW_LINE> <INDENT> def test_keyboard_interrupt_situation(self, monkeypatch): <NEW_LINE> <INDENT> KNOWN_URL = 'https://ftp.isc.org/isc/bind9/9.10.4-P1/srcid' <NEW_LINE> LOCAL_FILE = os.path.basename(KNOWN_URL) <NEW_LINE> def interrupter(): <NEW_LINE> <INDENT> raise KeyboardInterrupt <NEW_LINE> <DEDENT> monkeypatch.setattr('time.time', interrupter) <NEW_LINE> with pytest.raises(KeyboardInterrupt): <NEW_LINE> <INDENT> DownloadHelper.download_file(KNOWN_URL, LOCAL_FILE) <NEW_LINE> <DEDENT> assert not os.path.exists(LOCAL_FILE) <NEW_LINE> <DEDENT> @pytest.mark.parametrize('total, downloaded, output', [ (100, 25, '\r 25%[=======> ] 25.00 eta 00:00:30 '), (100.0, 25.0, '\r 25%[=======> ] 25.00 eta 00:00:30 '), (-1, 1024 * 1024, '\r [ <=> ] 1.00M in 00:00:10 '), ], ids=[ 'integer', 'float', 'unknown_size', ]) <NEW_LINE> def test_progress(self, total, downloaded, output, monkeypatch): <NEW_LINE> <INDENT> buf = StringIO() <NEW_LINE> monkeypatch.setattr('sys.stdout', buf) <NEW_LINE> monkeypatch.setattr('time.time', lambda: 10.0) <NEW_LINE> DownloadHelper.progress(total, downloaded, 0.0) <NEW_LINE> assert buf.getvalue() == output <NEW_LINE> <DEDENT> @pytest.mark.parametrize('url, content', [ ('http://integration:8000/existing_file.txt', 'content'), ('https://integration:4430/existing_file.txt', 'content'), ('ftp://integration:2100/existing_file.txt', 'content'), ('http://integration:8001/existing_file.txt', 'content'), ('https://integration:4431/existing_file.txt', 'content'), ('ftp://integration:2101/existing_file.txt', 'content'), ], ids=[ 'HTTP', 'HTTPS', 'FTP', 'HTTP-unknown_size', 'HTTPS-unknown_size', 'FTP-unknown_size', ]) <NEW_LINE> @pytest.mark.integration <NEW_LINE> def test_download_existing_file(self, url, content): <NEW_LINE> <INDENT> local_file = 'local_file' <NEW_LINE> DownloadHelper.download_file(url, local_file) <NEW_LINE> assert os.path.isfile(local_file) <NEW_LINE> with open(local_file) as f: <NEW_LINE> <INDENT> assert f.readline().strip() == content <NEW_LINE> <DEDENT> <DEDENT> @pytest.mark.parametrize('url', [ 'http://integration:8000/non_existing_file.txt', 'https://integration:4430/non_existing_file.txt', 'ftp://integration:2100/non_existing_file.txt', ], ids=[ 'HTTP', 'HTTPS', 'FTP', ]) <NEW_LINE> @pytest.mark.integration <NEW_LINE> def test_download_non_existing_file(self, url): <NEW_LINE> <INDENT> local_file = 'local_file' <NEW_LINE> with pytest.raises(DownloadError): <NEW_LINE> <INDENT> DownloadHelper.download_file(url, local_file) <NEW_LINE> <DEDENT> assert not os.path.isfile(local_file) | DownloadHelper tests | 625990714a966d76dd5f07c1 |
class TestHomeView(TestCase): <NEW_LINE> <INDENT> desired_url = "/" <NEW_LINE> desired_name = "portal:home" <NEW_LINE> def test_desired_location(self): <NEW_LINE> <INDENT> resp = self.client.get(self.desired_url) <NEW_LINE> self.assertEqual(resp.status_code, 200) <NEW_LINE> <DEDENT> def test_desired_name(self): <NEW_LINE> <INDENT> reverse_url = reverse(self.desired_name) <NEW_LINE> self.assertEquals(reverse_url, self.desired_url) | HomeView test suite | 62599071f548e778e596ce65 |
class HostnameConversion(object): <NEW_LINE> <INDENT> def __init__(self, options): <NEW_LINE> <INDENT> self._options = options <NEW_LINE> self._ns_cache = {} <NEW_LINE> self._ip_fields = ['source', 'destination', 'ip', 'measurement_agent'] <NEW_LINE> <DEDENT> def convert(self, ddict): <NEW_LINE> <INDENT> if not self._options.ip: <NEW_LINE> <INDENT> for i in self._ip_fields: <NEW_LINE> <INDENT> if ddict.get(i): <NEW_LINE> <INDENT> ip = ddict.get(i) <NEW_LINE> if not self._ns_cache.get(ip): <NEW_LINE> <INDENT> self._ns_cache[ip] = socket.getfqdn(ip) <NEW_LINE> <DEDENT> ddict[i] = self._ns_cache.get(ip) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ddict | Convert ip addresses to fully qualified domain names and cache the results. | 625990713317a56b869bf1b0 |
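A brief sketch of the HostnameConversion cache above; the options object only needs an ip attribute, so a SimpleNamespace stands in for it here, and the measurement dict is illustrative.

from types import SimpleNamespace

conv = HostnameConversion(SimpleNamespace(ip=False))   # ip=False enables reverse lookups
record = {'source': '8.8.8.8', 'destination': '1.1.1.1', 'throughput': 9.3e9}
print(conv.convert(record))   # source/destination replaced by cached FQDN lookups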
class OptionsTests(SynchronousTestCase): <NEW_LINE> <INDENT> def test_malformedSecondary(self): <NEW_LINE> <INDENT> options = Options() <NEW_LINE> self.assertRaises(UsageError, options.parseOptions, ["--secondary", ""]) <NEW_LINE> self.assertRaises(UsageError, options.parseOptions, ["--secondary", "1.2.3.4"]) <NEW_LINE> self.assertRaises( UsageError, options.parseOptions, ["--secondary", "1.2.3.4:hello"] ) <NEW_LINE> self.assertRaises( UsageError, options.parseOptions, ["--secondary", "1.2.3.4:hello/example.com"], ) <NEW_LINE> <DEDENT> def test_secondary(self): <NEW_LINE> <INDENT> options = Options() <NEW_LINE> options.parseOptions(["--secondary", "1.2.3.4/example.com"]) <NEW_LINE> self.assertEqual([(("1.2.3.4", PORT), ["example.com"])], options.secondaries) <NEW_LINE> <DEDENT> def test_secondaryExplicitPort(self): <NEW_LINE> <INDENT> options = Options() <NEW_LINE> options.parseOptions(["--secondary", "1.2.3.4:5353/example.com"]) <NEW_LINE> self.assertEqual([(("1.2.3.4", 5353), ["example.com"])], options.secondaries) <NEW_LINE> <DEDENT> def test_secondaryAuthorityServices(self): <NEW_LINE> <INDENT> options = Options() <NEW_LINE> options.parseOptions( [ "--secondary", "1.2.3.4:5353/example.com", "--secondary", "1.2.3.5:5354/example.com", ] ) <NEW_LINE> self.assertEqual(len(options.svcs), 2) <NEW_LINE> secondary = options.svcs[0] <NEW_LINE> self.assertIsInstance(options.svcs[0], SecondaryAuthorityService) <NEW_LINE> self.assertEqual(secondary.primary, "1.2.3.4") <NEW_LINE> self.assertEqual(secondary._port, 5353) <NEW_LINE> secondary = options.svcs[1] <NEW_LINE> self.assertIsInstance(options.svcs[1], SecondaryAuthorityService) <NEW_LINE> self.assertEqual(secondary.primary, "1.2.3.5") <NEW_LINE> self.assertEqual(secondary._port, 5354) <NEW_LINE> <DEDENT> def test_recursiveConfiguration(self): <NEW_LINE> <INDENT> options = Options() <NEW_LINE> options.parseOptions(["--hosts-file", "hosts.txt", "--recursive"]) <NEW_LINE> ca, cl = _buildResolvers(options) <NEW_LINE> for x in cl: <NEW_LINE> <INDENT> if isinstance(x, ResolverChain): <NEW_LINE> <INDENT> recurser = x.resolvers[-1] <NEW_LINE> if isinstance(recurser, Resolver): <NEW_LINE> <INDENT> recurser._parseCall.cancel() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if platform.getType() != "posix": <NEW_LINE> <INDENT> from twisted.internet import reactor <NEW_LINE> for x in reactor._newTimedCalls: <NEW_LINE> <INDENT> self.assertEqual(x.func.__func__, ThreadedResolver._cleanup) <NEW_LINE> x.cancel() <NEW_LINE> <DEDENT> <DEDENT> self.assertIsInstance(cl[-1], ResolverChain) | Tests for L{Options}, defining how command line arguments for the DNS server
are parsed. | 625990714a966d76dd5f07c2 |
class BigQueryExecutorTables(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.create_my_table() <NEW_LINE> <DEDENT> except exceptions.Conflict as exc: <NEW_LINE> <INDENT> logging.info(exc) <NEW_LINE> <DEDENT> self.bq_client = dw.BigQueryExecutor() <NEW_LINE> <DEDENT> def create_my_table(self): <NEW_LINE> <INDENT> client = bigquery.Client() <NEW_LINE> dataset_id = "{}.test_bq_executor_table".format(os.environ['BIGQUERY_PROJECT']) <NEW_LINE> dataset = bigquery.Dataset(dataset_id) <NEW_LINE> dataset.location = "US" <NEW_LINE> client.create_dataset(dataset) <NEW_LINE> table_ref = dataset.table('my_table') <NEW_LINE> table = bigquery.Table(table_ref) <NEW_LINE> table = client.create_table(table) <NEW_LINE> <DEDENT> def test_table_exists(self): <NEW_LINE> <INDENT> self.assertTrue( self.bq_client.table_exists( dataset_id="test_bq_executor_table", table_id="my_table" ), "Table does not exists" ) <NEW_LINE> <DEDENT> def test_table_does_not_exists(self): <NEW_LINE> <INDENT> self.assertFalse( self.bq_client.table_exists( dataset_id="test_bq_executor_table_2", table_id="my_table_2" ), "Table exists" ) <NEW_LINE> <DEDENT> def test_initiate_table(self): <NEW_LINE> <INDENT> self.assertFalse( self.bq_client.table_exists( dataset_id='test_bq_executor_table', table_id='test' ) ) <NEW_LINE> self.bq_client.initiate_table( dataset_id='test_bq_executor_table', table_id='test', schema_path='tests/schema/initiate_table.json' ) <NEW_LINE> self.assertTrue( self.bq_client.table_exists( dataset_id='test_bq_executor_table', table_id='test' ) ) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.bq_client.delete_table( dataset_id='test_bq_executor_table', table_id='test' ) | Test | 625990717c178a314d78e857 |
class BaseInterval(Constraint, ABC): <NEW_LINE> <INDENT> def __init__(self, ll: float, ul: float, left_open: bool, right_open: bool, mono: int = 0, order: int = 0): <NEW_LINE> <INDENT> super().__init__(order=order) <NEW_LINE> self.ll = ll <NEW_LINE> self.ul = ul <NEW_LINE> self.left_open = left_open <NEW_LINE> self.right_open = right_open <NEW_LINE> self.mono = mono <NEW_LINE> <DEDENT> def get_filter(self, x: Iterable): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> def limits(self): <NEW_LINE> <INDENT> return self.ll, self.ul | Base class for interval constraints
:param float ll: Lower limit of interval
:param float ul: Upper limit of interval
:param bool left_open: If True the left side of the interval is open
:param bool right_open: If True the right side of the interval is open
:param int mono: Monotonicity constraint of the interval in {-1,0,1}
:param int order: Absolute ordering w.r.t. target variable treatment | 6259907126068e7796d4e213 |
class WebSocketServer(asyncio.AbstractServer): <NEW_LINE> <INDENT> def __init__(self, loop): <NEW_LINE> <INDENT> self.loop = loop <NEW_LINE> self.closing = False <NEW_LINE> self.websockets = set() <NEW_LINE> <DEDENT> def wrap(self, server): <NEW_LINE> <INDENT> self.server = server <NEW_LINE> <DEDENT> def register(self, protocol): <NEW_LINE> <INDENT> self.websockets.add(protocol) <NEW_LINE> <DEDENT> def unregister(self, protocol): <NEW_LINE> <INDENT> self.websockets.remove(protocol) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.closing = True <NEW_LINE> self.server.close() <NEW_LINE> for websocket in self.websockets: <NEW_LINE> <INDENT> websocket.handler_task.cancel() <NEW_LINE> <DEDENT> <DEDENT> @asyncio.coroutine <NEW_LINE> def wait_closed(self): <NEW_LINE> <INDENT> if self.websockets: <NEW_LINE> <INDENT> yield from asyncio.wait( [websocket.handler_task for websocket in self.websockets] + [websocket.worker_task for websocket in self.websockets], loop=self.loop) <NEW_LINE> <DEDENT> yield from self.server.wait_closed() | Wrapper for :class:`~asyncio.Server` that triggers the closing handshake. | 625990718a43f66fc4bf3a6c |
class UDPServer: <NEW_LINE> <INDENT> def __init__(self, udp_port): <NEW_LINE> <INDENT> self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) <NEW_LINE> self.server_address = ('0.0.0.0', udp_port) <NEW_LINE> self.sock.bind(self.server_address) <NEW_LINE> <DEDENT> def get_messages(self): <NEW_LINE> <INDENT> return self.sock.recvfrom(4096) | Main udt server listner | 625990712c8b7c6e89bd50bf |
class AppendOperator(Operator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def execute(self, source, destination): <NEW_LINE> <INDENT> if not os.path.exists(os.path.dirname(destination)): <NEW_LINE> <INDENT> os.makedirs(os.path.dirname(destination)) <NEW_LINE> <DEDENT> with open(destination, 'ab') as f: <NEW_LINE> <INDENT> f.write(source) | Append the source to the destination file | 62599071097d151d1a2c294a
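Usage sketch: because the destination is opened in 'ab' mode, source should be bytes; the path below is purely illustrative.

op = AppendOperator()
op.execute(b'new log line\n', '/tmp/example/output.log')   # appends, creating /tmp/example first if missing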
class itkImageToImageFilterICVF22ICVF22(itkImageSourcePython.itkImageSourceICVF22): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> InputImageDimension = _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_InputImageDimension <NEW_LINE> OutputImageDimension = _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_OutputImageDimension <NEW_LINE> def SetInput(self, *args): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_SetInput(self, *args) <NEW_LINE> <DEDENT> def GetInput(self, *args): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_GetInput(self, *args) <NEW_LINE> <DEDENT> def PushBackInput(self, *args): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_PushBackInput(self, *args) <NEW_LINE> <DEDENT> def PopBackInput(self): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_PopBackInput(self) <NEW_LINE> <DEDENT> def PushFrontInput(self, *args): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_PushFrontInput(self, *args) <NEW_LINE> <DEDENT> def PopFrontInput(self): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_PopFrontInput(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkImageToImageFilterAPython.delete_itkImageToImageFilterICVF22ICVF22 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkImageToImageFilterAPython.itkImageToImageFilterICVF22ICVF22_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkImageToImageFilterICVF22ICVF22.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkImageToImageFilterICVF22ICVF22 class | 62599071b7558d5895464b9f |
class PonyLexer(RegexLexer): <NEW_LINE> <INDENT> name = 'Pony' <NEW_LINE> aliases = ['pony'] <NEW_LINE> filenames = ['*.pony'] <NEW_LINE> _caps = r'(iso|trn|ref|val|box|tag)' <NEW_LINE> tokens = { 'root': [ (r'\n', Text), (r'[^\S\n]+', Text), (r'//.*\n', Comment.Single), (r'/\*', Comment.Multiline, 'nested_comment'), (r'"""(?:.|\n)*?"""', String.Doc), (r'"', String, 'string'), (r'\'.*\'', String.Char), (r'=>|[]{}:().~;,|&!^?[]', Punctuation), (words(( 'addressof', 'and', 'as', 'consume', 'digestof', 'is', 'isnt', 'not', 'or'), suffix=r'\b'), Operator.Word), (r'!=|==|<<|>>|[-+/*%=<>]', Operator), (words(( 'box', 'break', 'compile_error', 'compile_intrinsic', 'continue', 'do', 'else', 'elseif', 'embed', 'end', 'error', 'for', 'if', 'ifdef', 'in', 'iso', 'lambda', 'let', 'match', 'object', 'recover', 'ref', 'repeat', 'return', 'tag', 'then', 'this', 'trn', 'try', 'until', 'use', 'var', 'val', 'where', 'while', 'with', '#any', '#read', '#send', '#share'), suffix=r'\b'), Keyword), (r'(actor|class|struct|primitive|interface|trait|type)((?:\s)+)', bygroups(Keyword, Text), 'typename'), (r'(new|fun|be)((?:\s)+)', bygroups(Keyword, Text), 'methodname'), (words(( 'I8', 'U8', 'I16', 'U16', 'I32', 'U32', 'I64', 'U64', 'I128', 'U128', 'ILong', 'ULong', 'ISize', 'USize', 'F32', 'F64', 'Bool', 'Pointer', 'None', 'Any', 'Array', 'String', 'Iterator'), suffix=r'\b'), Name.Builtin.Type), (r'_?[A-Z]\w*', Name.Type), (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), (r'0x[0-9a-fA-F]+', Number.Hex), (r'\d+', Number.Integer), (r'(true|false)\b', Name.Builtin), (r'_\d*', Name), (r'_?[a-z][\w\'_]*', Name) ], 'typename': [ (_caps + r'?((?:\s)*)(_?[A-Z]\w*)', bygroups(Keyword, Text, Name.Class), '#pop') ], 'methodname': [ (_caps + r'?((?:\s)*)(_?[a-z]\w*)', bygroups(Keyword, Text, Name.Function), '#pop') ], 'nested_comment': [ (r'[^*/]+', Comment.Multiline), (r'/\*', Comment.Multiline, '#push'), (r'\*/', Comment.Multiline, '#pop'), (r'[*/]', Comment.Multiline) ], 'string': [ (r'"', String, '#pop'), (r'\\"', String), (r'[^\\"]+', String) ] } | For Pony source code.
.. versionadded:: 2.4 | 6259907138b623060ffaa4c0 |
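A short Pygments usage sketch, assuming Pygments is installed and the lexer above is importable (released Pygments ships it as pygments.lexers.PonyLexer).

from pygments import highlight
from pygments.formatters import TerminalFormatter

pony_source = 'actor Main\n  new create(env: Env) =>\n    env.out.print("Hello, Pony!")\n'
print(highlight(pony_source, PonyLexer(), TerminalFormatter()))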
class micro_net_wiring(base_wiring): <NEW_LINE> <INDENT> def getStripIndexFrom2D(self, x, y): <NEW_LINE> <INDENT> if y % 2 == 0: <NEW_LINE> <INDENT> return y * self.WCA_WIDTH + x + 4 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (y + 1) * self.WCA_WIDTH - x + 3 <NEW_LINE> <DEDENT> <DEDENT> def mapMinutesInternal(self, min): <NEW_LINE> <INDENT> if min == 1: <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> elif min == 2: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> elif min == 3: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> elif min == 4: <NEW_LINE> <INDENT> return 1 | This class implements the wiring layout as described in
https://www.mikrocontroller.net/articles/WordClock_mit_WS2812#Anschluss_WS2812-Streifen_f.C3.BCr_WordClock12h | 62599071435de62698e9d6de |
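The serpentine mapping can be checked in isolation. The sketch below assumes a grid width of 11 (a common WordClock layout), since WCA_WIDTH actually lives in the base_wiring class; the +4/+3 offsets are taken straight from the methods above.

WCA_WIDTH = 11  # assumed width; the real constant is defined in base_wiring

def strip_index(x, y, width=WCA_WIDTH):
    # Even rows run left-to-right, odd rows right-to-left (serpentine strip),
    # with a fixed offset for the minute LEDs at the start of the strip.
    if y % 2 == 0:
        return y * width + x + 4
    return (y + 1) * width - x + 3

print([strip_index(x, 0) for x in range(WCA_WIDTH)])   # 4..14, left to right
print([strip_index(x, 1) for x in range(WCA_WIDTH)])   # 25..15, right to left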
class TestLocationSiteCRUD(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_LocationSite_create(self): <NEW_LINE> <INDENT> model = LocationSiteF.create() <NEW_LINE> self.assertTrue(model.pk is not None) <NEW_LINE> self.assertTrue(model.location_type is not None) <NEW_LINE> self.assertTrue(model.geometry_point is not None) <NEW_LINE> <DEDENT> def test_LocationSite_read(self): <NEW_LINE> <INDENT> location_type = LocationTypeF.create( name=u'custom type', ) <NEW_LINE> model = LocationSiteF.create( location_type=location_type ) <NEW_LINE> self.assertTrue(model.location_type.name == 'custom type') <NEW_LINE> <DEDENT> def test_LocationSite_update(self): <NEW_LINE> <INDENT> location_type = LocationTypeF.create( name=u'custom type', ) <NEW_LINE> model = LocationSiteF.create() <NEW_LINE> new_data = { 'location_type': location_type, } <NEW_LINE> model.__dict__.update(new_data) <NEW_LINE> model.save() <NEW_LINE> for key, val in new_data.items(): <NEW_LINE> <INDENT> self.assertEqual(model.__dict__.get(key), val) <NEW_LINE> <DEDENT> <DEDENT> def test_LocationSite_delete(self): <NEW_LINE> <INDENT> model = LocationSiteF.create() <NEW_LINE> model.delete() <NEW_LINE> self.assertTrue(model.pk is None) <NEW_LINE> <DEDENT> def test_LocationSite_update_not_in_allowed_geometry(self): <NEW_LINE> <INDENT> location_site = LocationSiteF.create() <NEW_LINE> new_data = { 'geometry_point': None, 'geometry_line': LineString((1, 1), (2, 2)), } <NEW_LINE> location_site.__dict__.update(new_data) <NEW_LINE> self.assertRaises(ValidationError, location_site.save) | Tests location site. | 62599071283ffb24f3cf5180 |
class XMLToJSONResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_JSON(self): <NEW_LINE> <INDENT> return self._output.get('JSON', None) | A ResultSet with methods tailored to the values returned by the XMLToJSON Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution. | 62599071ad47b63b2c5a9125 |
class Uses(Lister): <NEW_LINE> <INDENT> log = logging.getLogger(__name__) <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(Uses, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'module', help='the module name', ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> session = self.app.get_db_session() <NEW_LINE> query = session.query(oslo_models.Module).join(models.Project).filter( oslo_models.Module.name == parsed_args.module ).order_by(models.Project.name) <NEW_LINE> return (('Project',), ((r.project.name,) for r in query.all())) | List the projects that use the Oslo module | 625990718e7ae83300eea968 |
class SnippetVisitor: <NEW_LINE> <INDENT> def visit(self, snippet): <NEW_LINE> <INDENT> return snippet.accept(self) <NEW_LINE> <DEDENT> def visit_composite_snippet(self, composite_snippet): <NEW_LINE> <INDENT> composite_snippet.left.accept(self) <NEW_LINE> composite_snippet.right.accept(self) <NEW_LINE> <DEDENT> def visit_simple_snippet(self, simple_snippet): <NEW_LINE> <INDENT> pass | Base class for visitors working on syntax tree. | 62599071d268445f2663a7c9 |
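A minimal concrete visitor, assuming snippet nodes implement accept(visitor) by dispatching to the matching visit_* method; the two node classes below are hypothetical sketches, not the project's real snippet types.

class SimpleSnippet:
    def __init__(self, text):
        self.text = text

    def accept(self, visitor):
        return visitor.visit_simple_snippet(self)

class CompositeSnippet:
    def __init__(self, left, operation, right):
        self.left, self.operation, self.right = left, operation, right

    def accept(self, visitor):
        return visitor.visit_composite_snippet(self)

class PrintingVisitor(SnippetVisitor):
    # Walks the tree and prints every simple snippet it reaches.
    def visit_simple_snippet(self, simple_snippet):
        print(simple_snippet.text)

tree = CompositeSnippet(SimpleSnippet('left'), '+', SimpleSnippet('right'))
PrintingVisitor().visit(tree)  # prints "left" then "right"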