Columns: code (string, 4 to 4.48k chars); docstring (string, 1 to 6.45k chars); _id (string, 24 chars)
class RabbitMQProcessorBase(Processor): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._publisher = None <NEW_LINE> <DEDENT> def configure(self, config): <NEW_LINE> <INDENT> logger.info("Configuring RabbitMQStoryProcessor") <NEW_LINE> if self._publisher is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._publisher.close() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> logger.exception("failed to disconnect from RabbitMQ") <NEW_LINE> <DEDENT> <DEDENT> self._publisher = self._create_publisher(config) <NEW_LINE> <DEDENT> def job_started(self, job): <NEW_LINE> <INDENT> logger.info("connecting to RabbitMQ") <NEW_LINE> try: <NEW_LINE> <INDENT> self._publisher.open() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> logger.exception("failed to connect to RabbitMQ") <NEW_LINE> <DEDENT> <DEDENT> def job_finished(self, job): <NEW_LINE> <INDENT> logger.info("disconnecting from RabbitMQ") <NEW_LINE> try: <NEW_LINE> <INDENT> self._publisher.close() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> logger.exception("failed to disconnect from RabbitMQ") <NEW_LINE> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def _create_publisher(self, config): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def _create_message(self, item): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_item(self, source, item): <NEW_LINE> <INDENT> message = self._create_message(item) <NEW_LINE> self._publisher.publish_message(message)
Base class for processors that use RabbitMQ
6259905e3eb6a72ae038bcc4
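The entry above is an abstract base: subclasses only need to supply _create_publisher and _create_message. A minimal subclass sketch, assuming the RabbitMQProcessorBase from this entry plus a hypothetical JsonPublisher class and a config dict with a "url" key (none of those names come from the dataset):

import json

class JsonStoryProcessor(RabbitMQProcessorBase):
    def _create_publisher(self, config):
        # hypothetical publisher; anything exposing open/close/publish_message works
        return JsonPublisher(config["url"])

    def _create_message(self, item):
        # serialize the processed item into the message body
        return json.dumps(item)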
class JourneyCreationForm2(forms.Form): <NEW_LINE> <INDENT> journey_name = forms.CharField(required=True) <NEW_LINE> travel_date = forms.DateTimeField(required=True) <NEW_LINE> cotravel_number = forms.IntegerField() <NEW_LINE> layout = Layout(Fieldset("Provide the journey information here", "journey_name", Row("travel_date", "cotravel_number")))
Form to take other journey-related information from the user
6259905e07f4c71912bb0aa1
class Article(models.Model): <NEW_LINE> <INDENT> author = models.ForeignKey(User,on_delete=models.CASCADE) <NEW_LINE> avatar = models.ImageField(upload_to='article/%Y%m%d/', blank=True) <NEW_LINE> title = models.CharField(max_length=20, blank=True) <NEW_LINE> category = models.ForeignKey(ArticleCategory, null=True, blank=True, on_delete=models.CASCADE,related_name='article') <NEW_LINE> tags = models.CharField(max_length=20, blank=True) <NEW_LINE> sumary = models.CharField(max_length=200, null=False, blank=False) <NEW_LINE> content = models.TextField() <NEW_LINE> total_views = models.PositiveIntegerField(default=0) <NEW_LINE> comment_count = models.PositiveIntegerField(default=0) <NEW_LINE> created = models.DateTimeField(default=timezone.now) <NEW_LINE> updated = models.DateTimeField(auto_now=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'tb_article' <NEW_LINE> ordering = ('-created',) <NEW_LINE> verbose_name = '文章管理' <NEW_LINE> verbose_name_plural = verbose_name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title
Author, title image, title, category, tags, summary, article body, view count, comment count, article creation time, article modification time
6259905e7cff6e4e811b70a9
class CategoryDeleteView(DeleteView): <NEW_LINE> <INDENT> model = Category <NEW_LINE> template_name = 'shop/category_delete.html' <NEW_LINE> success_url = reverse_lazy('shop:category_list')
Category Delete View
6259905e4428ac0f6e659ba2
class LinearityModel(model_base.DataModel): <NEW_LINE> <INDENT> schema_url = "linearity.schema.yaml" <NEW_LINE> def __init__(self, init=None, coeffs=None, dq=None, dq_def=None, **kwargs): <NEW_LINE> <INDENT> super(LinearityModel, self).__init__(init=init, **kwargs) <NEW_LINE> if coeffs is not None: <NEW_LINE> <INDENT> self.coeffs = coeffs <NEW_LINE> <DEDENT> if dq is not None: <NEW_LINE> <INDENT> self.dq = dq <NEW_LINE> <DEDENT> if dq_def is not None: <NEW_LINE> <INDENT> self.dq_def = dq_def <NEW_LINE> <DEDENT> self.dq = dynamic_mask(self) <NEW_LINE> self.dq = self.dq
A data model for linearity correction information. Parameters ---------- init : any Any of the initializers supported by `~jwst_lib.models.DataModel`. coeffs : numpy array Coefficients defining the nonlinearity function. dq : numpy array The data quality array. dq_def : numpy array The data quality definitions table.
6259905e8a43f66fc4bf37f3
class MyTCPSocketHandler(socketserver.BaseRequestHandler): <NEW_LINE> <INDENT> def handle(self): <NEW_LINE> <INDENT> self.data = self.request.recv(1024).strip() <NEW_LINE> print("{} wrote:".format(self.client_address[0])) <NEW_LINE> print(self.data.decode()) <NEW_LINE> self.request.sendall(b"Nix")
The RequestHandler class for our server. It is instantiated once per connection to the server, and must override the handle() method to implement communication to the client.
6259905e1f5feb6acb16424e
class Solution(object): <NEW_LINE> <INDENT> def minMeetingRooms(self, intervals): <NEW_LINE> <INDENT> Start = 0 <NEW_LINE> End = 1 <NEW_LINE> helper = list() <NEW_LINE> max_rooms = 0 <NEW_LINE> if not intervals or len(intervals) == 0: <NEW_LINE> <INDENT> return max_rooms <NEW_LINE> <DEDENT> for interval in intervals: <NEW_LINE> <INDENT> helper.append(Ele((interval.start, Start))) <NEW_LINE> helper.append(Ele((interval.end, End))) <NEW_LINE> <DEDENT> helper.sort() <NEW_LINE> num = 0 <NEW_LINE> for h in helper: <NEW_LINE> <INDENT> if h[1] == Start: <NEW_LINE> <INDENT> num += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> num -= 1 <NEW_LINE> <DEDENT> max_rooms = max(max_rooms, num) <NEW_LINE> <DEDENT> return max_rooms
@param intervals: an array of meeting time intervals @return: the minimum number of conference rooms required
6259905e76e4537e8c3f0bf1
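The docstring above describes the classic sweep over sorted start/end events; the entry itself depends on an Ele wrapper and interval objects from its original codebase. A self-contained sketch of the same idea using plain (start, end) tuples, as my simplification rather than the dataset's code:

def min_meeting_rooms(intervals):
    # Build (time, delta) events: +1 when a meeting starts, -1 when one ends.
    events = []
    for start, end in intervals:
        events.append((start, 1))
        events.append((end, -1))
    # Sort by time, freeing rooms (-1) before allocating (+1) at the same instant.
    events.sort(key=lambda e: (e[0], e[1]))
    rooms = max_rooms = 0
    for _, delta in events:
        rooms += delta
        max_rooms = max(max_rooms, rooms)
    return max_rooms

# min_meeting_rooms([(0, 30), (5, 10), (15, 20)]) -> 2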
class CommandPlus(QtWidgets.QWidget): <NEW_LINE> <INDENT> def __init__(self, controller, parent): <NEW_LINE> <INDENT> super(CommandPlus, self).__init__() <NEW_LINE> self.controller = controller <NEW_LINE> self.parent = parent <NEW_LINE> self.build_ui() <NEW_LINE> <DEDENT> def build_ui(self): <NEW_LINE> <INDENT> self.create_widgets() <NEW_LINE> self.create_layouts() <NEW_LINE> self.create_signals() <NEW_LINE> <DEDENT> def create_widgets(self): <NEW_LINE> <INDENT> self.button = QtWidgets.QToolButton() <NEW_LINE> self.button.setIcon(QtGui.QIcon(ICONS["plus"])) <NEW_LINE> size = COMMAND_SIZE[self.controller.settings["command_size"]] <NEW_LINE> widget_size = size["widget"] <NEW_LINE> icon_size = size["icon"] <NEW_LINE> self.button.setFixedSize(QtCore.QSize(widget_size, widget_size)) <NEW_LINE> self.button.setIconSize(QtCore.QSize(widget_size, icon_size)) <NEW_LINE> <DEDENT> def create_layouts(self): <NEW_LINE> <INDENT> self.layout_main = QtWidgets.QHBoxLayout() <NEW_LINE> self.layout_main.addStretch() <NEW_LINE> self.layout_main.addWidget(self.button) <NEW_LINE> self.layout_main.addStretch() <NEW_LINE> self.setLayout(self.layout_main) <NEW_LINE> <DEDENT> def create_signals(self): <NEW_LINE> <INDENT> self.button.clicked.connect(self.execute) <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> from smartScripter.controller import RegisterCommandController <NEW_LINE> from smartScripter import view <NEW_LINE> global REGISTER_VIEW <NEW_LINE> REGISTER_VIEW = view.RegisterCommandView(self.parent) <NEW_LINE> REGISTER_VIEW.raise_() <NEW_LINE> REGISTER_VIEW.show() <NEW_LINE> RegisterCommandController(REGISTER_VIEW, "")
Button to create a new command when clicking it. This button is always located as last item at the very right of the scripts table. Clicking it opens the RegisterCommand view.
6259905e21a7993f00c675d2
class NwsViewer(TethysAppBase): <NEW_LINE> <INDENT> name = 'Nws Viewer' <NEW_LINE> index = 'nws_viewer:home' <NEW_LINE> icon = 'nws_viewer/images/icon.gif' <NEW_LINE> package = 'nws_viewer' <NEW_LINE> root_url = 'nws-viewer' <NEW_LINE> color = '#e67e22' <NEW_LINE> def url_maps(self): <NEW_LINE> <INDENT> UrlMap = url_map_maker(self.root_url) <NEW_LINE> url_maps = (UrlMap(name='home', url='nws-viewer', controller='nws_viewer.controllers.home'), ) <NEW_LINE> return url_maps
Tethys app class for Nws Viewer.
6259905ed486a94d0ba2d62d
class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, 'entity_path': {'key': 'entityPath', 'type': 'str'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, 'name': {'key': 'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: str, id: Optional[str] = None, connection_string: Optional[str] = None, endpoint_uri: Optional[str] = None, entity_path: Optional[str] = None, authentication_type: Optional[Union[str, "AuthenticationType"]] = None, identity: Optional["ManagedIdentity"] = None, subscription_id: Optional[str] = None, resource_group: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) <NEW_LINE> self.id = id <NEW_LINE> self.connection_string = connection_string <NEW_LINE> self.endpoint_uri = endpoint_uri <NEW_LINE> self.entity_path = entity_path <NEW_LINE> self.authentication_type = authentication_type <NEW_LINE> self.identity = identity <NEW_LINE> self.name = name <NEW_LINE> self.subscription_id = subscription_id <NEW_LINE> self.resource_group = resource_group
The properties related to service bus queue endpoint types. All required parameters must be populated in order to send to Azure. :ivar id: Id of the service bus queue endpoint. :vartype id: str :ivar connection_string: The connection string of the service bus queue endpoint. :vartype connection_string: str :ivar endpoint_uri: The url of the service bus queue endpoint. It must include the protocol sb://. :vartype endpoint_uri: str :ivar entity_path: Queue name on the service bus namespace. :vartype entity_path: str :ivar authentication_type: Method used to authenticate against the service bus queue endpoint. Possible values include: "keyBased", "identityBased". :vartype authentication_type: str or ~azure.mgmt.iothub.v2021_07_02.models.AuthenticationType :ivar identity: Managed identity properties of routing service bus queue endpoint. :vartype identity: ~azure.mgmt.iothub.v2021_07_02.models.ManagedIdentity :ivar name: Required. The name that identifies this endpoint. The name can only include alphanumeric characters, periods, underscores, hyphens and has a maximum length of 64 characters. The following names are reserved: events, fileNotifications, $default. Endpoint names must be unique across endpoint types. The name need not be the same as the actual queue name. :vartype name: str :ivar subscription_id: The subscription identifier of the service bus queue endpoint. :vartype subscription_id: str :ivar resource_group: The name of the resource group of the service bus queue endpoint. :vartype resource_group: str
6259905e462c4b4f79dbd06a
class QplotlyWidgetPermission(BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> if request.user.is_superuser: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if request.method in ('POST'): <NEW_LINE> <INDENT> if 'layer' in request.POST: <NEW_LINE> <INDENT> layer = Layer.objects.get(pk=request.POST['layer']) <NEW_LINE> <DEDENT> elif 'layer_id' in view.kwargs: <NEW_LINE> <INDENT> layer = Layer.objects.get(pk=view.kwargs['layer_id']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if 'pk' in view.kwargs and 'project_id' in view.kwargs: <NEW_LINE> <INDENT> layer = QplotlyWidget.objects.get(pk=view.kwargs['pk']). layers.filter(project_id=view.kwargs['project_id'])[0] <NEW_LINE> <DEDENT> elif 'layer_id' in view.kwargs: <NEW_LINE> <INDENT> layer = Layer.objects.get(id=view.kwargs['layer_id']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return request.user.has_perm('qdjango.change_project', layer.project) <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> return False
API permission for Qplotly URLs. Allows access only to users who have the change_project permission on the project
6259905e2c8b7c6e89bd4e54
class customer: <NEW_LINE> <INDENT> def __init__(self, idnum, lname, fname, phone): <NEW_LINE> <INDENT> self._cust_id = vFunc.valid_id_check(idnum) <NEW_LINE> self._last_name = vFunc.valid_name_check(lname) <NEW_LINE> self._first_name = vFunc.valid_name_check(fname) <NEW_LINE> self._phone_num = vFunc.valid_phone_check(phone) <NEW_LINE> <DEDENT> '''Functions to change individual characteristics of an customer object''' <NEW_LINE> def set_cust_id(self, idnum): <NEW_LINE> <INDENT> self._cust_id = vFunc.valid_id_check(idnum) <NEW_LINE> <DEDENT> def set_last_name(self, lname): <NEW_LINE> <INDENT> self._last_name = vFunc.valid_name_check(lname) <NEW_LINE> <DEDENT> def set_first_name(self, fname): <NEW_LINE> <INDENT> self._first_name = vFunc.valid_name_check(fname) <NEW_LINE> <DEDENT> def set_phone_num(self, phone): <NEW_LINE> <INDENT> self._phone_num = vFunc.valid_phone_check(phone) <NEW_LINE> <DEDENT> '''Function to create output string based off an employee class''' <NEW_LINE> def display(self): <NEW_LINE> <INDENT> return (str(self._cust_id) + '\n' + (self._first_name) + ' ' + str(self._last_name) + '\n' + str(self._phone_num) + '\n')
customer class constructor
6259905ef548e778e596cbee
class SecurityGroupServerRpcCallback(n_rpc.RpcCallback): <NEW_LINE> <INDENT> RPC_API_VERSION = '1.2' <NEW_LINE> @property <NEW_LINE> def plugin(self): <NEW_LINE> <INDENT> return manager.NeutronManager.get_plugin() <NEW_LINE> <DEDENT> def _get_devices_info(self, devices): <NEW_LINE> <INDENT> return dict( (port['id'], port) for port in self.plugin.get_ports_from_devices(devices) if port and not port['device_owner'].startswith('network:') ) <NEW_LINE> <DEDENT> def security_group_rules_for_devices(self, context, **kwargs): <NEW_LINE> <INDENT> devices_info = kwargs.get('devices') <NEW_LINE> ports = self._get_devices_info(devices_info) <NEW_LINE> return self.plugin.security_group_rules_for_ports(context, ports) <NEW_LINE> <DEDENT> def security_group_info_for_devices(self, context, **kwargs): <NEW_LINE> <INDENT> devices_info = kwargs.get('devices') <NEW_LINE> ports = self._get_devices_info(devices_info) <NEW_LINE> return self.plugin.security_group_info_for_ports(context, ports)
Callback for SecurityGroup agent RPC in plugin implementations.
6259905e4e4d562566373a6c
class _ExceptionLoggingContext(object): <NEW_LINE> <INDENT> def __init__(self, logger): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __exit__(self, typ, value, tb): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> self.logger.error("Uncaught exception", exc_info=(typ, value, tb)) <NEW_LINE> raise _QuietException
Used with the ``with`` statement when calling delegate methods to log any exceptions with the given logger. Any exceptions caught are converted to _QuietException
6259905e21bff66bcd7242ca
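The context manager above is meant to wrap delegate calls; roughly how it would be used, as a sketch with a stand-in logger and delegate call (neither comes from the dataset):

import logging

logger = logging.getLogger("server")

# Any exception raised inside the block is logged with its full traceback,
# then surfaced as _QuietException by the __exit__ hook above.
with _ExceptionLoggingContext(logger):
    delegate.on_message(message)  # stand-in delegate call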
class ShiftedKernel(Kernel, ShiftedFunction): <NEW_LINE> <INDENT> def _compute(self, x, y): <NEW_LINE> <INDENT> shifts1, shifts2 = expand(self.shifts) <NEW_LINE> return B.subtract(x, shifts1), B.subtract(y, shifts2) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _stationary(self): <NEW_LINE> <INDENT> if len(self.shifts) == 1: <NEW_LINE> <INDENT> return self[0].stationary <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @_dispatch <NEW_LINE> def __eq__(self, other: "ShiftedKernel"): <NEW_LINE> <INDENT> identical_shifts = identical(expand(self.shifts), expand(other.shifts)) <NEW_LINE> return self[0] == other[0] and identical_shifts
Shifted kernel.
6259905ed6c5a102081e3788
class HyperionComponentSwitch(SwitchEntity): <NEW_LINE> <INDENT> _attr_entity_category = EntityCategory.CONFIG <NEW_LINE> def __init__( self, server_id: str, instance_num: int, instance_name: str, component_name: str, hyperion_client: client.HyperionClient, ) -> None: <NEW_LINE> <INDENT> self._unique_id = _component_to_unique_id( server_id, component_name, instance_num ) <NEW_LINE> self._device_id = get_hyperion_device_id(server_id, instance_num) <NEW_LINE> self._name = _component_to_switch_name(component_name, instance_name) <NEW_LINE> self._instance_name = instance_name <NEW_LINE> self._component_name = component_name <NEW_LINE> self._client = hyperion_client <NEW_LINE> self._client_callbacks = { f"{KEY_COMPONENTS}-{KEY_UPDATE}": self._update_components } <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def entity_registry_enabled_default(self) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self) -> str: <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self) -> bool: <NEW_LINE> <INDENT> for component in self._client.components or []: <NEW_LINE> <INDENT> if component[KEY_NAME] == self._component_name: <NEW_LINE> <INDENT> return bool(component.setdefault(KEY_ENABLED, False)) <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self) -> bool: <NEW_LINE> <INDENT> return bool(self._client.has_loaded_state) <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self) -> DeviceInfo: <NEW_LINE> <INDENT> return DeviceInfo( identifiers={(DOMAIN, self._device_id)}, manufacturer=HYPERION_MANUFACTURER_NAME, model=HYPERION_MODEL_NAME, name=self._instance_name, ) <NEW_LINE> <DEDENT> async def _async_send_set_component(self, value: bool) -> None: <NEW_LINE> <INDENT> await self._client.async_send_set_component( **{ KEY_COMPONENTSTATE: { KEY_COMPONENT: self._component_name, KEY_STATE: value, } } ) <NEW_LINE> <DEDENT> async def async_turn_on(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> await self._async_send_set_component(True) <NEW_LINE> <DEDENT> async def async_turn_off(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> await self._async_send_set_component(False) <NEW_LINE> <DEDENT> @callback <NEW_LINE> def _update_components(self, _: dict[str, Any] | None = None) -> None: <NEW_LINE> <INDENT> self.async_write_ha_state() <NEW_LINE> <DEDENT> async def async_added_to_hass(self) -> None: <NEW_LINE> <INDENT> self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_ENTITY_REMOVE.format(self._unique_id), functools.partial(self.async_remove, force_remove=True), ) ) <NEW_LINE> self._client.add_callbacks(self._client_callbacks) <NEW_LINE> <DEDENT> async def async_will_remove_from_hass(self) -> None: <NEW_LINE> <INDENT> self._client.remove_callbacks(self._client_callbacks)
ComponentBinarySwitch switch class.
6259905e38b623060ffaa382
class GetProjectsInputSet(InputSet): <NEW_LINE> <INDENT> def set_AccountName(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'AccountName', value) <NEW_LINE> <DEDENT> def set_Password(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Password', value) <NEW_LINE> <DEDENT> def set_Username(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Username', value)
An InputSet with methods appropriate for specifying the inputs to the GetProjects Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
6259905e3d592f4c4edbc541
class VolumeOpsTestCase(test_base.HyperVBaseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(VolumeOpsTestCase, self).setUp() <NEW_LINE> self._volumeops = volumeops.VolumeOps() <NEW_LINE> <DEDENT> def test_get_volume_driver_exception(self): <NEW_LINE> <INDENT> fake_conn_info = {'driver_volume_type': 'fake_driver'} <NEW_LINE> self.assertRaises(exception.VolumeDriverNotFound, self._volumeops._get_volume_driver, connection_info=fake_conn_info) <NEW_LINE> <DEDENT> def test_fix_instance_volume_disk_paths(self): <NEW_LINE> <INDENT> block_device_info = db_fakes.get_fake_block_device_info( FAKE_TARGET_PORTAL, FAKE_VOLUME_ID) <NEW_LINE> fake_vol_conn_info = ( block_device_info['block_device_mapping'][0]['connection_info']) <NEW_LINE> with contextlib.nested( mock.patch.object(self._volumeops, '_get_volume_driver'), mock.patch.object(self._volumeops, 'ebs_root_in_block_devices') ) as (mock_get_volume_driver, mock_ebs_in_block_devices): <NEW_LINE> <INDENT> fake_vol_driver = mock_get_volume_driver.return_value <NEW_LINE> mock_ebs_in_block_devices.return_value = False <NEW_LINE> self._volumeops.fix_instance_volume_disk_paths( mock.sentinel.instance_name, block_device_info) <NEW_LINE> func = fake_vol_driver.fix_instance_volume_disk_path <NEW_LINE> func.assert_called_once_with( mock.sentinel.instance_name, fake_vol_conn_info, 0) <NEW_LINE> <DEDENT> <DEDENT> @mock.patch.object(volumeops.VolumeOps, '_get_volume_driver') <NEW_LINE> def test_disconnect_volumes(self, mock_get_volume_driver): <NEW_LINE> <INDENT> block_device_info = db_fakes.get_fake_block_device_info( FAKE_TARGET_PORTAL, FAKE_VOLUME_ID) <NEW_LINE> block_device_mapping = ( block_device_info['block_device_mapping']) <NEW_LINE> fake_volume_driver = mock_get_volume_driver.return_value <NEW_LINE> self._volumeops.disconnect_volumes(block_device_info) <NEW_LINE> fake_volume_driver.disconnect_volumes.assert_called_once_with( block_device_mapping)
Unit tests for VolumeOps class.
6259905edd821e528d6da4b3
class Task(dict): <NEW_LINE> <INDENT> def __init__(self, info, parent_list=None, subtasks=None, *args): <NEW_LINE> <INDENT> self.parent_list = parent_list <NEW_LINE> self.info = info <NEW_LINE> if subtasks: <NEW_LINE> <INDENT> self.subtasks = subtasks <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.subtasks = [] <NEW_LINE> <DEDENT> dict.__init__(self, args) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return dict.__getitem__(self.info, key) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> dict.__setitem__(self.info, key, value) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<wunderpy.wunderlist.Task: {} {}>".format(self.title, self.id) <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> return self.info.get("title") <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self.info.get("id") <NEW_LINE> <DEDENT> @property <NEW_LINE> def created_at(self): <NEW_LINE> <INDENT> created = self.info.get("created_at") <NEW_LINE> if created: <NEW_LINE> <INDENT> return dateutil.parser.parse(created) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def due_date(self): <NEW_LINE> <INDENT> due = self.info.get("due_date") <NEW_LINE> if due: <NEW_LINE> <INDENT> return dateutil.parser.parse(due).date() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def due_date_iso(self): <NEW_LINE> <INDENT> return self.info.get("due_date") <NEW_LINE> <DEDENT> @property <NEW_LINE> def completed(self): <NEW_LINE> <INDENT> if self.info.get("completed_at"): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def starred(self): <NEW_LINE> <INDENT> if self.info.get("starred") == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Object representing a single task in Wunderlist.
6259905e4f88993c371f1051
class Action(Enum): <NEW_LINE> <INDENT> buy = 1 <NEW_LINE> sell = -1
Direction of a trade (buy or sell)
6259905e4e4d562566373a6d
class Buses(list): <NEW_LINE> <INDENT> def __init__(self, buses): <NEW_LINE> <INDENT> self._buses = None <NEW_LINE> self.buses = buses <NEW_LINE> super().__init__(self.buses) <NEW_LINE> <DEDENT> @property <NEW_LINE> def buses(self): <NEW_LINE> <INDENT> return self._buses <NEW_LINE> <DEDENT> @buses.setter <NEW_LINE> def buses(self, buses): <NEW_LINE> <INDENT> self._buses = [Bus.from_json(b) for b in buses] <NEW_LINE> <DEDENT> def filter(self, adherence=None, block_id=None, block_abbr=None, direction=None, latitude=None, longitude=None, msg_time=None, route=None, stop_id=None, timepoint=None, trip_id=None, vehicle=None): <NEW_LINE> <INDENT> filter_kws = { 'adherence': adherence, 'block_id': block_id, 'block_abbr': block_abbr, 'direction': direction, 'latitude': latitude, 'longitude': longitude, 'msg_time': msg_time, 'route': route, 'stop_id': stop_id, 'timepoint': timepoint, 'trip_id': trip_id, 'vehicle': vehicle } <NEW_LINE> for k, v in filter_kws.items(): <NEW_LINE> <INDENT> if not v: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self = self._filter(k, v) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def _filter(self, attribute_name, value): <NEW_LINE> <INDENT> v = getattr(self[0], attribute_name) <NEW_LINE> print(v) <NEW_LINE> return [bus for bus in self if getattr(bus, attribute_name) == value]
List of active buses
6259905ea8370b77170f1a34
class WindowsCrashDumpSpace64BitMap(crash.WindowsCrashDumpSpace32): <NEW_LINE> <INDENT> order = 29 <NEW_LINE> dumpsig = 'PAGEDU64' <NEW_LINE> headertype = "_DMP_HEADER64" <NEW_LINE> headerpages = 0x13 <NEW_LINE> bitmaphdroffset = 0x2000 <NEW_LINE> def __init__(self, base, config, **kwargs): <NEW_LINE> <INDENT> self.as_assert(base, "No base Address Space") <NEW_LINE> addrspace.AbstractRunBasedMemory.__init__(self, base, config, **kwargs) <NEW_LINE> self.as_assert((base.read(0, 8) == self.dumpsig), "Header signature invalid") <NEW_LINE> self.as_assert(self.profile.has_type(self.headertype), self.headertype + " not available in profile") <NEW_LINE> self.header = obj.Object(self.headertype, 0, base) <NEW_LINE> self.as_assert((self.header.DumpType == 5), "Unsupported dump format") <NEW_LINE> self.bitmaphdr = obj.Object("_FULL_DUMP64", self.bitmaphdroffset, base) <NEW_LINE> fdmp_buff = base.read(self.bitmaphdroffset, self.bitmaphdr.HeaderSize-self.bitmaphdroffset) <NEW_LINE> bufferas = addrspace.BufferAddressSpace(self._config, data = fdmp_buff) <NEW_LINE> self.bitmaphdr2 = obj.Object('_FULL_DUMP64', vm = bufferas, offset = 0) <NEW_LINE> firstbit = None <NEW_LINE> firstoffset = 0 <NEW_LINE> lastbit = None <NEW_LINE> lastbitseen = 0 <NEW_LINE> offset = self.bitmaphdr2.HeaderSize <NEW_LINE> for i in range(0,((self.bitmaphdr2.BitmapSize + 31) / 32)): <NEW_LINE> <INDENT> if self.bitmaphdr.Buffer2[i] == 0: <NEW_LINE> <INDENT> if firstbit != None: <NEW_LINE> <INDENT> lastbit = ((i-1) * 32) + 31 <NEW_LINE> self.runs.append((firstbit *0x1000, firstoffset, (lastbit-firstbit+1) * 0x1000)) <NEW_LINE> firstbit = None <NEW_LINE> <DEDENT> <DEDENT> elif self.bitmaphdr.Buffer2[i] == 0xFFFFFFFF: <NEW_LINE> <INDENT> if firstbit == None: <NEW_LINE> <INDENT> firstoffset = offset <NEW_LINE> firstbit = i * 32 <NEW_LINE> <DEDENT> offset = offset + (32 * 0x1000) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> wordoffset = i * 32 <NEW_LINE> for j in range(0,32): <NEW_LINE> <INDENT> BitAddr = wordoffset + j <NEW_LINE> ByteOffset = BitAddr >> 3 <NEW_LINE> ByteAddress = (self.bitmaphdr2.Buffer[ByteOffset]) <NEW_LINE> ShiftCount = (BitAddr & 0x7) <NEW_LINE> if ((ByteAddress >> ShiftCount) & 1): <NEW_LINE> <INDENT> if firstbit == None: <NEW_LINE> <INDENT> firstoffset = offset <NEW_LINE> firstbit = BitAddr <NEW_LINE> <DEDENT> offset = offset + 0x1000 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if firstbit != None: <NEW_LINE> <INDENT> lastbit = BitAddr-1 <NEW_LINE> self.runs.append((firstbit *0x1000, firstoffset, (lastbit-firstbit+1) * 0x1000)) <NEW_LINE> firstbit = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> lastbitseen = (i * 32) + 31 <NEW_LINE> <DEDENT> if firstbit != None: <NEW_LINE> <INDENT> self.runs.append((firstbit *0x1000, firstoffset, (lastbitseen-firstbit+1) * 0x1000)) <NEW_LINE> <DEDENT> self.dtb = self.header.DirectoryTableBase.v()
This AS supports Windows BitMap Crash Dump format
6259905e3539df3088ecd902
class NivelComplejidadRiesgoForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = NivelComplejidadRiesgo <NEW_LINE> fields = '__all__' <NEW_LINE> widgets = { 'nivel_complejidad_riesgo': TextInput(attrs={'required': 'required', 'tabindex':'1'}), 'factor_inicial': NumberInput(attrs={'required': 'required', 'tabindex':'2'}), 'factor_final': NumberInput(attrs={'required': 'required', 'tabindex':'3'}) } <NEW_LINE> labels = { 'nivel_complejidad_riesgo': ('Nombre del nivel complejidad y riesgo'), 'factor_inicial': ('Factor inicial del nivel de complejidad y riesgo'), 'factor_final': ('Factor final del nivel de complejidad y riesgo'), 'porcentaje': ('Porcentaje del nivel de complejidad y riesgo (%)') }
Docstring: documentation pending
6259905e2ae34c7f260ac74d
class PromptForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Prompt <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.owner = kwargs.pop('owner') if kwargs.has_key('owner') else None <NEW_LINE> super(PromptForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> prompt = super(PromptForm, self).save(commit=False) <NEW_LINE> prompt.owner = self.owner <NEW_LINE> if commit: <NEW_LINE> <INDENT> prompt.save() <NEW_LINE> <DEDENT> return prompt <NEW_LINE> <DEDENT> def clean_difficulty(self): <NEW_LINE> <INDENT> data = self.cleaned_data['difficulty'] <NEW_LINE> if not (1 <= data <= 10): <NEW_LINE> <INDENT> raise forms.ValidationError('Difficulty must be between 1 and 10.') <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def clean_name(self): <NEW_LINE> <INDENT> name=self.cleaned_data['name'] <NEW_LINE> if Prompt.objects.filter(name=name).count(): <NEW_LINE> <INDENT> raise forms.ValidationError("Prompt name not unique") <NEW_LINE> <DEDENT> return name
Provides the form for the prompt object
6259905ebaa26c4b54d50909
class chdir(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.original_dir = os.getcwd() <NEW_LINE> self.path = path <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(self.path) <NEW_LINE> <DEDENT> except (OSError, IOError) as e: <NEW_LINE> <INDENT> if e.errno not in (EEXIST, EISDIR): <NEW_LINE> <INDENT> raise Exception('Error creating %s: %s' % (self.path, e)) <NEW_LINE> <DEDENT> <DEDENT> os.chdir(self.path) <NEW_LINE> os.environ['PWD'] = os.getcwd() <NEW_LINE> <DEDENT> def __exit__(self, etype, evalue, traceback): <NEW_LINE> <INDENT> os.chdir(self.original_dir) <NEW_LINE> os.environ['PWD'] = self.original_dir
Executes the body of a "with chdir(dir)" block in the given directory. Warning: NOT THREAD SAFE
6259905e07f4c71912bb0aa3
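Usage of the chdir context manager above is simple; a short sketch (the directory and command are illustrative):

import subprocess

# Creates build/output if needed, runs the command there, and restores the
# original working directory (and PWD) afterwards, even on failure.
with chdir("build/output"):
    subprocess.check_call(["make", "all"])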
class PPL(namedtuple("Library", "base_path config_dir config_file_type config_file_name")): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> _base_path = kwargs.get(key="base_path", default=os.path.dirname(os.path.abspath((inspect.stack()[0])[1]))) <NEW_LINE> self._base_path = _base_path if _base_path[-1] == "/" else _base_path + "/" <NEW_LINE> config_path = kwargs.get(key="config_path", default="config/") <NEW_LINE> self._config_path = config_path if config_path[0] == "/" else self.base_path + config_path <NEW_LINE> self._config_file_name = kwargs.get(key="config_file_name", default="config") <NEW_LINE> self._config_file_type = kwargs.get(key="config_file_type", default="ini") <NEW_LINE> if self._config_file_type == "yaml": <NEW_LINE> <INDENT> from Configuration import ConfigurationYaml as Class <NEW_LINE> <DEDENT> elif self._config_file_type == "toml": <NEW_LINE> <INDENT> from Configuration import ConfigurationToml as Class <NEW_LINE> <DEDENT> elif self._config_file_type == "json": <NEW_LINE> <INDENT> from Configuration import ConfigurationJson as Class <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> from Configuration import ConfigurationIni as Class
Pegasus-ICT Python Library
6259905e91f36d47f22319c2
class Wordlist(object): <NEW_LINE> <INDENT> def __init__(self, worddict): <NEW_LINE> <INDENT> self.worddict = worddict <NEW_LINE> self._sorted_words = None <NEW_LINE> <DEDENT> @lazy_property <NEW_LINE> def sorted_words(self): <NEW_LINE> <INDENT> return sorted(self.worddict.keys(), key=lambda word: (-self.worddict[word], word)) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.worddict) <NEW_LINE> <DEDENT> def words(self): <NEW_LINE> <INDENT> return list(self.sorted_words) <NEW_LINE> <DEDENT> keys = words <NEW_LINE> def iterwords(self): <NEW_LINE> <INDENT> return iter(self.sorted_words) <NEW_LINE> <DEDENT> iterkeys = iterwords <NEW_LINE> __iter__ = iterwords <NEW_LINE> def iteritems(self): <NEW_LINE> <INDENT> for word in self.sorted_words: <NEW_LINE> <INDENT> yield word, self.worddict[word] <NEW_LINE> <DEDENT> <DEDENT> def get(self, word, default=0): <NEW_LINE> <INDENT> return self.worddict.get(word, default) <NEW_LINE> <DEDENT> def __getitem__(self, word): <NEW_LINE> <INDENT> return self.get(word) <NEW_LINE> <DEDENT> def __contains__(self, word): <NEW_LINE> <INDENT> return word in self.worddict <NEW_LINE> <DEDENT> @lazy_property <NEW_LINE> def max_freq(self): <NEW_LINE> <INDENT> if len(self) == 0: <NEW_LINE> <INDENT> raise ValueError("This list is empty.") <NEW_LINE> <DEDENT> return max(self.worddict.itervalues()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, filename): <NEW_LINE> <INDENT> if filename in CACHE: <NEW_LINE> <INDENT> return CACHE[filename] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> stream = pkg_resources.resource_string( __name__, 'data/wordlists/%s' % filename ).decode('utf-8').splitlines() <NEW_LINE> wordlist = cls._load_stream(stream) <NEW_LINE> CACHE[filename] = wordlist <NEW_LINE> <DEDENT> return wordlist <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _load_stream(cls, stream): <NEW_LINE> <INDENT> worddict = {} <NEW_LINE> for line in stream: <NEW_LINE> <INDENT> word, freq = line.split(u',') <NEW_LINE> worddict[word] = float(freq) <NEW_LINE> <DEDENT> return cls(worddict) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load_new(cls, path_and_filename): <NEW_LINE> <INDENT> filename = os.path.split(path_and_filename)[-1] <NEW_LINE> stream = open(path_and_filename).read().decode('utf-8').splitlines() <NEW_LINE> wordlist = cls._load_new_stream(stream) <NEW_LINE> CACHE[filename] = wordlist <NEW_LINE> out_filename = os.path.join( pkg_resources.resource_filename('metanl', 'data/wordlists'), filename ) <NEW_LINE> wordlist.save(out_filename) <NEW_LINE> return wordlist <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _load_new_stream(cls, stream): <NEW_LINE> <INDENT> worddict = defaultdict(int) <NEW_LINE> for line in stream: <NEW_LINE> <INDENT> word, freq = line.split(u',') <NEW_LINE> word = preprocess_text(word).lower() <NEW_LINE> worddict[word] += float(freq) <NEW_LINE> <DEDENT> return cls(dict(worddict)) <NEW_LINE> <DEDENT> def save(self, filename): <NEW_LINE> <INDENT> out = codecs.open(filename, 'w', encoding='utf-8') <NEW_LINE> for word in self.sorted_words: <NEW_LINE> <INDENT> print >> out, "%s,%1.1f" % (word, self.get(word)) <NEW_LINE> <DEDENT> out.close()
A list mapping words to frequencies, loaded from a .txt file on disk, and cached so that it's loaded at most once.
6259905e4428ac0f6e659ba4
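The Wordlist above orders words by descending frequency and caches file loads; a small usage sketch that constructs one directly from an in-memory dict (bypassing the packaged data files):

wl = Wordlist({"the": 1000.0, "cat": 12.0, "sat": 8.0})
print(wl.words())     # ['the', 'cat', 'sat']  (highest frequency first)
print(wl.get("dog"))  # 0, missing words fall back to the default
print(wl.max_freq)    # 1000.0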
class StringResponseHandler(ResponseHandler): <NEW_LINE> <INDENT> test_key_suffix = 'strings' <NEW_LINE> test_key_value = [] <NEW_LINE> def action(self, test, expected, value=None): <NEW_LINE> <INDENT> expected = test.replace_template(expected) <NEW_LINE> test.assertIn(expected, test.output)
Test for matching strings in the response body.
6259905e7d847024c075da39
class account_statement_from_invoice_lines(osv.osv_memory): <NEW_LINE> <INDENT> _name = "account.statement.from.invoice.lines" <NEW_LINE> _description = "Entries by Statement from Invoices" <NEW_LINE> _columns = { 'line_ids': fields.many2many('account.move.line', 'account_move_line_relation', 'move_id', 'line_id', 'Invoices'), } <NEW_LINE> def populate_statement(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> if context is None: <NEW_LINE> <INDENT> context = {} <NEW_LINE> <DEDENT> statement_id = context.get('statement_id', False) <NEW_LINE> if not statement_id: <NEW_LINE> <INDENT> return {'type': 'ir.actions.act_window_close'} <NEW_LINE> <DEDENT> data = self.read(cr, uid, ids, context=context)[0] <NEW_LINE> line_ids = data['line_ids'] <NEW_LINE> if not line_ids: <NEW_LINE> <INDENT> return {'type': 'ir.actions.act_window_close'} <NEW_LINE> <DEDENT> line_obj = self.pool.get('account.move.line') <NEW_LINE> statement_obj = self.pool.get('account.bank.statement') <NEW_LINE> statement_line_obj = self.pool.get('account.bank.statement.line') <NEW_LINE> currency_obj = self.pool.get('res.currency') <NEW_LINE> line_date = time.strftime('%Y-%m-%d') <NEW_LINE> statement = statement_obj.browse(cr, uid, statement_id, context=context) <NEW_LINE> for line in line_obj.browse(cr, uid, line_ids, context=context): <NEW_LINE> <INDENT> ctx = context.copy() <NEW_LINE> ctx['date'] = line_date <NEW_LINE> amount = 0.0 <NEW_LINE> if line.debit > 0: <NEW_LINE> <INDENT> amount = line.debit <NEW_LINE> <DEDENT> elif line.credit > 0: <NEW_LINE> <INDENT> amount = -line.credit <NEW_LINE> <DEDENT> if line.amount_currency: <NEW_LINE> <INDENT> amount = currency_obj.compute(cr, uid, line.currency_id.id, statement.currency.id, line.amount_currency, context=ctx) <NEW_LINE> <DEDENT> elif (line.invoice and line.invoice.currency_id.id != statement.currency.id): <NEW_LINE> <INDENT> amount = currency_obj.compute(cr, uid, line.invoice.currency_id.id, statement.currency.id, amount, context=ctx) <NEW_LINE> <DEDENT> context.update({'move_line_ids': [line.id], 'invoice_id': line.invoice.id}) <NEW_LINE> statement_line_obj.create(cr, uid, { 'name': line.name or '?', 'amount': amount, 'partner_id': line.partner_id.id, 'statement_id': statement_id, 'ref': line.ref, 'date': statement.date, }, context=context) <NEW_LINE> <DEDENT> return {'type': 'ir.actions.act_window_close'}
Generate Entries by Statement from Invoices
6259905e498bea3a75a59131
class ProductionConfig(Config): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ProductionConfig, self).__init__() <NEW_LINE> self.ENV = 'prod' <NEW_LINE> self.DEBUG = False <NEW_LINE> self.log_level = logging.ERROR
prod config
6259905e0a50d4780f7068f2
class PayloadGenerator(object): <NEW_LINE> <INDENT> def __init__(self, coordinator): <NEW_LINE> <INDENT> self.coordinator = coordinator <NEW_LINE> self._lorem_ipsum = '' <NEW_LINE> <DEDENT> def _load_lorem(self): <NEW_LINE> <INDENT> if self._lorem_ipsum != '': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> with open('lorem.txt', 'r') as lorem: <NEW_LINE> <INDENT> lines = lorem.readlines() <NEW_LINE> <DEDENT> for line in lines: <NEW_LINE> <INDENT> self._lorem_ipsum += line.strip() <NEW_LINE> <DEDENT> <DEDENT> def get_random_text(self, bytecount): <NEW_LINE> <INDENT> out = "" <NEW_LINE> i = 0 <NEW_LINE> self._load_lorem() <NEW_LINE> while getsizeof(out) < bytecount: <NEW_LINE> <INDENT> if i >= len(self._lorem_ipsum): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> <DEDENT> out += self._lorem_ipsum[i] <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> return out
This class is responsible for generating random payloads of different types and sizes.
6259905e21bff66bcd7242cc
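The generator above pads Lorem Ipsum text until sys.getsizeof reaches the requested byte count; usage would look roughly like this (it assumes a lorem.txt file in the working directory, and the coordinator argument is unused by get_random_text, so None stands in):

gen = PayloadGenerator(coordinator=None)
payload = gen.get_random_text(1024)  # string whose getsizeof() is >= 1024 bytes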
class SwaggerCoreTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_backward_compatible_v1_2(self): <NEW_LINE> <INDENT> self.assertEqual(pyswagger.SwaggerAuth, pyswagger.SwaggerSecurity) <NEW_LINE> self.assertEqual(pyswagger.SwaggerApp._create_, pyswagger.SwaggerApp.create) <NEW_LINE> <DEDENT> @httpretty.activate <NEW_LINE> def test_auto_schemes(self): <NEW_LINE> <INDENT> data = None <NEW_LINE> with open(os.path.join(get_test_data_folder( version='2.0', which=os.path.join('schema', 'model') ), 'swagger.json')) as f: <NEW_LINE> <INDENT> data = f.read() <NEW_LINE> <DEDENT> httpretty.register_uri( httpretty.GET, 'http://test.com/api-doc/swagger.json', body=data ) <NEW_LINE> app = SwaggerApp._create_('http://test.com/api-doc/swagger.json') <NEW_LINE> self.assertEqual(app.schemes, ['http']) <NEW_LINE> <DEDENT> @httpretty.activate <NEW_LINE> def test_load_from_url_without_file(self): <NEW_LINE> <INDENT> data = None <NEW_LINE> with open(os.path.join(get_test_data_folder( version='2.0', which='wordnik'), 'swagger.json')) as f: <NEW_LINE> <INDENT> data = f.read() <NEW_LINE> <DEDENT> httpretty.register_uri( httpretty.GET, 'http://10.0.0.10:8080/swaggerapi/api/v1beta2', body=data ) <NEW_LINE> app = SwaggerApp.create('http://10.0.0.10:8080/swaggerapi/api/v1beta2') <NEW_LINE> self.assertTrue(app.schemes, ['http']) <NEW_LINE> self.assertTrue(isinstance(app.root, BaseObj)) <NEW_LINE> <DEDENT> def test_no_host_basePath(self): <NEW_LINE> <INDENT> path = get_test_data_folder( version='2.0', which=os.path.join('patch', 'no_host_schemes') ) <NEW_LINE> fu = utils.normalize_url(path) <NEW_LINE> app = SwaggerApp.create(path) <NEW_LINE> req, _ = app.s('t1').get() <NEW_LINE> self.assertEqual(req.url, path+'/t1') <NEW_LINE> self.assertEqual(req.schemes, ['file']) <NEW_LINE> req.prepare(scheme='file', handle_files=False) <NEW_LINE> self.assertEqual(req.url, fu+'/t1') <NEW_LINE> self.assertNotEqual(six.moves.urllib.parse.urlparse(fu).scheme, '') <NEW_LINE> app = SwaggerApp.create(fu) <NEW_LINE> req, _ = app.s('t1').get() <NEW_LINE> self.assertEqual(req.url, path+'/t1') <NEW_LINE> self.assertEqual(req.schemes, ['file']) <NEW_LINE> req.prepare(scheme='file', handle_files=False) <NEW_LINE> self.assertEqual(req.url, fu+'/t1') <NEW_LINE> def _hook(url): <NEW_LINE> <INDENT> return fu <NEW_LINE> <DEDENT> url = 'test.com/api/v1' <NEW_LINE> app = SwaggerApp.load('https://'+url, url_load_hook=_hook) <NEW_LINE> app.prepare() <NEW_LINE> req, _ = app.s('t1').get() <NEW_LINE> self.assertEqual(req.url, url+'/t1') <NEW_LINE> self.assertEqual(req.schemes, ['https']) <NEW_LINE> req.prepare(scheme='https', handle_files=False) <NEW_LINE> self.assertEqual(req.url, 'https://'+url+'/t1')
test core part
6259905ed6c5a102081e378a
class Author_info(models.Model): <NEW_LINE> <INDENT> addr = models.CharField(max_length=32) <NEW_LINE> tel = models.IntegerField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.addr
Table structure for author detail information
6259905e38b623060ffaa383
class BadResponse(dns.exception.FormError): <NEW_LINE> <INDENT> pass
Raised if a query response does not respond to the question asked.
6259905ecb5e8a47e493ccb9
class DmbjItem(scrapy.Item): <NEW_LINE> <INDENT> section_name = scrapy.Field() <NEW_LINE> section_description = scrapy.Field() <NEW_LINE> chapter_name = scrapy.Field() <NEW_LINE> chapter_num = scrapy.Field() <NEW_LINE> chapter_text = scrapy.Field() <NEW_LINE> chapter_date = scrapy.Field()
section_name: name of the section
6259905e3539df3088ecd903
class LocalsDictExecHandle(LocalsDictHandleBase): <NEW_LINE> <INDENT> __slots__ = ("closure_variables",) <NEW_LINE> def __init__(self, locals_name, owner): <NEW_LINE> <INDENT> LocalsDictHandleBase.__init__(self, locals_name=locals_name, owner=owner) <NEW_LINE> self.closure_variables = None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def isFunctionScope(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def isUnoptimizedFunctionScope(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def getLocalsRelevantVariables(self): <NEW_LINE> <INDENT> if self.closure_variables is None: <NEW_LINE> <INDENT> return self.providing.values() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [ variable for variable in self.providing.values() if variable not in self.closure_variables ] <NEW_LINE> <DEDENT> <DEDENT> def registerClosureVariable(self, variable): <NEW_LINE> <INDENT> self.registerProvidedVariable(variable) <NEW_LINE> if self.closure_variables is None: <NEW_LINE> <INDENT> self.closure_variables = set() <NEW_LINE> <DEDENT> self.closure_variables.add(variable) <NEW_LINE> <DEDENT> def unregisterClosureVariable(self, variable): <NEW_LINE> <INDENT> self.unregisterProvidedVariable(variable) <NEW_LINE> variable_name = variable.getName() <NEW_LINE> if variable_name in self.providing: <NEW_LINE> <INDENT> del self.providing[variable_name]
Locals dict of a Python2 function with an exec.
6259905ea8ecb0332587287f
class SecurityGroupNetworkInterface(Model): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'security_rule_associations': {'key': 'securityRuleAssociations', 'type': 'SecurityRuleAssociations'}, } <NEW_LINE> def __init__(self, id=None, security_rule_associations=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.security_rule_associations = security_rule_associations
Network interface and all its associated security rules. :param id: ID of the network interface. :type id: str :param security_rule_associations: :type security_rule_associations: ~azure.mgmt.network.v2017_11_01.models.SecurityRuleAssociations
6259905e16aa5153ce401b45
class Asset(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=255) <NEW_LINE> type = models.CharField(max_length=1, choices=STAT_TYPE_CHOICES) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
Asset to be tracked.
6259905efff4ab517ebcee8d
class Keyword(Token): <NEW_LINE> <INDENT> def __new__(cls, matchString, identChars=None, caseless=None): <NEW_LINE> <INDENT> if len(matchString) == 0: <NEW_LINE> <INDENT> Log.error("Expecting more than one character in keyword") <NEW_LINE> <DEDENT> if caseless: <NEW_LINE> <INDENT> return object.__new__(CaselessKeyword) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return object.__new__(cls) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, matchString, identChars=None, caseless=None): <NEW_LINE> <INDENT> Token.__init__(self) <NEW_LINE> if identChars is None: <NEW_LINE> <INDENT> self.identChars = self.engine.keyword_chars <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.identChars = "".join(sorted(set(identChars))) <NEW_LINE> <DEDENT> self.match = matchString <NEW_LINE> self.parser_name = self.match <NEW_LINE> self.parser_config.mayReturnEmpty = False <NEW_LINE> self.parser_config.mayIndexError = False <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> output = ParserElement.copy(self) <NEW_LINE> output.match = self.match <NEW_LINE> output.identChars = self.identChars <NEW_LINE> return output <NEW_LINE> <DEDENT> def parseImpl(self, string, loc, doActions=True): <NEW_LINE> <INDENT> if string.startswith(self.match, loc): <NEW_LINE> <INDENT> end = loc + len(self.match) <NEW_LINE> try: <NEW_LINE> <INDENT> if string[end] not in self.identChars: <NEW_LINE> <INDENT> return end, ParseResults(self, [self.match]) <NEW_LINE> <DEDENT> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return end, ParseResults(self, [self.match]) <NEW_LINE> <DEDENT> <DEDENT> raise ParseException(self, loc, string)
Token to exactly match a specified string as a keyword, that is, it must be immediately followed by a non-keyword character. Compare with :class:`Literal`: - ``Literal("if")`` will match the leading ``'if'`` in ``'ifAndOnlyIf'``. - ``Keyword("if")`` will not; it will only match the leading ``'if'`` in ``'if x=1'``, or ``'if(y==2)'`` Accepts two optional constructor arguments in addition to the keyword string: - ``identChars`` is a string of characters that would be valid identifier characters, defaulting to all alphanumerics + "_" and "$" - ``caseless`` allows case-insensitive matching, default is ``False``. Example:: Keyword("start").parseString("start") # -> ['start'] Keyword("start").parseString("starting") # -> Exception For case-insensitive matching, use :class:`CaselessKeyword`.
6259905eadb09d7d5dc0bbd2
class MinimaxAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> bestAction = self.maxAgent(gameState, 0) <NEW_LINE> return bestAction <NEW_LINE> <DEDENT> def maxAgent(self, gameState, depth): <NEW_LINE> <INDENT> if gameState.isLose() or gameState.isWin(): <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> actions = gameState.getLegalActions(0) <NEW_LINE> value = float("-inf") <NEW_LINE> values = [] <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> values.append(self.minAgent(gameState.generateSuccessor(0, action), 1, depth)) <NEW_LINE> <DEDENT> if depth == 0: <NEW_LINE> <INDENT> index = max(xrange(len(values)), key=values.__getitem__) <NEW_LINE> return actions[index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return max(values) <NEW_LINE> <DEDENT> <DEDENT> def minAgent(self, gameState, agent, depth): <NEW_LINE> <INDENT> if gameState.isWin() or gameState.isLose(): <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> actions = gameState.getLegalActions(agent) <NEW_LINE> value = float("inf") <NEW_LINE> next_agent = (agent+1) % gameState.getNumAgents() <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> next_State = gameState.generateSuccessor(agent, action) <NEW_LINE> if next_agent == 0: <NEW_LINE> <INDENT> if depth == self.depth - 1: <NEW_LINE> <INDENT> next_value = self.evaluationFunction(next_State) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> next_value = self.maxAgent(next_State, depth + 1) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> next_value = self.minAgent(next_State, next_agent, depth) <NEW_LINE> <DEDENT> value = min(next_value, value) <NEW_LINE> <DEDENT> return value
Your minimax agent (question 2)
6259905e2ae34c7f260ac74f
class FundRaiserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> owner = UserPreviewSerializer(read_only=True) <NEW_LINE> project = serializers.SlugRelatedField(source='project', slug_field='slug') <NEW_LINE> image = ImageSerializerExt() <NEW_LINE> amount = EuroField() <NEW_LINE> amount_donated = EuroField(source='amount_donated', read_only=True) <NEW_LINE> video_html = OEmbedField(source='video_url', maxwidth='560', maxheight='315') <NEW_LINE> meta_data = MetaField( title = 'get_meta_title', image_source = 'image', tweet = 'get_tweet', ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = FundRaiser <NEW_LINE> fields = ('id', 'owner', 'project', 'title', 'description', 'image','created', 'video_html', 'video_url', 'amount', 'amount_donated', 'deadline', 'meta_data')
Serializer to view/create fundraisers
6259905e7d43ff2487427f44
class CoercionExactRealsNumberField(Morphism): <NEW_LINE> <INDENT> def _call_(self, x): <NEW_LINE> <INDENT> return self.codomain().base_ring()(x) * self.codomain().rational(1)
Coercion morphism from a number field to the exact reals over that number field. EXAMPLES:: sage: from pyexactreal import ExactReals sage: R = ExactReals(QQ) sage: R.coerce_map_from(QQ) Generic morphism: From: Rational Field To: Real Numbers as (Rational Field)-Module
6259905e3c8af77a43b68a75
class pricesuggest: <NEW_LINE> <INDENT> def readData(self): <NEW_LINE> <INDENT> with open("algorithmstored") as stored_algorithm: <NEW_LINE> <INDENT> stored_algorithm = json.load(stored_algorithm) <NEW_LINE> print(stored_algorithm) <NEW_LINE> <DEDENT> <DEDENT> def primaryPrice(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def secondaryPrice(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def estimatePrice(self): <NEW_LINE> <INDENT> return self.primaryPrice() + self.secondaryPrice()
Here I am just going to suggest the variables and try to classify them. Primary variables (variables which affect each other): square feet, number of rooms, number of bathrooms. Secondary variables (variables whose effect is constant): pool, location, age (not sure about this one). The rest of the algorithm should work as follows: price = primaryPrice() + secondaryPrice(). primaryPrice is a function that takes all of the primary variables into account and tries to combine them into a price; this will be the harder part to implement. secondaryPrice will be a lot easier to implement; it will be something like: secondaryprice = 0; if pool: secondaryprice += pool_average; ...; return secondaryprice
6259905e3617ad0b5ee077b5
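The docstring above sketches secondaryPrice as a sum of constant adjustments on top of primaryPrice; one way that part could look, with illustrative attribute names and dollar amounts (none of these values come from the dataset):

POOL_AVERAGE = 15000                                      # illustrative constant
LOCATION_PREMIUM = {"downtown": 40000, "suburb": 10000}   # illustrative lookup

def secondary_price(has_pool, location):
    # Each secondary variable adds a fixed amount, independent of the others.
    price = 0.0
    if has_pool:
        price += POOL_AVERAGE
    price += LOCATION_PREMIUM.get(location, 0)
    return price

# estimatePrice would then be primary_price(...) + secondary_price(...)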
class MyAssistant: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._task = threading.Thread(target=self._run_task) <NEW_LINE> self._can_start_conversation = False <NEW_LINE> self._assistant = None <NEW_LINE> self._board = Board() <NEW_LINE> self._board.button.when_pressed = self._on_button_pressed <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._task.start() <NEW_LINE> <DEDENT> def _run_task(self): <NEW_LINE> <INDENT> credentials = auth_helpers.get_assistant_credentials() <NEW_LINE> with Assistant(credentials) as assistant: <NEW_LINE> <INDENT> self._assistant = assistant <NEW_LINE> for event in assistant.start(): <NEW_LINE> <INDENT> self._process_event(event) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _process_event(self, event): <NEW_LINE> <INDENT> logging.info(event) <NEW_LINE> if event.type == EventType.ON_START_FINISHED: <NEW_LINE> <INDENT> self._board.led.status = Led.BEACON_DARK <NEW_LINE> self._can_start_conversation = True <NEW_LINE> logging.info('Say "OK, Google" or press the button, then speak. ' 'Press Ctrl+C to quit...') <NEW_LINE> <DEDENT> elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: <NEW_LINE> <INDENT> self._can_start_conversation = False <NEW_LINE> self._board.led.state = Led.ON <NEW_LINE> <DEDENT> elif event.type == EventType.ON_END_OF_UTTERANCE: <NEW_LINE> <INDENT> self._board.led.state = Led.PULSE_QUICK <NEW_LINE> <DEDENT> elif (event.type == EventType.ON_CONVERSATION_TURN_FINISHED or event.type == EventType.ON_CONVERSATION_TURN_TIMEOUT or event.type == EventType.ON_NO_RESPONSE): <NEW_LINE> <INDENT> self._board.led.state = Led.BEACON_DARK <NEW_LINE> self._can_start_conversation = True <NEW_LINE> <DEDENT> elif event.type == EventType.ON_ASSISTANT_ERROR and event.args and event.args['is_fatal']: <NEW_LINE> <INDENT> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> def _on_button_pressed(self): <NEW_LINE> <INDENT> if self._can_start_conversation: <NEW_LINE> <INDENT> self._assistant.start_conversation()
An assistant that runs in the background. The Google Assistant Library event loop blocks the running thread entirely. To support the button trigger, we need to run the event loop in a separate thread. Otherwise, the on_button_pressed() method will never get a chance to be invoked.
6259905e76e4537e8c3f0bf5
class ValueSet_IncludeSchema: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_schema( max_nesting_depth: Optional[int] = 6, nesting_depth: int = 0, nesting_list: List[str] = [], max_recursion_limit: Optional[int] = 2, include_extension: Optional[bool] = False, extension_fields: Optional[List[str]] = [ "valueBoolean", "valueCode", "valueDate", "valueDateTime", "valueDecimal", "valueId", "valueInteger", "valuePositiveInt", "valueString", "valueTime", "valueUnsignedInt", "valueUri", "valueQuantity", ], extension_depth: int = 0, max_extension_depth: Optional[int] = 2, ) -> Union[StructType, DataType]: <NEW_LINE> <INDENT> from spark_fhir_schemas.stu3.complex_types.extension import ExtensionSchema <NEW_LINE> from spark_fhir_schemas.stu3.complex_types.valueset_concept import ( ValueSet_ConceptSchema, ) <NEW_LINE> from spark_fhir_schemas.stu3.complex_types.valueset_filter import ( ValueSet_FilterSchema, ) <NEW_LINE> if ( max_recursion_limit and nesting_list.count("ValueSet_Include") >= max_recursion_limit ) or (max_nesting_depth and nesting_depth >= max_nesting_depth): <NEW_LINE> <INDENT> return StructType([StructField("id", StringType(), True)]) <NEW_LINE> <DEDENT> my_nesting_list: List[str] = nesting_list + ["ValueSet_Include"] <NEW_LINE> schema = StructType( [ StructField("id", StringType(), True), StructField( "extension", ArrayType( ExtensionSchema.get_schema( max_nesting_depth=max_nesting_depth, nesting_depth=nesting_depth + 1, nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth, ) ), True, ), StructField("system", StringType(), True), StructField("version", StringType(), True), StructField( "concept", ArrayType( ValueSet_ConceptSchema.get_schema( max_nesting_depth=max_nesting_depth, nesting_depth=nesting_depth + 1, nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth, ) ), True, ), StructField( "filter", ArrayType( ValueSet_FilterSchema.get_schema( max_nesting_depth=max_nesting_depth, nesting_depth=nesting_depth + 1, nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth, ) ), True, ), StructField("valueSet", ArrayType(StringType()), True), ] ) <NEW_LINE> if not include_extension: <NEW_LINE> <INDENT> schema.fields = [ c if c.name != "extension" else StructField("extension", StringType(), True) for c in schema.fields ] <NEW_LINE> <DEDENT> return schema
A value set specifies a set of codes drawn from one or more code systems.
6259905ebe8e80087fbc06ee
class ModifyMode(AuditMode): <NEW_LINE> <INDENT> def __init__(self, glob_dic, logger): <NEW_LINE> <INDENT> AuditMode.__init__(self, glob_dic, logger) <NEW_LINE> if self.dev == 'dev_lun': <NEW_LINE> <INDENT> self.only_mode = True <NEW_LINE> <DEDENT> if self.select: <NEW_LINE> <INDENT> self.flag_mode = True <NEW_LINE> self.flag_mode_arg = select_choice(['BCV', 'SRDF'], type='Flag to Modify') <NEW_LINE> <DEDENT> <DEDENT> def mode_check(self): <NEW_LINE> <INDENT> if self.flag_mode: <NEW_LINE> <INDENT> self.flag_mode_arg = self.flag_mode_arg.upper() <NEW_LINE> if not self.lun_cls_lst: <NEW_LINE> <INDENT> mprint('No Lun to Modify Find', 'err', logger=self.logger) <NEW_LINE> <DEDENT> self.lun_to_modify_lst = [] <NEW_LINE> for lun in self.lun_cls_lst: <NEW_LINE> <INDENT> if self.flag_mode_arg == 'BCV': <NEW_LINE> <INDENT> if not lun.bcv_flag: <NEW_LINE> <INDENT> lun.info = True <NEW_LINE> self.lun_to_modify_lst.append(lun.id) <NEW_LINE> <DEDENT> <DEDENT> elif self.flag_mode_arg == 'SRDF': <NEW_LINE> <INDENT> if not lun.srdf_cap: <NEW_LINE> <INDENT> lun.info = True <NEW_LINE> self.lun_to_modify_lst.append(lun.id) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> AuditMode.info_display(self) <NEW_LINE> if not self.lun_to_modify_lst: <NEW_LINE> <INDENT> mprint('All Luns have Already {0} Flag'.format(self.flag_mode_arg), 'err', logger=self.logger) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mprint('{0} : {1} Lun(s) with No {2} Flag'.format(color_str(' ', 'rev'), len(self.lun_to_modify_lst), self.flag_mode_arg), tbc=1) <NEW_LINE> if self.logger: <NEW_LINE> <INDENT> self.logger.info('[DEV:FLAG] {0} Lun(s) Find with No {1} Flag'.format(len(self.lun_to_modify_lst), self.flag_mode_arg)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def mode_exec(self): <NEW_LINE> <INDENT> if self.flag_mode: <NEW_LINE> <INDENT> if self.flag_mode_arg == 'BCV': <NEW_LINE> <INDENT> change_flag = ['convert dev {0} to BCV+TDEV;'.format(l) for l in self.lun_to_modify_lst] <NEW_LINE> syntax_type = 'convert dev <Dev> to BCV+TDEV' <NEW_LINE> <DEDENT> elif self.flag_mode_arg == 'SRDF': <NEW_LINE> <INDENT> change_flag = ['set dev {0} attribute=dyn_rdf;'.format(l) for l in self.lun_to_modify_lst] <NEW_LINE> syntax_type = 'set dev <Dev> attribute=dyn_rdf' <NEW_LINE> <DEDENT> <DEDENT> for mode in ['display', 'exec']: <NEW_LINE> <INDENT> cmd_display_header(mode, self.error_dict, logger=self.logger) <NEW_LINE> if self.flag_mode: <NEW_LINE> <INDENT> symconf_exec( change_flag, 'Change {0} Flag'.format(self.flag_mode_arg), mode, 'Lun(s) to Modify [{0} Dev] (Syntax : {1})'.format(len(change_flag), syntax_type), self.sid, self.tmp_file, verbose=self.verbose_mode, logger=self.logger, export=self.export ) <NEW_LINE> <DEDENT> cmd_display_footer(mode, self.warning_dict, self.mode, logger=self.logger, start_time=self.script_start_time, nop=self.no_prompt)
Modify Mode class
6259905ed268445f2663a691
class Sentence: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.entities = [] <NEW_LINE> self.lastToken = None <NEW_LINE> self.raw = "" <NEW_LINE> self.rawCorrected = "" <NEW_LINE> <DEDENT> def addToken(self, token, emphasised, initial=None, alternative=None): <NEW_LINE> <INDENT> emphasis = "true" if emphasised else None <NEW_LINE> attrs = {"emphasis":emphasis,"initial":initial,"alternative":alternative} <NEW_LINE> attrs = {k:attrs[k] for k in attrs if attrs[k]} <NEW_LINE> self.entities.append(("w", token, attrs)) <NEW_LINE> corrected = alternative if alternative else token <NEW_LINE> self.lastToken = corrected <NEW_LINE> self.raw += token <NEW_LINE> self.rawCorrected += corrected <NEW_LINE> <DEDENT> def addRawChar(self, c): <NEW_LINE> <INDENT> self.raw += c <NEW_LINE> self.rawCorrected += c <NEW_LINE> <DEDENT> def addStamp(self, stamp, timing): <NEW_LINE> <INDENT> self.entities.append(("time", {"id":stamp,"value":timing})) <NEW_LINE> <DEDENT> def getStamps(self): <NEW_LINE> <INDENT> return [x for x in self.entities if x[0]=="time"] <NEW_LINE> <DEDENT> def getTokens(self): <NEW_LINE> <INDENT> return [x for x in self.entities if x[0]=="w"] <NEW_LINE> <DEDENT> def getNbStamps(self): <NEW_LINE> <INDENT> return len(self.getStamps()) <NEW_LINE> <DEDENT> def getNbTokens(self): <NEW_LINE> <INDENT> return len(self.getTokens()) <NEW_LINE> <DEDENT> def isEmphasised(self): <NEW_LINE> <INDENT> foundToken = False <NEW_LINE> for x in self.entities: <NEW_LINE> <INDENT> if x[0]=="w" and "emphasis" not in x[2]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> foundToken=True <NEW_LINE> <DEDENT> return foundToken <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return bool(self.entities) <NEW_LINE> <DEDENT> def getEntities(self): <NEW_LINE> <INDENT> return self.entities
Representation of a tokenised sentence (with time stamps).
6259905e21bff66bcd7242ce
class ConfigFile(object): <NEW_LINE> <INDENT> def __init__(self, config_file, ids=None): <NEW_LINE> <INDENT> self.ids = ids or [] <NEW_LINE> self.config = {} <NEW_LINE> if isinstance(config_file, basestring): <NEW_LINE> <INDENT> with open(config_file, 'r') as f: <NEW_LINE> <INDENT> self.load_from_file(f) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.load_from_file(config_file) <NEW_LINE> <DEDENT> <DEDENT> def load_from_file(self, f): <NEW_LINE> <INDENT> for line in f.readlines(): <NEW_LINE> <INDENT> if not line.startswith("#"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> key, value = line.strip().split('=', 1) <NEW_LINE> k, v = key.strip(), value.strip() <NEW_LINE> try: <NEW_LINE> <INDENT> self.config[k] = ast.literal_eval(v) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> if ((v.startswith('"') and v.endswith('"')) or (v.startswith("'") and v.endswith("'"))): <NEW_LINE> <INDENT> self.config[k] = v[1:-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.config[k] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get(self, key, default=None, ids=[]): <NEW_LINE> <INDENT> ids = ids or [] <NEW_LINE> for id in ids + self.ids: <NEW_LINE> <INDENT> v = "%%%s.%s" % (id, key) <NEW_LINE> if v in self.config: <NEW_LINE> <INDENT> return self.config[v] <NEW_LINE> <DEDENT> <DEDENT> return self.config.get(key, default) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "ConfigFile(%s) : ids - %s\n%s" % (self.filename, self.ids, self.config)
A configuration file
6259905ed6c5a102081e378c
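A small, hypothetical usage sketch for the ConfigFile entry above (the file name and keys are invented for illustration); it shows how the '%id.key' entries take precedence over plain keys for the configured ids.

    # app.conf (hypothetical contents):
    #   timeout = 30
    #   %worker.timeout = 5
    cfg = ConfigFile('app.conf', ids=['worker'])
    cfg.get('timeout')               # 5  -- the '%worker.timeout' entry wins for the 'worker' id
    cfg.get('timeout', ids=['db'])   # 5  -- explicit ids are tried first, then self.ids
    cfg.get('missing', default=1)    # 1  -- plain fallback to the default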
class TestCategories(base.BaseTestCase): <NEW_LINE> <INDENT> PATH = '/api/listings/categories' <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> responses.start() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> responses.stop() <NEW_LINE> responses.reset() <NEW_LINE> <DEDENT> def test_listing_types(self): <NEW_LINE> <INDENT> resource = 'listing_types' <NEW_LINE> responses.add( responses.GET, utils.get_url(self.URL, resource), body=utils.load_fixture('category_listing_types_en.json') ) <NEW_LINE> client = ListingResource(self.BASE_URL) <NEW_LINE> category = client.category(resource) <NEW_LINE> expected = [ ('ll', 'Land Listing'), ('rl', 'Residential Lease'), ('rs', 'Residential Sale'), ] <NEW_LINE> assert category == expected <NEW_LINE> <DEDENT> def test_view_types(self): <NEW_LINE> <INDENT> resource = 'view_types' <NEW_LINE> responses.add( responses.GET, utils.get_url(self.URL, resource), body=utils.load_fixture('category_view_types_en.json') ) <NEW_LINE> client = ListingResource(self.BASE_URL) <NEW_LINE> category = client.category(resource) <NEW_LINE> expected = [ ('beach_view', 'Beach View'), ] <NEW_LINE> assert category == expected
Listing categories test case.
6259905ea219f33f346c7e6f
@final <NEW_LINE> class MultipleIfsInComprehensionViolation(ASTViolation): <NEW_LINE> <INDENT> error_template = 'Found list comprehension with multiple `if`s' <NEW_LINE> code = 307
Forbid multiple ``if`` statements inside list comprehensions. Reasoning: It is very hard to read multiple ``if`` statements inside a list comprehension, since it is hard to tell whether all of them should pass or fail. Solution: Use a single ``if`` statement inside list comprehensions. Use ``filter()`` if you have complicated logic. Example:: # Correct: nodes = [node for node in html if node not in {'b', 'i'}] # Wrong: nodes = [node for node in html if node != 'b' if node != 'i'] .. versionadded:: 0.1.0
6259905e8e71fb1e983bd134
class Meta(object): <NEW_LINE> <INDENT> model = OrganizationElection <NEW_LINE> fields = ['email_wrapper']
Meta options for form
6259905e435de62698e9d46f
class RecRel: <NEW_LINE> <INDENT> def __init__(self, a, b, k, p, fmt): <NEW_LINE> <INDENT> self.a = a <NEW_LINE> self.b = b <NEW_LINE> self.k = k <NEW_LINE> self.p = p <NEW_LINE> self.format = fmt <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.format == 1: <NEW_LINE> <INDENT> return "T(n) = {} T(n/{}) + (n^{}(logn)^{})".format(self.a, self.b, self.k, self.p) <NEW_LINE> <DEDENT> elif self.format == 2: <NEW_LINE> <INDENT> if self.b==0: <NEW_LINE> <INDENT> return "T(n) = T(n-{}) + (n^{}(logn)^{})".format(self.a, self.k, self.p) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "T(n) = T(n-{}) + T(n-{}) + (n^{}(logn)^{})".format(self.a, self.b, self.k, self.p)
A structure storing the final results of a recurrence relation.
6259905ea17c0f6771d5d6d8
class EDL21Entity(Entity): <NEW_LINE> <INDENT> def __init__(self, obis, name, telegram): <NEW_LINE> <INDENT> self._obis = obis <NEW_LINE> self._name = name <NEW_LINE> self._telegram = telegram <NEW_LINE> self._min_time = MIN_TIME_BETWEEN_UPDATES <NEW_LINE> self._last_update = utcnow() <NEW_LINE> self._state_attrs = { "status": "status", "valTime": "val_time", "scaler": "scaler", "valueSignature": "value_signature", } <NEW_LINE> self._async_remove_dispatcher = None <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> @callback <NEW_LINE> def handle_telegram(telegram): <NEW_LINE> <INDENT> if self._obis != telegram.get("objName"): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self._telegram == telegram: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> now = utcnow() <NEW_LINE> if now - self._last_update < self._min_time: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._telegram = telegram <NEW_LINE> self._last_update = now <NEW_LINE> self.async_write_ha_state() <NEW_LINE> <DEDENT> self._async_remove_dispatcher = async_dispatcher_connect( self.hass, SIGNAL_EDL21_TELEGRAM, handle_telegram ) <NEW_LINE> <DEDENT> async def async_will_remove_from_hass(self): <NEW_LINE> <INDENT> if self._async_remove_dispatcher: <NEW_LINE> <INDENT> self._async_remove_dispatcher() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def should_poll(self) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> return self._obis <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self) -> Optional[str]: <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self) -> str: <NEW_LINE> <INDENT> return self._telegram.get("value") <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> return { self._state_attrs[k]: v for k, v in self._telegram.items() if k in self._state_attrs } <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self._telegram.get("unit") <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return ICON_POWER
Entity reading values from EDL21 telegram.
6259905e1f037a2d8b9e53a0
class WorkshopListForm(forms.Form): <NEW_LINE> <INDENT> state = forms.ModelMultipleChoiceField( label="State", required=False, queryset='') <NEW_LINE> level = forms.MultipleChoiceField( label="Level", required=False, choices=WorkshopLevel.CHOICES) <NEW_LINE> section = forms.ModelMultipleChoiceField( label="Section", required=False, queryset='') <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> user = kwargs.pop('user') <NEW_LINE> super(WorkshopListForm, self).__init__(*args, **kwargs) <NEW_LINE> self.fields['state'].queryset = self.get_all_states(user) <NEW_LINE> self.fields['section'].queryset = WorkshopSections.objects.all() <NEW_LINE> <DEDENT> def get_all_locations(self, user): <NEW_LINE> <INDENT> if Profile.is_admin(user): <NEW_LINE> <INDENT> return Location.objects.all() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return user.profile.interested_locations.all() <NEW_LINE> <DEDENT> <DEDENT> def get_all_states(self, user): <NEW_LINE> <INDENT> if Profile.is_admin(user): <NEW_LINE> <INDENT> return State.objects.all() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return user.profile.interested_states.all()
Form to filter workshop list
6259905e45492302aabfdb43
class Lzma(Codec): <NEW_LINE> <INDENT> def compress(self, stream): <NEW_LINE> <INDENT> if lzma is None: <NEW_LINE> <INDENT> return Codec.compress(self, stream) <NEW_LINE> <DEDENT> return BytesIO(lzma.compress(stream.read())) <NEW_LINE> <DEDENT> def decompress(self, stream): <NEW_LINE> <INDENT> if lzma is None: <NEW_LINE> <INDENT> return Codec.decompress(self, stream) <NEW_LINE> <DEDENT> return BytesIO(lzma.decompress(stream.read()))
Implementation of :class:`.Codec` for lzma compression.
6259905e8e71fb1e983bd135
class TLSPlugin(Plugin): <NEW_LINE> <INDENT> type = "tls" <NEW_LINE> def session(self, server_application): <NEW_LINE> <INDENT> raise NotImplementedError
This is the base class from which all supported TLS session providers will inherit.
6259905ed7e4931a7ef3d6b9
class MacroAction(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.initialized = False <NEW_LINE> self.num_actions = 0 <NEW_LINE> self.actions = [] <NEW_LINE> self.parameters = [] <NEW_LINE> self.flat_parameters = [] <NEW_LINE> self.count = 0 <NEW_LINE> self.type = 'dbmp' <NEW_LINE> self.evaluation = {} <NEW_LINE> self.domain = '' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{}, {}".format(self.actions, self.parameters) <NEW_LINE> <DEDENT> def from_db(self, db_macro): <NEW_LINE> <INDENT> self.initialized = True <NEW_LINE> self.actions = db_macro['actions'] <NEW_LINE> self.num_actions = len(self.actions) <NEW_LINE> self.parameters = db_macro['parameters'] <NEW_LINE> self.flat_parameters = list(itertools.chain.from_iterable(self.parameters)) <NEW_LINE> self.parameter_reduction = db_macro['parameter_reduction'] <NEW_LINE> self.evaluation = db_macro['evaluation'] <NEW_LINE> self.count = db_macro['count'] <NEW_LINE> self.domain = db_macro['domain'] <NEW_LINE> <DEDENT> def generate(self, domain_file_path, actions, parameters): <NEW_LINE> <INDENT> self.actions = actions <NEW_LINE> self.num_actions = len(actions) <NEW_LINE> self.parameters = parameters <NEW_LINE> self.flat_parameters = list(itertools.chain.from_iterable(parameters)) <NEW_LINE> self.parameter_reduction = len(self.flat_parameters) - max(self.flat_parameters) <NEW_LINE> self.generate_with_run(domain_file_path, actions, parameters) <NEW_LINE> <DEDENT> def generate_with_pyswip(self, domain_file_path, actions, parameters): <NEW_LINE> <INDENT> macro_file = tempfile.NamedTemporaryFile() <NEW_LINE> prolog = pyswip.Prolog() <NEW_LINE> prolog.consult('../generation/macro_generator.pl') <NEW_LINE> query_string = 'generate_macro_to_file({}, {}, {}, {})'.format( domain_file_path, actions, parameters, macro_file.name) <NEW_LINE> prolog_query = prolog.query(query_string) <NEW_LINE> query_result = next(prolog_query) <NEW_LINE> self.initialized = True <NEW_LINE> <DEDENT> def generate_with_run(self, domain_file_path, actions, parameters): <NEW_LINE> <INDENT> self.actions = actions <NEW_LINE> macro_file = tempfile.NamedTemporaryFile(mode='r') <NEW_LINE> query = 'generate_macro_to_file("{}", {}, {}, "{}").'.format( domain_file_path, actions, parameters, macro_file.name) <NEW_LINE> try: <NEW_LINE> <INDENT> subprocess.call([ "swipl", "-q", "-l", "../generation/macro_generator.pl", "-t", query ], timeout=10) <NEW_LINE> self.macro = macro_file.read() <NEW_LINE> self.initialized = True <NEW_LINE> <DEDENT> except subprocess.TimeoutExpired: <NEW_LINE> <INDENT> pass
A macro with all its properties.
6259905e97e22403b383c577
class LoopDevice(ndb.Model): <NEW_LINE> <INDENT> ctime = ndb.DateTimeProperty(auto_now_add=True) <NEW_LINE> api_secret = ndb.TextProperty() <NEW_LINE> raw_data = ndb.JsonProperty()
sha1(API secret) in hex is the id
6259905e0c0af96317c57894
class AppEngineAPI(api.API): <NEW_LINE> <INDENT> def __init__(self, base_url="api.eveonline.com", cache=None, api_key=None): <NEW_LINE> <INDENT> cache = cache or AppEngineCache() <NEW_LINE> super(AppEngineAPI, self).__init__(base_url=base_url, cache=cache, api_key=api_key) <NEW_LINE> <DEDENT> @ndb.tasklet <NEW_LINE> def get_async(self, path, params=None): <NEW_LINE> <INDENT> params = params or {} <NEW_LINE> params = dict((k, api._clean(v)) for k,v in params.iteritems()) <NEW_LINE> if self.api_key: <NEW_LINE> <INDENT> params['keyID'] = self.api_key[0] <NEW_LINE> params['vCode'] = self.api_key[1] <NEW_LINE> <DEDENT> key = self._cache_key(path, params) <NEW_LINE> response = yield self.cache.get_async(key) <NEW_LINE> cached = response is not None <NEW_LINE> if not cached: <NEW_LINE> <INDENT> params = urlencode(params) <NEW_LINE> full_path = "https://%s/%s.xml.aspx" % (self.base_url, path) <NEW_LINE> response = yield self.send_request_async(full_path, params) <NEW_LINE> <DEDENT> tree = ElementTree.fromstring(response) <NEW_LINE> current_time = api.get_ts_value(tree, 'currentTime') <NEW_LINE> expires_time = api.get_ts_value(tree, 'cachedUntil') <NEW_LINE> self._set_last_timestamps(current_time, expires_time) <NEW_LINE> if not cached: <NEW_LINE> <INDENT> yield self.cache.put_async(key, response, expires_time - current_time) <NEW_LINE> <DEDENT> error = tree.find('error') <NEW_LINE> if error is not None: <NEW_LINE> <INDENT> code = error.attrib['code'] <NEW_LINE> message = error.text.strip() <NEW_LINE> exc = api.APIError(code, message, current_time, expires_time) <NEW_LINE> raise exc <NEW_LINE> <DEDENT> result = tree.find('result') <NEW_LINE> raise ndb.Return(api.APIResult(result, current_time, expires_time)) <NEW_LINE> <DEDENT> def send_request(self, url, params): <NEW_LINE> <INDENT> return self.send_request_async(url, params).get_result() <NEW_LINE> <DEDENT> @ndb.tasklet <NEW_LINE> def send_request_async(self, url, params): <NEW_LINE> <INDENT> ctx = ndb.get_context() <NEW_LINE> result = yield ctx.urlfetch( url=url, payload=params, method=urlfetch.POST if params else urlfetch.GET, headers={'Content-Type': 'application/x-www-form-urlencoded'} if params else {} ) <NEW_LINE> raise ndb.Return(result.content)
Subclass of api.API that is compatible with Google Appengine.
6259905e435de62698e9d470
class ConfigurationSaleMethod(ModelSQL, ValueMixin): <NEW_LINE> <INDENT> __name__ = 'sale.configuration.sale_method' <NEW_LINE> sale_invoice_method = sale_invoice_method <NEW_LINE> get_sale_invoice_methods = get_sale_methods('invoice_method') <NEW_LINE> sale_shipment_method = sale_shipment_method <NEW_LINE> get_sale_shipment_methods = get_sale_methods('shipment_method') <NEW_LINE> @classmethod <NEW_LINE> def __register__(cls, module_name): <NEW_LINE> <INDENT> TableHandler = backend.get('TableHandler') <NEW_LINE> exist = TableHandler.table_exist(cls._table) <NEW_LINE> super(ConfigurationSaleMethod, cls).__register__(module_name) <NEW_LINE> if not exist: <NEW_LINE> <INDENT> cls._migrate_property([], [], []) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _migrate_property(cls, field_names, value_names, fields): <NEW_LINE> <INDENT> field_names.extend(['sale_invoice_method', 'sale_shipment_method']) <NEW_LINE> value_names.extend(['sale_invoice_method', 'sale_shipment_method']) <NEW_LINE> migrate_property( 'sale.configuration', field_names, cls, value_names, fields=fields) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def default_sale_invoice_method(cls): <NEW_LINE> <INDENT> return 'order' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def default_sale_shipment_method(cls): <NEW_LINE> <INDENT> return 'order'
Sale Configuration Sale Method
6259905e45492302aabfdb44
class LogGaussian: <NEW_LINE> <INDENT> def __call__(self, x: torch.Tensor, mu: torch.Tensor, var: torch.Tensor): <NEW_LINE> <INDENT> assert (var >= 0).all(), "variance < 0 !" <NEW_LINE> logli = -0.5 * (var.mul(2*np.pi) + 1e-6).log() - (x-mu).pow(2).div(var.mul(2.0) + 1e-6) <NEW_LINE> if (logli >= 0).any(): <NEW_LINE> <INDENT> logli = torch.clamp(logli, max=0.0-1e-6) <NEW_LINE> <DEDENT> assert (logli < 0).all(), "log of probability must < 0" <NEW_LINE> return logli.sum(1).mean().mul(-1)
Calculate the negative log likelihood of a normal distribution. Treat Q(c|x) as a factored Gaussian. Custom loss for the Q network.
6259905e7047854f46340a27
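A minimal sketch of calling the LogGaussian loss above (requires PyTorch; the shapes are illustrative only). With x equal to mu and unit variance, each element contributes roughly 0.919 to the per-sample negative log density, so the result here is about 1.84.

    import torch

    nll = LogGaussian()
    x = torch.zeros(4, 2)
    mu = torch.zeros(4, 2)
    var = torch.ones(4, 2)
    loss = nll(x, mu, var)   # about 1.84: mean over the batch of the summed negative log density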
class DatabaseClient(object): <NEW_LINE> <INDENT> executable_name = None <NEW_LINE> def __init__(self, connection): <NEW_LINE> <INDENT> self.connection = connection <NEW_LINE> <DEDENT> def runshell(self): <NEW_LINE> <INDENT> raise NotImplementedError('subclasses of BaseDatabaseClient must provide a runshell() method')
This class encapsulates all backend-specific methods for opening a client shell.
6259905e498bea3a75a59133
class singleton_decorator(object): <NEW_LINE> <INDENT> def __init__(self, class_): <NEW_LINE> <INDENT> self.class_ = class_ <NEW_LINE> self.instance = None <NEW_LINE> <DEDENT> def __call__(self, *a, **ad): <NEW_LINE> <INDENT> if self.instance == None: <NEW_LINE> <INDENT> self.instance = self.class_(*a, **ad) <NEW_LINE> <DEDENT> return self.instance
Singleton pattern decorator. There will be only one instance of the decorated class. The decorator always returns the same instance.
6259905e76e4537e8c3f0bf7
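A short sketch of the singleton_decorator above: decorating a class replaces it with the decorator object, so every "call" of the class name returns the one cached instance.

    @singleton_decorator
    class Settings(object):
        def __init__(self):
            self.values = {}

    a = Settings()
    b = Settings()
    assert a is b   # both names point at the single shared instance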
class ReleaseRelationshipType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ReleaseRelationshipType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('http://ddex.net/xml/20120214/ddex.xsd', 3646, 3) <NEW_LINE> _Documentation = 'A ddex:Type of relationship between two ddex:Releases.'
A ddex:Type of relationship between two ddex:Releases.
6259905e0a50d4780f7068f4
class TaskAddOptions(Model): <NEW_LINE> <INDENT> _attribute_map = { 'timeout': {'key': '', 'type': 'int'}, 'client_request_id': {'key': '', 'type': 'str'}, 'return_client_request_id': {'key': '', 'type': 'bool'}, 'ocp_date': {'key': '', 'type': 'rfc-1123'}, } <NEW_LINE> def __init__(self, *, timeout: int=30, client_request_id: str=None, return_client_request_id: bool=False, ocp_date=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(TaskAddOptions, self).__init__(**kwargs) <NEW_LINE> self.timeout = timeout <NEW_LINE> self.client_request_id = client_request_id <NEW_LINE> self.return_client_request_id = return_client_request_id <NEW_LINE> self.ocp_date = ocp_date
Additional parameters for add operation. :param timeout: The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. Default value: 30 . :type timeout: int :param client_request_id: The caller-generated request identity, in the form of a GUID with no decoration such as curly braces, e.g. 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0. :type client_request_id: str :param return_client_request_id: Whether the server should return the client-request-id in the response. Default value: False . :type return_client_request_id: bool :param ocp_date: The time the request was issued. Client libraries typically set this to the current system clock time; set it explicitly if you are calling the REST API directly. :type ocp_date: datetime
6259905e4f6381625f199fd8
class GlobalGrammarProcessor: <NEW_LINE> <INDENT> def __init__(self, *, properties: Dict[str, Any], project: project.Project) -> None: <NEW_LINE> <INDENT> self._project = project <NEW_LINE> self._build_package_grammar = properties.get('build-packages', []) <NEW_LINE> self.__build_packages = set() <NEW_LINE> <DEDENT> def get_build_packages(self) -> Set[str]: <NEW_LINE> <INDENT> if not self.__build_packages: <NEW_LINE> <INDENT> processor = grammar.GrammarProcessor( self._build_package_grammar, self._project, repo.Repo.build_package_is_valid, transformer=package_transformer) <NEW_LINE> self.__build_packages = processor.process() <NEW_LINE> <DEDENT> return self.__build_packages
Process global properties that support grammar. Build packages example: >>> import snapcraft >>> from snapcraft import repo >>> processor = GlobalGrammarProcessor( ... properties={'build-packages': [{'try': ['hello']}]}, ... project=snapcraft.project.Project()) >>> processor.get_build_packages() {'hello'}
6259905e462c4b4f79dbd070
class Material(Element): <NEW_LINE> <INDENT> def __init__( self, ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._attribute_names = []
This element sets the attributes of the dummy material element of the defaults class. All material attributes are available here except: name, class.
6259905ebe8e80087fbc06f0
class FrameSource(BaseSource): <NEW_LINE> <INDENT> def __init__(self, resolution=None, resolution_units=None, *args, **kwargs): <NEW_LINE> <INDENT> if resolution is None: <NEW_LINE> <INDENT> resolution = 1 <NEW_LINE> <DEDENT> if resolution_units is None: <NEW_LINE> <INDENT> resolution_units = 'pix' <NEW_LINE> <DEDENT> self._resolution = np.array(resolution) <NEW_LINE> self._resolution_units = resolution_units <NEW_LINE> super(FrameSource, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def resolution(self): <NEW_LINE> <INDENT> return self._resolution <NEW_LINE> <DEDENT> @property <NEW_LINE> def resolution_units(self): <NEW_LINE> <INDENT> return self._resolution_units <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_frame(self, frame_num): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_frame_metadata(self, frame_num, key): <NEW_LINE> <INDENT> raise KeyError() <NEW_LINE> <DEDENT> def get_metadata(self, key): <NEW_LINE> <INDENT> raise KeyError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __len__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __getitem__(self, arg): <NEW_LINE> <INDENT> return self.get_frame(arg) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> def kwarg_dict(self): <NEW_LINE> <INDENT> dd = super(FrameSource, self).kwarg_dict <NEW_LINE> dd.update({'resolution': self.resolution, 'resolution_units': self.resolution_units}) <NEW_LINE> return dd
An ABC for the interface to read in images. Images are N-D arrays of any type. All handlers are assumed to wrap a sequence of images, but may be length 1. The first pass will only have a single access function `get_frame` which will return whatever the natural 'frame' is. More specific sub-classes should provide a way to more sensibly slice volumes (e.g. iterate over sinograms or projections). Still up in the air on whether we want to do this with a few classes with lots of functions or many classes with a few functions. Leaning toward lots of simple classes
6259905e097d151d1a2c26d9
class Server(models.Model): <NEW_LINE> <INDENT> asset = models.OneToOneField('Asset') <NEW_LINE> sub_assset_type_choices = ( (0, '云主机'), (1, 'PC服务器'), (2, '刀片机'), (3, '小型机'), ) <NEW_LINE> created_by_choices = ( ('auto', 'Auto'), ('manual', 'Manual'), ) <NEW_LINE> sub_asset_type = models.SmallIntegerField(choices=sub_assset_type_choices, verbose_name="服务器类型", default=0) <NEW_LINE> created_by = models.CharField(choices=created_by_choices, max_length=32, default='auto') <NEW_LINE> model = models.CharField(verbose_name=u'型号', max_length=128, null=True, blank=True) <NEW_LINE> raid_type = models.CharField(u'raid类型', max_length=512, blank=True, null=True) <NEW_LINE> os_type = models.CharField(u'操作系统类型', max_length=64, blank=True, null=True) <NEW_LINE> os_distribution = models.CharField(u'发行版本', max_length=64, blank=True, null=True) <NEW_LINE> os_release = models.CharField(u'操作系统版本', max_length=64, blank=True, null=True) <NEW_LINE> hostname = models.CharField(max_length=64) <NEW_LINE> ip_addr = models.GenericIPAddressField(unique=True,null=True, blank=True) <NEW_LINE> port = models.IntegerField(default=22) <NEW_LINE> system_type_choice = ( ('linux', "Linux"), ("windows", "Windows") ) <NEW_LINE> system_type = models.CharField(choices=system_type_choice, max_length=32, default='linux') <NEW_LINE> enabled = models.BooleanField(default=True) <NEW_LINE> memo = models.TextField(blank=True, null=True) <NEW_LINE> date = models.DateTimeField(auto_now_add=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "%s(%s)" % (self.hostname,self.ip_addr) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = '服务器' <NEW_LINE> verbose_name_plural = "服务器"
Server device
6259905ecc0a2c111447c604
class PartialColumn2DKspaceGenerator(Column2DKspaceGenerator): <NEW_LINE> <INDENT> def __getitem__(self, it: int): <NEW_LINE> <INDENT> if it >= self._len: <NEW_LINE> <INDENT> raise IndexError <NEW_LINE> <DEDENT> idx = min(it, len(self.cols) - 1) <NEW_LINE> return self.kspace_mask(idx) <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.iter >= self._len: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> idx = min(self.iter, len(self.cols) - 1) <NEW_LINE> self.iter += 1 <NEW_LINE> return self.kspace_mask(idx) <NEW_LINE> <DEDENT> def kspace_mask(self, idx: int): <NEW_LINE> <INDENT> mask = np.zeros(self.shape[-2:]) <NEW_LINE> mask[:, self.cols[idx]] = 1 <NEW_LINE> kspace = np.squeeze(self._full_kspace * mask[np.newaxis, ...]) <NEW_LINE> return kspace, mask
k-space Generator yielding only the newly acquired line, to be used with a classical FFT operator
6259905edd821e528d6da4b6
class ExternalActivity(Activity): <NEW_LINE> <INDENT> def __init__(self, timeout=None, heartbeat=None): <NEW_LINE> <INDENT> self.runner = runner.External(timeout=timeout, heartbeat=heartbeat) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> return False
External activity. One of the main advantages of SWF is the ability to write a workflow whose activities are written in any language. The external activity class allows the workflow to be written in Garcon while still benefiting from some of its features (timeout calculation and sending context data, among other things).
6259905ecb5e8a47e493ccbb
class Binary(MathOp): <NEW_LINE> <INDENT> def __init__(self, tokens): <NEW_LINE> <INDENT> tokens = tokens[0] <NEW_LINE> if len(tokens)%2 == 1: <NEW_LINE> <INDENT> self.args = [("nop", tokens.pop(0))] <NEW_LINE> <DEDENT> else: self.args = [] <NEW_LINE> while tokens: <NEW_LINE> <INDENT> self.args.append( (tokens.pop(0), tokens.pop(0)) ) <NEW_LINE> <DEDENT> <DEDENT> def dump(self): <NEW_LINE> <INDENT> outStr = "" <NEW_LINE> for op, elem in self.args: <NEW_LINE> <INDENT> if op != "nop": <NEW_LINE> <INDENT> outStr += f"{op} " <NEW_LINE> <DEDENT> if isinstance(elem, ParseResults): <NEW_LINE> <INDENT> outStr += f'{elem["var"]} ' <NEW_LINE> <DEDENT> elif hasattr(elem, "dump"): <NEW_LINE> <INDENT> outStr += elem.dump() <NEW_LINE> <DEDENT> elif isinstance(elem, (tuple, list)): <NEW_LINE> <INDENT> for subElem in elem: <NEW_LINE> <INDENT> if hasattr(subElem, "dump"): <NEW_LINE> <INDENT> outStr += subElem.dump() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outStr += f"{subElem} " <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> outStr += f"{elem} " <NEW_LINE> <DEDENT> <DEDENT> return outStr
Mathematical binary and unary operations (including nop). Parses tokens to be a list of elementary operations.
6259905e009cb60464d02ba2
class Client(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, thread_id, thread_name): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> threading.thread_id = thread_id <NEW_LINE> threading.name = thread_name <NEW_LINE> self.port = int(Configure().read_config('client.conf', 'server', 'port')) <NEW_LINE> self.host = Configure().read_config('client.conf', 'server', 'host') <NEW_LINE> self.sleep_time = int(Configure().read_config('client.conf', 'client', 'sleep')) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> sr = SystemResource <NEW_LINE> init_data = { 'cpu': sr.get_cpu_info(self), 'mem': sr.get_men_info(self), 'net': sr.get_net_info(self), 'disk': sr.get_disk_info(self), 'user': sr.get_user_info(self), 'port': sr.get_port_info(self) } <NEW_LINE> while True: <NEW_LINE> <INDENT> tcpclinet = socket(AF_INET, SOCK_STREAM) <NEW_LINE> tcpclinet.connect((self.host, self.port)) <NEW_LINE> sr = SystemResource() <NEW_LINE> data = sr.return_all_info(init_data) <NEW_LINE> print(data) <NEW_LINE> init_data = data <NEW_LINE> data = simplejson.dumps(data) <NEW_LINE> tcpclinet.send(data.encode()) <NEW_LINE> tcpclinet.close() <NEW_LINE> sleep(self.sleep_time)
Client: mainly sends information to the server.
6259905e1b99ca400229006c
@pytest.mark.draft <NEW_LINE> @pytest.mark.components <NEW_LINE> @pytest.allure.story('Distributions') <NEW_LINE> @pytest.allure.feature('POST') <NEW_LINE> class Test_PFE_Components(object): <NEW_LINE> <INDENT> @pytest.allure.link('https://jira.qumu.com/browse/TC-44621') <NEW_LINE> @pytest.mark.Distributions <NEW_LINE> @pytest.mark.POST <NEW_LINE> def test_TC_44621_POST_Distributions_Mimetype_Empty_Value(self, context): <NEW_LINE> <INDENT> with pytest.allure.step("""Verify that user is unable to create Distribution if empty value for 'Mimetype' parameter is provided using request POST /distributions ."""): <NEW_LINE> <INDENT> distributionDetails = context.sc.DistributionDetails( activationDate='2017-09-06T07:36:46.542Z', distributionPolicy='REQUIRED', expirationDate=None, files=[{ 'id': 'FileID123', 'sourceUrl': 'qedorigin://Auto_storage/MP4File.mp4', 'streamMetadata': { 'bitrateKbps': 100, 'width': 10, 'height': 5, 'mimeType': '' } }], id='streamMetadata_mimeType_EmptyValue', name='streamMetadata_mimeType_EmptyValue', status=None, tags=None, targetAudiences=[{ 'id': 'Broadcast_Standalone_Audience' }]) <NEW_LINE> response = check( context.cl.Distributions.createEntity( body=distributionDetails ) ) <NEW_LINE> <DEDENT> with pytest.allure.step("""Verify that user is unable to create Distribution if empty value for 'Mimetype' parameter is provided using request POST /distributions ."""): <NEW_LINE> <INDENT> distributionDetails = context.sc.DistributionDetails( activationDate='2017-09-06T07:36:46.542Z', distributionPolicy='REQUIRED', expirationDate=None, files=[{ 'id': 'FileID123', 'sourceUrl': 'qedorigin://Auto_storage/MP4File.mp4', 'streamMetadata': { 'bitrateKbps': 100, 'width': 10, 'height': 5, 'mimeType': '' } }], id='streamMetadata_mimeType_EmptyValue', name='streamMetadata_mimeType_EmptyValue', status=None, tags=None, targetAudiences=[{ 'id': 'Broadcast_Standalone_Audience' }]) <NEW_LINE> request = context.cl.Distributions.createEntity( body=distributionDetails ) <NEW_LINE> try: <NEW_LINE> <INDENT> client, response = check( request, quiet=True, returnResponse=True ) <NEW_LINE> <DEDENT> except (HTTPBadRequest, HTTPForbidden) as e: <NEW_LINE> <INDENT> get_error_message(e) | expect.any( should.start_with('may not be empty'), should.start_with('Invalid page parameter specified'), should.contain('Invalid Authorization Token') ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception( "Expected error message, got {} status code instead.".format( response.status_code))
PFE Distributions test cases.
6259905eadb09d7d5dc0bbd6
class Listdir(plugin.Plugin): <NEW_LINE> <INDENT> def validator(self): <NEW_LINE> <INDENT> from flexget import validator <NEW_LINE> root = validator.factory() <NEW_LINE> root.accept('path') <NEW_LINE> bundle = root.accept('list') <NEW_LINE> bundle.accept('path') <NEW_LINE> return root <NEW_LINE> <DEDENT> def on_task_input(self, task, config): <NEW_LINE> <INDENT> if isinstance(config, basestring): <NEW_LINE> <INDENT> config = [config] <NEW_LINE> <DEDENT> entries = [] <NEW_LINE> for path in config: <NEW_LINE> <INDENT> path = os.path.expanduser(path) <NEW_LINE> for name in os.listdir(unicode(path)): <NEW_LINE> <INDENT> e = Entry() <NEW_LINE> e['title'] = name <NEW_LINE> filepath = os.path.join(path, name) <NEW_LINE> if not filepath.startswith('/'): <NEW_LINE> <INDENT> filepath = '/' + filepath <NEW_LINE> <DEDENT> e['url'] = 'file://%s' % filepath <NEW_LINE> e['location'] = os.path.join(path, name) <NEW_LINE> e['filename'] = name <NEW_LINE> entries.append(e) <NEW_LINE> <DEDENT> <DEDENT> return entries
Uses local path content as an input. Example:: listdir: /storage/movies/
6259905e99cbb53fe683254c
class PublicUserApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> <DEDENT> def test_create_valid_user_success(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'testpass', 'name': 'Test name' } <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_201_CREATED) <NEW_LINE> user = get_user_model().objects.get(**res.data) <NEW_LINE> self.assertTrue(user.check_password(payload['password'])) <NEW_LINE> self.assertNotIn('password', res.data) <NEW_LINE> <DEDENT> def test_user_exists(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'testpass', } <NEW_LINE> create_user(**payload) <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_password_too_short(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'pass', } <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> user_exists = get_user_model().objects.filter( email=payload['email'] ).exists() <NEW_LINE> self.assertFalse(user_exists) <NEW_LINE> <DEDENT> def test_create_token_for_user(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'testpass' } <NEW_LINE> create_user(**payload) <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> <DEDENT> def test_create_token_invalid_credentials(self): <NEW_LINE> <INDENT> create_user(email='[email protected]', password='testpass') <NEW_LINE> payload = { 'email': '[email protected]', 'password': 'wrongpass' } <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_create_token_no_user(self): <NEW_LINE> <INDENT> payload = { 'email': '[email protected]', 'password': 'wrongpass' } <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_create_token_missing_field(self): <NEW_LINE> <INDENT> res = self.client.post(TOKEN_URL, {'email': 'one', 'password': ''}) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_retrieve_user_unauthorized(self): <NEW_LINE> <INDENT> res = self.client.post(ME_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
Test the users API (public)
6259905e32920d7e50bc76b2
class AbstractItem(core_models.TimeStampedModel): <NEW_LINE> <INDENT> name = models.CharField(max_length=80) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Abstract Item
6259905e2ae34c7f260ac752
class RequestedAuthnContext(RequestedAuthnContextType_): <NEW_LINE> <INDENT> c_tag = 'RequestedAuthnContext' <NEW_LINE> c_namespace = NAMESPACE <NEW_LINE> c_children = RequestedAuthnContextType_.c_children.copy() <NEW_LINE> c_attributes = RequestedAuthnContextType_.c_attributes.copy() <NEW_LINE> c_child_order = RequestedAuthnContextType_.c_child_order[:] <NEW_LINE> c_cardinality = RequestedAuthnContextType_.c_cardinality.copy()
The urn:oasis:names:tc:SAML:2.0:protocol:RequestedAuthnContext element
6259905e45492302aabfdb45
class NetworkInterfaceAssociation(Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(NetworkInterfaceAssociation, self).__init__(**kwargs) <NEW_LINE> self.id = None <NEW_LINE> self.security_rules = kwargs.get('security_rules', None)
Network interface and its custom security rules. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Network interface ID. :vartype id: str :param security_rules: Collection of custom security rules. :type security_rules: list[~azure.mgmt.network.v2016_12_01.models.SecurityRule]
6259905e435de62698e9d472
class HGraph(): <NEW_LINE> <INDENT> def __init__(self, connections=None, directed=True): <NEW_LINE> <INDENT> self._graph = igraph.Graph(directed=True) <NEW_LINE> self._node2idx = {} <NEW_LINE> self._idx2node = {} <NEW_LINE> self._curr_idx = 0 <NEW_LINE> if connections: <NEW_LINE> <INDENT> self.add_connections(connections) <NEW_LINE> <DEDENT> <DEDENT> def add_connections(self, connections): <NEW_LINE> <INDENT> for i,j in connections: <NEW_LINE> <INDENT> if i not in self._node2idx: <NEW_LINE> <INDENT> self._node2idx[i] = self._curr_idx <NEW_LINE> self._idx2node[self._curr_idx] = i <NEW_LINE> self._curr_idx += 1 <NEW_LINE> <DEDENT> if j not in self._node2idx: <NEW_LINE> <INDENT> self._node2idx[j] = self._curr_idx <NEW_LINE> self._idx2node[self._curr_idx] = j <NEW_LINE> self._curr_idx += 1 <NEW_LINE> <DEDENT> <DEDENT> self._graph.add_vertices(len(self._node2idx)) <NEW_LINE> self._graph.add_edges([(self._node2idx[i],self._node2idx[j]) for i,j in connections]) <NEW_LINE> self._roots = [n for n in self._node2idx if not self.get_parents(n)] <NEW_LINE> <DEDENT> def is_connected(self, node1, node2): <NEW_LINE> <INDENT> return self._graph.are_connected(self._node2idx[node1],self._node2idx[node2]) <NEW_LINE> <DEDENT> def get_parents(self, node): <NEW_LINE> <INDENT> return [self._idx2node[n] for n in self._graph.predecessors(self._node2idx[node])] <NEW_LINE> <DEDENT> def get_childs(self, node): <NEW_LINE> <INDENT> return [self._idx2node[n] for n in self._graph.successors(self._node2idx[node])] <NEW_LINE> <DEDENT> def find_shortest_path(self, node1, node2): <NEW_LINE> <INDENT> path = [self._idx2node[i] for i in self._graph.get_shortest_paths(self._node2idx[node1], self._node2idx[node2])[-1]] <NEW_LINE> if path: <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_shortest_possible_path(self, node): <NEW_LINE> <INDENT> if node in self._roots: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> candidate_paths = [self.find_shortest_path(n, node) for n in self._roots] <NEW_LINE> candidate_paths = [p for p in candidate_paths if p] <NEW_LINE> if candidate_paths: <NEW_LINE> <INDENT> return min(candidate_paths, key=len) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_degree(self, node1, node2): <NEW_LINE> <INDENT> shortest_path_f = self.find_shortest_path(node1, node2) <NEW_LINE> shortest_path_b = self.find_shortest_path(node2, node1) <NEW_LINE> return len(shortest_path_f or shortest_path_b or []) <NEW_LINE> <DEDENT> def load(self, connections): <NEW_LINE> <INDENT> self.add_connections(connections)
Graph data structure, directed by default.
6259905e63d6d428bbee3dbe
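A hypothetical usage sketch for the HGraph entry above (needs python-igraph installed; the node names are invented for illustration).

    g = HGraph(connections=[('root', 'a'), ('a', 'b'), ('root', 'c')])
    g.is_connected('root', 'a')         # True: direct edge
    g.get_childs('root')                # ['a', 'c'] (order follows edge insertion)
    g.find_shortest_path('root', 'b')   # ['root', 'a', 'b']
    g.get_shortest_possible_path('b')   # shortest path from any root node, here the same path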
class Node: <NEW_LINE> <INDENT> def __init__(self, state, parent=None, action=None, path_cost=0): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.parent = parent <NEW_LINE> self.action = action <NEW_LINE> if parent: <NEW_LINE> <INDENT> self.path_cost = parent.path_cost + path_cost <NEW_LINE> self.depth = parent.depth + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.path_cost = path_cost <NEW_LINE> self.depth = 0 <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Node %s>" % (self.state,) <NEW_LINE> <DEDENT> def nodePath(self): <NEW_LINE> <INDENT> x, result = self, [self] <NEW_LINE> while x.parent: <NEW_LINE> <INDENT> result.append(x.parent) <NEW_LINE> x = x.parent <NEW_LINE> <DEDENT> result.reverse() <NEW_LINE> return result <NEW_LINE> <DEDENT> def path(self): <NEW_LINE> <INDENT> actions = [] <NEW_LINE> currnode = self <NEW_LINE> while currnode.parent: <NEW_LINE> <INDENT> actions.append(currnode.action) <NEW_LINE> currnode = currnode.parent <NEW_LINE> <DEDENT> actions.reverse() <NEW_LINE> return actions <NEW_LINE> <DEDENT> def expand(self, problem): <NEW_LINE> <INDENT> return [Node(next, self, act, cost) for (next, act, cost) in problem.getSuccessors(self.state)]
AIMA: A node in a search tree. Contains a pointer to the parent (the node that this is a successor of) and to the actual state for this node. Note that if a state is arrived at by two paths, then there are two nodes with the same state. Also includes the action that got us to this state, and the total path_cost (also known as g) to reach the node. Other functions may add an f and h value; see best_first_graph_search and astar_search for an explanation of how the f and h values are handled. You will not need to subclass this class.
6259905e3617ad0b5ee077b9
class BoundingBoxFittingParams(base_orm.BaseORM): <NEW_LINE> <INDENT> DB_FIELDS = OrderedDict( [ ("labels", dict), ("video", dict), ] )
Labeling algorithm parameters for the bounding box fitting algorithm. ORM fields: labels, video
6259905e63b5f9789fe867df
class InvalidCredentials(HTTPException): <NEW_LINE> <INDENT> description = 'the provided credentials are not valid!' <NEW_LINE> code = 401
Implementation of InvalidCredentials exception. This exception is raised when the credentials specified by user are invalid
6259905e498bea3a75a59134
class LUVConfigParam(object): <NEW_LINE> <INDENT> def __init__(self,short_option, long_option, dest_varname, help_text): <NEW_LINE> <INDENT> self.short_option = short_option <NEW_LINE> self.long_option = long_option <NEW_LINE> self.dest_varname = dest_varname <NEW_LINE> self.help_text =help_text
Create objects for all optional, destination, and help arguments.
6259905ed99f1b3c44d06d0f
class Item(PropDict): <NEW_LINE> <INDENT> VALID_KEYS = ITEM_KEYS | {'deleted', 'nlink', } <NEW_LINE> __slots__ = ("_dict", ) <NEW_LINE> path = PropDict._make_property('path', str, 'surrogate-escaped str', encode=safe_encode, decode=safe_decode) <NEW_LINE> source = PropDict._make_property('source', str, 'surrogate-escaped str', encode=safe_encode, decode=safe_decode) <NEW_LINE> user = PropDict._make_property('user', (str, type(None)), 'surrogate-escaped str or None', encode=safe_encode, decode=safe_decode) <NEW_LINE> group = PropDict._make_property('group', (str, type(None)), 'surrogate-escaped str or None', encode=safe_encode, decode=safe_decode) <NEW_LINE> acl_access = PropDict._make_property('acl_access', bytes) <NEW_LINE> acl_default = PropDict._make_property('acl_default', bytes) <NEW_LINE> acl_extended = PropDict._make_property('acl_extended', bytes) <NEW_LINE> acl_nfs4 = PropDict._make_property('acl_nfs4', bytes) <NEW_LINE> mode = PropDict._make_property('mode', int) <NEW_LINE> uid = PropDict._make_property('uid', int) <NEW_LINE> gid = PropDict._make_property('gid', int) <NEW_LINE> rdev = PropDict._make_property('rdev', int) <NEW_LINE> bsdflags = PropDict._make_property('bsdflags', int) <NEW_LINE> atime = PropDict._make_property('atime', int, 'bigint', encode=int_to_bigint, decode=bigint_to_int) <NEW_LINE> ctime = PropDict._make_property('ctime', int, 'bigint', encode=int_to_bigint, decode=bigint_to_int) <NEW_LINE> mtime = PropDict._make_property('mtime', int, 'bigint', encode=int_to_bigint, decode=bigint_to_int) <NEW_LINE> hardlink_master = PropDict._make_property('hardlink_master', bool) <NEW_LINE> chunks = PropDict._make_property('chunks', (list, type(None)), 'list or None') <NEW_LINE> chunks_healthy = PropDict._make_property('chunks_healthy', (list, type(None)), 'list or None') <NEW_LINE> xattrs = PropDict._make_property('xattrs', StableDict) <NEW_LINE> deleted = PropDict._make_property('deleted', bool) <NEW_LINE> nlink = PropDict._make_property('nlink', int) <NEW_LINE> part = PropDict._make_property('part', int) <NEW_LINE> def file_size(self, hardlink_masters=None): <NEW_LINE> <INDENT> hardlink_masters = hardlink_masters or {} <NEW_LINE> chunks, _ = hardlink_masters.get(self.get('source'), (None, None)) <NEW_LINE> chunks = self.get('chunks', chunks) <NEW_LINE> if chunks is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return sum(chunk.size for chunk in chunks)
Item abstraction that deals with validation and the low-level details internally: Items are created either from msgpack unpacker output, from another dict, from kwargs or built step-by-step by setting attributes. msgpack gives us a dict with bytes-typed keys, just give it to Item(d) and use item.key_name later. msgpack gives us byte-typed values for stuff that should be str, we automatically decode when getting such a property and encode when setting it. If an Item shall be serialized, give as_dict() method output to msgpack packer.
6259905e76e4537e8c3f0bf9
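A rough sketch of building an Item from the entry above; the attribute names follow the ITEM_KEYS defined in the surrounding (borg-style) codebase, and as_dict() is the serialization hook mentioned in the docstring.

    item = Item(path='home/user/file.txt', mode=0o100644, uid=1000, gid=1000)
    item.mtime = 1500000000 * 10**9   # stored through the bigint encoder internally
    packable = item.as_dict()         # msgpack-ready dict, per the docstring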
class BadSniffException(Exception): <NEW_LINE> <INDENT> pass
Raised when the csv sniffer fails to determine the dialect of a file
6259905e0a50d4780f7068f5
class Resample(AFNICommand): <NEW_LINE> <INDENT> _cmd = '3dresample' <NEW_LINE> input_spec = ResampleInputSpec <NEW_LINE> output_spec = AFNICommandOutputSpec
Resample or reorient an image using AFNI 3dresample command For complete details, see the `3dresample Documentation. <https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dresample.html>`_ Examples ======== >>> from nipype.interfaces import afni >>> resample = afni.Resample() >>> resample.inputs.in_file = 'functional.nii' >>> resample.inputs.orientation= 'RPI' >>> resample.inputs.outputtype = 'NIFTI' >>> resample.cmdline # doctest: +ALLOW_UNICODE '3dresample -orient RPI -prefix functional_resample.nii -inset functional.nii' >>> res = resample.run() # doctest: +SKIP
6259905e56ac1b37e630381d
class HashTable(object): <NEW_LINE> <INDENT> EMPTY = None <NEW_LINE> DELETED = True <NEW_LINE> def __init__(self, capacity = 29, hashFunction = hash, linear = True): <NEW_LINE> <INDENT> self.table = Array(capacity, HashTable.EMPTY) <NEW_LINE> self.size = 0 <NEW_LINE> self.hash = hashFunction <NEW_LINE> self.homeIndex = -1 <NEW_LINE> self.actualIndex = -1 <NEW_LINE> self.linear = linear <NEW_LINE> self.probeCount = 0 <NEW_LINE> self.capacity = capacity <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> temp = "[" <NEW_LINE> count = 0 <NEW_LINE> for item in self.table: <NEW_LINE> <INDENT> if count == 0: <NEW_LINE> <INDENT> temp = temp + str(item) <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> temp = temp + ", " + str(item) <NEW_LINE> <DEDENT> <DEDENT> temp = temp + "]" <NEW_LINE> return temp <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.size <NEW_LINE> <DEDENT> def getLoadFactor(self): <NEW_LINE> <INDENT> return (self.size / self.capacity) <NEW_LINE> <DEDENT> def getHomeIndex(self): <NEW_LINE> <INDENT> return self.homeIndex <NEW_LINE> <DEDENT> def getActualIndex(self): <NEW_LINE> <INDENT> return self.actualIndex <NEW_LINE> <DEDENT> def getProbeCount(self): <NEW_LINE> <INDENT> return self.probeCount <NEW_LINE> <DEDENT> def insert(self, item): <NEW_LINE> <INDENT> self.probeCount = 0 <NEW_LINE> self.homeIndex = abs(self.hash(item)) % len(self.table) <NEW_LINE> distance = 1 <NEW_LINE> index = self.homeIndex <NEW_LINE> while not self.table[index] in (HashTable.EMPTY, HashTable.DELETED): <NEW_LINE> <INDENT> if self.linear: <NEW_LINE> <INDENT> increment = index + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> increment = self.homeIndex + distance ** 2 <NEW_LINE> distance += 1 <NEW_LINE> <DEDENT> index = increment % len(self.table) <NEW_LINE> self.probeCount += 1 <NEW_LINE> <DEDENT> self.table[index] = item <NEW_LINE> self.size += 1 <NEW_LINE> self.actualIndex = index <NEW_LINE> <DEDENT> def get(self, item): <NEW_LINE> <INDENT> self.probeCount = 0 <NEW_LINE> self.homeIndex = abs(self.hash(item)) % len(self.table) <NEW_LINE> distance = 1 <NEW_LINE> index = self.homeIndex <NEW_LINE> checked = 0 <NEW_LINE> while self.table[index] != item: <NEW_LINE> <INDENT> if checked == self.probeCount: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> if self.linear: <NEW_LINE> <INDENT> increment = index + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> increment = self.homeIndex + distance ** 2 <NEW_LINE> distance += 1 <NEW_LINE> checked += 1 <NEW_LINE> <DEDENT> index = increment % len(self.table) <NEW_LINE> self.probeCount += 1 <NEW_LINE> <DEDENT> return index <NEW_LINE> <DEDENT> def remove(self, item): <NEW_LINE> <INDENT> self.probeCount = 0 <NEW_LINE> self.homeIndex = abs(self.hash(item)) % len(self.table) <NEW_LINE> distance = 1 <NEW_LINE> index = self.homeIndex <NEW_LINE> checked = 0 <NEW_LINE> while self.table[index] != item: <NEW_LINE> <INDENT> if checked == self.capacity: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> if self.linear: <NEW_LINE> <INDENT> increment = index + 1 <NEW_LINE> checked += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> increment = self.homeIndex + distance ** 2 <NEW_LINE> distance += 1 <NEW_LINE> checked += 1 <NEW_LINE> <DEDENT> index = increment % len(self.table) <NEW_LINE> self.probeCount += 1 <NEW_LINE> <DEDENT> self.table[index] = self.DELETED <NEW_LINE> self.size -= 1 <NEW_LINE> return index
Represents a hash table.
6259905ed53ae8145f919acf
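A brief sketch of the HashTable above; linear=True selects linear probing and linear=False quadratic probing. Concrete slot indexes depend on Python's string hashing, so they vary between runs.

    table = HashTable(capacity=29, linear=True)
    table.insert('apple')
    table.insert('pear')
    len(table)              # 2
    table.getLoadFactor()   # 2 / 29
    table.get('apple')      # slot index of 'apple' (its home index when there is no collision)
    table.remove('pear')    # marks the slot as DELETED and returns its index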
class Train(object): <NEW_LINE> <INDENT> def __init__(self, options): <NEW_LINE> <INDENT> self._options = options <NEW_LINE> <DEDENT> def save_model(self, net): <NEW_LINE> <INDENT> net.save_parameters(os.path.join(self._options.check_path, 'best_perf_model')) <NEW_LINE> <DEDENT> def train(self, train_iter): <NEW_LINE> <INDENT> ctx = create_context(self._options.num_gpu) <NEW_LINE> net = LorenzBuilder(self._options, ctx=ctx, for_train=True).build() <NEW_LINE> trainer = gluon.Trainer(net.collect_params(), 'adam', {'learning_rate': self._options.learning_rate, 'wd': self._options.l2_regularization}) <NEW_LINE> loss = gluon.loss.L1Loss() <NEW_LINE> loss_save = [] <NEW_LINE> best_loss = sys.maxsize <NEW_LINE> start = time.time() <NEW_LINE> for epoch in trange(self._options.epochs): <NEW_LINE> <INDENT> total_epoch_loss, nb = mx.nd.zeros(1, ctx), 0 <NEW_LINE> for x, y in train_iter: <NEW_LINE> <INDENT> x = x.reshape((self._options.batch_size, self._options.in_channels, -1)).as_in_context(ctx) <NEW_LINE> y = y.as_in_context(ctx) <NEW_LINE> with autograd.record(): <NEW_LINE> <INDENT> y_hat = net(x) <NEW_LINE> l = loss(y_hat, y) <NEW_LINE> <DEDENT> l.backward() <NEW_LINE> trainer.step(self._options.batch_size, ignore_stale_grad=True) <NEW_LINE> total_epoch_loss += l.sum() <NEW_LINE> nb += x.shape[0] <NEW_LINE> <DEDENT> current_loss = total_epoch_loss.asscalar()/nb <NEW_LINE> loss_save.append(current_loss) <NEW_LINE> print('Epoch {}, loss {}'.format(epoch, current_loss)) <NEW_LINE> if current_loss < best_loss: <NEW_LINE> <INDENT> best_loss = current_loss <NEW_LINE> self.save_model(net) <NEW_LINE> <DEDENT> print('best epoch loss: ', best_loss) <NEW_LINE> <DEDENT> end = time.time() <NEW_LINE> np.savetxt(os.path.join(self._options.assets_dir, 'losses.txt'), np.array(loss_save)) <NEW_LINE> print("Training took ", end - start, " seconds.")
Training engine for Lorenz architecture.
6259905e3539df3088ecd909
class MD_Connection_Net_Tcp_Request(MD_Connection): <NEW_LINE> <INDENT> def _open(self): <NEW_LINE> <INDENT> self.logger.debug(f'{self.__class__.__name__} opening connection as {__name__} with params {self._params}') <NEW_LINE> return True <NEW_LINE> <DEDENT> def _close(self): <NEW_LINE> <INDENT> self.logger.debug(f'{self.__class__.__name__} closing connection as {__name__} with params {self._params}') <NEW_LINE> <DEDENT> def _send(self, data_dict): <NEW_LINE> <INDENT> url = data_dict.get('payload', None) <NEW_LINE> if not url: <NEW_LINE> <INDENT> self.logger.error(f'can not send without url parameter from data_dict {data_dict}, aborting') <NEW_LINE> return False <NEW_LINE> <DEDENT> request_method = data_dict.get('request_method', 'get') <NEW_LINE> par = {} <NEW_LINE> for arg in (REQUEST_DICT_ARGS): <NEW_LINE> <INDENT> par[arg] = data_dict.get(arg, {}) <NEW_LINE> <DEDENT> if request_method == 'get': <NEW_LINE> <INDENT> par['params'] = par['data'] <NEW_LINE> par['data'] = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> par['params'] = {} <NEW_LINE> <DEDENT> par['data'] = json.dumps(par['data']) <NEW_LINE> response = requests.request(request_method, url, params=par['params'], headers=par['headers'], data=par['data'], cookies=par['cookies'], files=par['files']) <NEW_LINE> self.logger.debug(f'{self.__class__.__name__} received response {response.text} with code {response.status_code}') <NEW_LINE> if 200 <= response.status_code < 400: <NEW_LINE> <INDENT> return response.text <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response.raise_for_status() <NEW_LINE> <DEDENT> except requests.HTTPError as e: <NEW_LINE> <INDENT> raise requests.HTTPError(f'TCP request returned code {response.status_code}, error was: {e}') <NEW_LINE> <DEDENT> <DEDENT> return None
Connection via TCP / HTTP requests This class implements TCP connections in a query-reply manner using the requests library, e.g. for HTTP communication. The data_dict['payload'] data needs to be the full query URL. Additional parameter dicts can be added to be passed to requests.request, as - request_method: get (default) or post - headers, data, cookies, files: passed through to request() - data is encoded in the URL for GET or sent as a dict for POST Response data is returned as text. Errors raise HTTPError
6259905e4f88993c371f1055
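A minimal usage sketch for the data_dict contract described above; `conn` is assumed to be an already-configured MD_Connection_Net_Tcp_Request instance, and the URL and parameter names are illustrative only, not taken from the original source.

# Hypothetical example: issue a GET request through _send(); only the
# 'payload' key is required, everything else falls back to defaults.
data_dict = {
    'payload': 'http://192.168.0.10/api/status',    # full query URL (assumed endpoint)
    'request_method': 'get',                        # 'get' (default) or 'post'
    'data': {'device': 'livingroom'},               # sent as URL params for GET
    'headers': {'Accept': 'application/json'},
}
reply_text = conn._send(data_dict)                  # returns response.text for status 200-399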
class ModifyParamTemplateRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TemplateId = None <NEW_LINE> self.Name = None <NEW_LINE> self.Description = None <NEW_LINE> self.ParamList = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TemplateId = params.get("TemplateId") <NEW_LINE> self.Name = params.get("Name") <NEW_LINE> self.Description = params.get("Description") <NEW_LINE> if params.get("ParamList") is not None: <NEW_LINE> <INDENT> self.ParamList = [] <NEW_LINE> for item in params.get("ParamList"): <NEW_LINE> <INDENT> obj = Parameter() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.ParamList.append(obj)
ModifyParamTemplate request parameter structure.
6259905e56b00c62f0fb3f39
@attr.s(slots=True) <NEW_LINE> class TriggerInstance: <NEW_LINE> <INDENT> action: AutomationActionType = attr.ib() <NEW_LINE> automation_info: AutomationTriggerInfo = attr.ib() <NEW_LINE> trigger: Trigger = attr.ib() <NEW_LINE> remove: CALLBACK_TYPE | None = attr.ib(default=None) <NEW_LINE> async def async_attach_trigger(self) -> None: <NEW_LINE> <INDENT> assert self.trigger.tasmota_trigger is not None <NEW_LINE> event_config = { event_trigger.CONF_PLATFORM: "event", event_trigger.CONF_EVENT_TYPE: TASMOTA_EVENT, event_trigger.CONF_EVENT_DATA: { "mac": self.trigger.tasmota_trigger.cfg.mac, "source": self.trigger.tasmota_trigger.cfg.subtype, "event": self.trigger.tasmota_trigger.cfg.event, }, } <NEW_LINE> event_config = event_trigger.TRIGGER_SCHEMA(event_config) <NEW_LINE> if self.remove: <NEW_LINE> <INDENT> self.remove() <NEW_LINE> <DEDENT> self.remove = await event_trigger.async_attach_trigger( self.trigger.hass, event_config, self.action, self.automation_info, platform_type="device", )
Attached trigger settings.
6259905e55399d3f05627b8d
class TestPerformance(TestCase): <NEW_LINE> <INDENT> def test_init(self): <NEW_LINE> <INDENT> data = [1,4,5,6,7] <NEW_LINE> p = Performace(data) <NEW_LINE> self.assertEqual(p.data, data)
Test case for a Performance class
6259905e7b25080760ed8817
class OddRegressionModel(Model): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Model.__init__(self) <NEW_LINE> self.get_data_and_monitor = backend.get_data_and_monitor_regression <NEW_LINE> self.learning_rate = .03 <NEW_LINE> self.W1, self.b1 = nn.Variable(1, 300), nn.Variable(300) <NEW_LINE> self.W2, self.b2 = nn.Variable(300, 1), nn.Variable(1) <NEW_LINE> <DEDENT> def run(self, x, y=None): <NEW_LINE> <INDENT> self.graph = nn.Graph([self.W1, self.W2, self.b1, self.b2]) <NEW_LINE> func_x = nn.Input(self.graph, x) <NEW_LINE> neg_mat = nn.Input(self.graph, np.full((1,1), -1.0)) <NEW_LINE> in_relu = nn.MatrixVectorAdd(self.graph, nn.MatrixMultiply(self.graph, func_x, self.W1), self.b1) <NEW_LINE> first_relu = nn.ReLU(self.graph, in_relu) <NEW_LINE> mult_W2 = nn.MatrixMultiply(self.graph, first_relu, self.W2) <NEW_LINE> add_b2 = nn.MatrixVectorAdd(self.graph, mult_W2, self.b2) <NEW_LINE> func_neg_x = nn.Input(self.graph, np.multiply(np.full_like(x, -1), x)) <NEW_LINE> in_relu_neg = nn.MatrixVectorAdd(self.graph, nn.MatrixMultiply(self.graph, func_neg_x, self.W1), self.b1) <NEW_LINE> first_relu_neg = nn.ReLU(self.graph, in_relu_neg) <NEW_LINE> mult_W2_neg = nn.MatrixMultiply(self.graph, first_relu_neg, self.W2) <NEW_LINE> add_b2_neg = nn.MatrixVectorAdd(self.graph, mult_W2_neg, self.b2) <NEW_LINE> negative_fx = nn.MatrixMultiply(self.graph, mult_W2_neg, neg_mat) <NEW_LINE> total = nn.Add(self.graph, mult_W2, negative_fx) <NEW_LINE> if y is not None: <NEW_LINE> <INDENT> given_y = nn.Input(self.graph, y) <NEW_LINE> loss = nn.SquareLoss(self.graph, total, given_y) <NEW_LINE> return self.graph <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.graph.get_output(self.graph.get_nodes()[-1])
TODO: Question 5 - [Application] OddRegression A neural network model for approximating a function that maps from real numbers to real numbers. Unlike RegressionModel, the OddRegressionModel must be structurally constrained to represent an odd function, i.e. it must always satisfy the property f(x) = -f(-x) at all points during training.
6259905e99cbb53fe683254f
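The structural constraint named in the docstring follows directly from how the graph above is wired: the same sub-network is evaluated at x and at -x and the two outputs are subtracted. Writing g for that shared two-layer ReLU network (notation assumed for illustration, not from the original source), a one-line derivation shows the property holds at every point during training:

\[
f(x) = g(x) - g(-x)
\quad\Longrightarrow\quad
f(-x) = g(-x) - g(x) = -\bigl(g(x) - g(-x)\bigr) = -f(x),
\]

so f is odd by construction, independent of the current weight values.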
class ArrangementValidator(BaseValidator): <NEW_LINE> <INDENT> def validate_put_fields(self): <NEW_LINE> <INDENT> self.validate_dates_are_ordered('start_dato', 'slutt_dato', 'Startdato må være før sluttdato') <NEW_LINE> self.validate_date_is_newer_than_year_1900('start_dato', 'Startdato kan ikke være før 1900') <NEW_LINE> self.validate_date_is_newer_than_year_1900('slutt_dato', 'Sluttdato kan ikke være før 1900') <NEW_LINE> return self
Validator class for Arrangement
6259905e07f4c71912bb0aab
class ConceptSchemeSource(models.Model): <NEW_LINE> <INDENT> concept_scheme = models.ForeignKey( SkosConceptScheme, related_name="has_sources", verbose_name="skos:ConceptScheme", help_text="Which Skos:ConceptScheme current source belongs to", on_delete=models.CASCADE ) <NEW_LINE> name = models.TextField( verbose_name="dc:source", help_text="Verbal description of a concept scheme's source" ) <NEW_LINE> language = models.CharField( max_length=3, verbose_name="dc:source language", help_text="Language of source given above" ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{}".format(self.name)
A class for ConceptScheme source information.
6259905e3cc13d1c6d466db0
class FirstVpcPublicNets(Resolver): <NEW_LINE> <INDENT> def resolve(self): <NEW_LINE> <INDENT> response = self.connection_manager.call( 'ec2', 'describe_vpcs' ) <NEW_LINE> vpc = response['Vpcs'][0]['VpcId'] <NEW_LINE> response = self.connection_manager.call( 'ec2', 'describe_subnets' ) <NEW_LINE> public_networks = [] <NEW_LINE> for net in response['Subnets']: <NEW_LINE> <INDENT> if net['VpcId'] == vpc and net['MapPublicIpOnLaunch']: <NEW_LINE> <INDENT> public_networks.append(net['SubnetId']) <NEW_LINE> <DEDENT> <DEDENT> return ','.join(public_networks)
Resolver that returns the subnet IDs of all public subnets (MapPublicIpOnLaunch) in the account's first VPC, joined as a comma-separated string.
6259905e45492302aabfdb48
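For readers outside the resolver framework, roughly the same lookup can be sketched with plain boto3; this is an illustrative equivalent, not part of the resolver's API, and region and credentials are assumed to come from the environment.

import boto3

ec2 = boto3.client('ec2')
vpc_id = ec2.describe_vpcs()['Vpcs'][0]['VpcId']            # the "first" VPC, as above
subnets = ec2.describe_subnets()['Subnets']
public = [s['SubnetId'] for s in subnets
          if s['VpcId'] == vpc_id and s['MapPublicIpOnLaunch']]
print(','.join(public))                                     # comma-separated public subnet IDs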
class mesh_classic: <NEW_LINE> <INDENT> def __init__(self, x, y, z, topcolor=(1,0,0), botcolor=(0,1,1)): <NEW_LINE> <INDENT> self.t = vp.faces(color=topcolor) <NEW_LINE> self.b = vp.faces(color=botcolor) <NEW_LINE> self.move(x, y, z) <NEW_LINE> <DEDENT> def corners(self, x, y, z): <NEW_LINE> <INDENT> p = np.dstack((x, y, z)) <NEW_LINE> cs = np.column_stack <NEW_LINE> s = lambda u: np.reshape(u, (-1,3)) <NEW_LINE> a, c = s(p[:-1,:-1]), s(p[1:,1:]) <NEW_LINE> t1 = cs((a, s(p[:-1,1:]), c)) <NEW_LINE> t2 = cs((a, c, s(p[1:,:-1]))) <NEW_LINE> q = np.concatenate((t1,t2)).reshape(-1,3) <NEW_LINE> r = np.reshape(q, (len(q)//3,3,3)) <NEW_LINE> r = np.reshape(r[:,[0,2,1],:], (-1,3)) <NEW_LINE> return q, r <NEW_LINE> <DEDENT> def move(self, x, y, z): <NEW_LINE> <INDENT> self.t.pos, self.b.pos = self.corners(x, y, z) <NEW_LINE> self.t.make_normals(), self.b.make_normals()
Create a mesh surface whose grid points are given by the 2-D arrays x[,], y[,], z[,]. Other inputs: top and bottom surface colors.
6259905ed99f1b3c44d06d11
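A small usage sketch for the class above; it assumes numpy as np and the classic VPython module as vp are imported as in the source, and the height field is made up for illustration.

import numpy as np

u = np.linspace(-2.0, 2.0, 40)
x, y = np.meshgrid(u, u)              # 2-D grid arrays: the x[,], y[,] inputs
z = np.exp(-(x**2 + y**2))            # any height field defined on the grid

surf = mesh_classic(x, y, z, topcolor=(1, 0, 0), botcolor=(0, 1, 1))
# surf.move(x, y, z * 0.5)            # later: push new grid data into the same surface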
class Ladder(Lattice): <NEW_LINE> <INDENT> Lu = 2 <NEW_LINE> dim = 1 <NEW_LINE> def __init__(self, L, sites, **kwargs): <NEW_LINE> <INDENT> sites = _parse_sites(sites, 2) <NEW_LINE> basis = np.array([[1., 0.]]) <NEW_LINE> pos = np.array([[0., 0.], [0., 1.]]) <NEW_LINE> kwargs.setdefault('basis', basis) <NEW_LINE> kwargs.setdefault('positions', pos) <NEW_LINE> NN = [(0, 0, np.array([1])), (1, 1, np.array([1])), (0, 1, np.array([0]))] <NEW_LINE> nNN = [(0, 1, np.array([1])), (1, 0, np.array([1]))] <NEW_LINE> nnNN = [(0, 0, np.array([2])), (1, 1, np.array([2]))] <NEW_LINE> kwargs.setdefault('pairs', {}) <NEW_LINE> kwargs['pairs'].setdefault('nearest_neighbors', NN) <NEW_LINE> kwargs['pairs'].setdefault('next_nearest_neighbors', nNN) <NEW_LINE> kwargs['pairs'].setdefault('next_next_nearest_neighbors', nnNN) <NEW_LINE> Lattice.__init__(self, [L], sites, **kwargs) <NEW_LINE> <DEDENT> def ordering(self, order): <NEW_LINE> <INDENT> if isinstance(order, str) and (order == 'default' or order == 'folded' or order == 'folded2'): <NEW_LINE> <INDENT> (L, u) = self.shape <NEW_LINE> assert u == 2 <NEW_LINE> ordering = np.zeros([2 * L, 2], dtype=np.intp) <NEW_LINE> if order == 'default': <NEW_LINE> <INDENT> ordering[:, 0] = np.repeat(np.arange(L, dtype=np.intp), 2) <NEW_LINE> ordering[:, 1] = np.tile(np.array([0, 1], dtype=np.intp), L) <NEW_LINE> <DEDENT> elif order == 'folded': <NEW_LINE> <INDENT> order = [] <NEW_LINE> for i in range(L // 2): <NEW_LINE> <INDENT> order.append((i, 0)) <NEW_LINE> order.append((i, 1)) <NEW_LINE> order.append((L - i - 1, 0)) <NEW_LINE> order.append((L - i - 1, 1)) <NEW_LINE> <DEDENT> if L % 2 == 1: <NEW_LINE> <INDENT> order.append((L // 2, 0)) <NEW_LINE> order.append((L // 2, 1)) <NEW_LINE> <DEDENT> assert len(order) == 2 * L <NEW_LINE> ordering = np.array(order, dtype=np.intp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert (False) <NEW_LINE> <DEDENT> return ordering <NEW_LINE> <DEDENT> return super().ordering(order)
A ladder coupling two chains. .. plot :: import matplotlib.pyplot as plt from tenpy.models import lattice plt.figure(figsize=(5, 1.4)) ax = plt.gca() lat = lattice.Ladder(4, None, bc='periodic') lat.plot_coupling(ax, linewidth=3.) lat.plot_order(ax, linestyle=':') lat.plot_sites(ax) lat.plot_basis(ax, origin=[-0.5, -0.25], shade=False) ax.set_aspect('equal') ax.set_xlim(-1.) ax.set_ylim(-1.) plt.show() Parameters ---------- L : int The length of each chain, we have 2*L sites in total. sites : (list of) :class:`~tenpy.networks.site.Site` The two local lattice sites making the `unit_cell` of the :class:`Lattice`. If only a single :class:`~tenpy.networks.site.Site` is given, it is used for both chains. **kwargs : Additional keyword arguments given to the :class:`Lattice`. `basis`, `pos` and `pairs` are set accordingly.
6259905e8e7ae83300eea6fc
class KDMBundle(object): <NEW_LINE> <INDENT> def __init__(self, catalog, kdms): <NEW_LINE> <INDENT> self.catalog = catalog <NEW_LINE> self.kdms = kdms <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_tarfile(cls, filepath): <NEW_LINE> <INDENT> tar = tarfile.open(filepath, 'r') <NEW_LINE> cat_member = tar.getmember('CATALOG') <NEW_LINE> catalog = KDMBundle._parse_catalog(tar.extractfile(cat_member).read()) <NEW_LINE> kdms = [] <NEW_LINE> for kdm_path in catalog.kdm_paths: <NEW_LINE> <INDENT> xml = tar.extractfile(os.path.join('CONTENT', kdm_path)).read() <NEW_LINE> kdms.append(KDM(xml)) <NEW_LINE> <DEDENT> tar.close() <NEW_LINE> return cls(catalog, kdms) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_catalog(xml_str): <NEW_LINE> <INDENT> return KDMBundleCatalog.from_string(xml_str) <NEW_LINE> <DEDENT> def str(self, tar): <NEW_LINE> <INDENT> for tarinfo in tar: <NEW_LINE> <INDENT> print(tarinfo.name, "is", tarinfo.size, "bytes in size and is") <NEW_LINE> if tarinfo.isreg(): <NEW_LINE> <INDENT> print("a regular file.") <NEW_LINE> <DEDENT> elif tarinfo.isdir(): <NEW_LINE> <INDENT> print("a directory.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("something else.")
Manages SMPTE KDM bundles. SMPTE Doc: S430-9-2008
6259905e56ac1b37e630381e
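A hypothetical usage sketch for the bundle reader above; the file name is an assumption, and only from_tarfile(), .catalog and .kdms come from the class itself.

bundle = KDMBundle.from_tarfile('my_kdm_bundle.tar')   # path is illustrative
print('catalog:', bundle.catalog)
print('number of KDMs:', len(bundle.kdms))
for kdm in bundle.kdms:
    pass  # each entry is a KDM object parsed from the bundle's CONTENT/ files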