code (stringlengths 4–4.48k) | docstring (stringlengths 1–6.45k) | _id (stringlengths 24) |
---|---|---|
class ManifestItemMeta(ABCMeta): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs, **kwargs): <NEW_LINE> <INDENT> rv = ABCMeta.__new__(cls, name, bases, attrs, **kwargs) <NEW_LINE> item_types[rv.item_type] = rv <NEW_LINE> return rv | Custom metaclass that registers all the subclasses in the
item_types dictionary according to the value of their item_type
attribute, and otherwise behaves like an ABCMeta. | 625990617d43ff2487427f70 |
class Projects(object): <NEW_LINE> <INDENT> def __init__(self, items=None): <NEW_LINE> <INDENT> self.swagger_types = { 'items': 'list[Project]' } <NEW_LINE> self.attribute_map = { 'items': 'items' } <NEW_LINE> self._items = items <NEW_LINE> <DEDENT> @property <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> @items.setter <NEW_LINE> def items(self, items): <NEW_LINE> <INDENT> self._items = items <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Projects): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259906144b2445a339b74c1 |
class LinearColormap(LinearSegmentedColormap): <NEW_LINE> <INDENT> def __init__(self, name, segmented_data, **kwargs): <NEW_LINE> <INDENT> segmented_data = dict((key, [(x, y, y) for x, y in value]) for key, value in segmented_data.items()) <NEW_LINE> LinearSegmentedColormap.__init__(self, name, segmented_data, **kwargs) | LinearSegmentedColormap in which color varies smoothly.
This class is a simplification of LinearSegmentedColormap, which doesn't
support jumps in color intensities.
Parameters
----------
name : str
Name of colormap.
segmented_data : dict
Dictionary of 'red', 'green', 'blue', and (optionally) 'alpha' values.
Each color key contains a list of `x`, `y` tuples. `x` must increase
monotonically from 0 to 1 and corresponds to input values for a
mappable object (e.g. an image). `y` corresponds to the color
intensity. | 625990614e4d562566373ac8 |
class Moon(Sol): <NEW_LINE> <INDENT> bodytype = 'moon' <NEW_LINE> def __init__(self, name='Moon'): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> assert (type(name) == str), 'Name must be a string.' <NEW_LINE> self.name = name <NEW_LINE> if name in moon_data: <NEW_LINE> <INDENT> for key in moon_data[name]: <NEW_LINE> <INDENT> setattr(self, key, moon_data[name][key]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for key in moon_data['Moon']: <NEW_LINE> <INDENT> setattr(self, key, moon_data['Moon'][key]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.parent = Planet(self.system) | Create a new Moon object. Moons are natural satellites of planets or minor bodies.
Args:
name dtype: str | Must match one of: 'Moon'. Defaults to 'Moon' if not specified.
Initial physical attributes are generated from values stored in spacepy.data.bodydata.moon_data. For Earth's Moon, the following parameters are defined:
id dtype: int | NAIF integer code for this moon.
parent dtype: str | Body around which this moon orbits.
gm dtype: float | Gravitational parameter. Unit: km**3 / s**2
r dtype: float | Equatorial radius. Unit: km
rmean dtype: float | Mean radius. Unit: km
rho dtype: float | Bulk density. Unit: kg / m**3
sday dtype: float | Sidereal rotation period. Unit: s | 62599061d6c5a102081e37e6 |
class DoubanBackend(OAuthBackend): <NEW_LINE> <INDENT> name = 'douban' <NEW_LINE> EXTRA_DATA = [('id', 'id')] <NEW_LINE> def get_user_id(self, details, response): <NEW_LINE> <INDENT> return response['db:uid']['$t'] <NEW_LINE> <DEDENT> def get_user_details(self, response): <NEW_LINE> <INDENT> return {'username': response["db:uid"]["$t"], 'email': ''} | Douban OAuth authentication backend | 62599061adb09d7d5dc0bc2c |
class ChangePassword(ChangePasswordBase): <NEW_LINE> <INDENT> class SimpleIO(ChangePasswordBase.SimpleIO): <NEW_LINE> <INDENT> request_elem = 'zato_outgoing_sql_change_password_request' <NEW_LINE> response_elem = 'zato_outgoing_sql_change_password_response' <NEW_LINE> <DEDENT> def handle(self): <NEW_LINE> <INDENT> def _auth(instance, password): <NEW_LINE> <INDENT> instance.password = password <NEW_LINE> <DEDENT> self._handle(SQLConnectionPool, _auth, OUTGOING.SQL_CHANGE_PASSWORD.value) | Changes the password of an outgoing SQL connection.
| 625990616e29344779b01d11 |
class File(Validator): <NEW_LINE> <INDENT> def __init__(self, mode='r', buffering=-1): <NEW_LINE> <INDENT> self.mode = mode <NEW_LINE> self.buffering = buffering <NEW_LINE> <DEDENT> def __call__(self, value): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> path = str(value) <NEW_LINE> if not os.path.isabs(path): <NEW_LINE> <INDENT> path = os.path.join(File._var_run_splunk, path) <NEW_LINE> <DEDENT> value = open(path, self.mode, self.buffering) <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> raise ValueError( 'Cannot open %s with mode=%s and buffering=%s: %s' % (value, self.mode, self.buffering, e)) <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def format(self, value): <NEW_LINE> <INDENT> return value.name <NEW_LINE> <DEDENT> _var_run_splunk = os.path.join( os.environ['SPLUNK_HOME'], "var", "run", "splunk") | Validates file option values.
| 6259906163b5f9789fe86835 |
class Dia(Package): <NEW_LINE> <INDENT> homepage = 'https://wiki.gnome.org/Apps/Dia' <NEW_LINE> url = 'https://ftp.gnome.org/pub/gnome/sources/dia/0.97/dia-0.97.3.tar.xz' <NEW_LINE> version('0.97.3', '0e744a0f6a6c4cb6a089e4d955392c3c') <NEW_LINE> depends_on('intltool', type='build') <NEW_LINE> depends_on('gettext', type='build') <NEW_LINE> depends_on('pkg-config', type='build') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> depends_on('libxslt') <NEW_LINE> depends_on('python') <NEW_LINE> depends_on('swig') <NEW_LINE> depends_on('libsm') <NEW_LINE> depends_on('libuuid') <NEW_LINE> depends_on('libxinerama') <NEW_LINE> depends_on('libxrender') <NEW_LINE> depends_on('libxml2') <NEW_LINE> depends_on('freetype') <NEW_LINE> def url_for_version(self, version): <NEW_LINE> <INDENT> return 'https://ftp.gnome.org/pub/gnome/sources/dia/%s/dia-%s.tar.xz' % (version.up_to(2), version) <NEW_LINE> <DEDENT> def install(self, spec, prefix): <NEW_LINE> <INDENT> options = ['--prefix=%s' % prefix, '--with-cairo', '--with-xslt-prefix=%s' % spec['libxslt'].prefix, '--with-python', '--with-swig'] <NEW_LINE> configure(*options) <NEW_LINE> make() <NEW_LINE> make('install') | Dia is a program for drawing structured diagrams. | 62599061009cb60464d02bf9 |
class ExperimentalPlugins(BasePluginManager): <NEW_LINE> <INDENT> def get_query_set(self): <NEW_LINE> <INDENT> return super(ExperimentalPlugins, self).get_query_set().filter(pluginversion__approved=True, pluginversion__experimental=True).distinct() | Shows only public plugins: i.e. those with "approved" flag set
and with one "experimental" version | 6259906121bff66bcd724327 |
class InputError(Exception): <NEW_LINE> <INDENT> pass | Exception class for errors related to input | 625990614e4d562566373ac9 |
class Provider(object): <NEW_LINE> <INDENT> DUMMY = 'dummy' <NEW_LINE> S3 = 's3' <NEW_LINE> S3_US_WEST = 's3_us_west' <NEW_LINE> S3_EU_WEST = 's3_eu_west' <NEW_LINE> S3_AP_SOUTHEAST = 's3_ap_southeast' <NEW_LINE> S3_AP_NORTHEAST = 's3_ap_northeast' <NEW_LINE> NINEFOLD = 'ninefold' <NEW_LINE> GOOGLE_STORAGE = 'google_storage' <NEW_LINE> S3_US_WEST_OREGON = 's3_us_west_oregon' <NEW_LINE> CLOUDFILES_SWIFT = 'cloudfiles_swift' <NEW_LINE> NIMBUS = 'nimbus' <NEW_LINE> LOCAL = 'local' <NEW_LINE> CLOUDFILES = 'cloudfiles' <NEW_LINE> AZURE_BLOBS = 'azure_blobs' <NEW_LINE> CLOUDFILES_US = 'cloudfiles_us' <NEW_LINE> CLOUDFILES_UK = 'cloudfiles_uk' | Defines for each of the supported providers
:cvar DUMMY: Example provider
:cvar CLOUDFILES_US: CloudFiles US
:cvar CLOUDFILES_UK: CloudFiles UK
:cvar S3: Amazon S3 US
:cvar S3_US_WEST: Amazon S3 US West (Northern California)
:cvar S3_EU_WEST: Amazon S3 EU West (Ireland)
:cvar S3_AP_SOUTHEAST_HOST: Amazon S3 Asia South East (Singapore)
:cvar S3_AP_NORTHEAST_HOST: Amazon S3 Asia North East (Tokyo)
:cvar NINEFOLD: Ninefold
:cvar GOOGLE_STORAGE: Google Storage
:cvar S3_US_WEST_OREGON: Amazon S3 US West 2 (Oregon)
:cvar NIMBUS: Nimbus.io driver
:cvar LOCAL: Local storage driver | 62599061f7d966606f74941a |
class Critic: <NEW_LINE> <INDENT> def __init__(self, state_size, action_size): <NEW_LINE> <INDENT> self.state_size = state_size <NEW_LINE> self.action_size = action_size <NEW_LINE> self.build_model() <NEW_LINE> <DEDENT> def build_model(self): <NEW_LINE> <INDENT> states = layers.Input(shape=(self.state_size,), name='states') <NEW_LINE> actions = layers.Input(shape=(self.action_size,), name='actions') <NEW_LINE> net_states = layers.Dense(units=400, kernel_regularizer=layers.regularizers.l2(1e-6))(states) <NEW_LINE> net_states = layers.BatchNormalization()(net_states) <NEW_LINE> net_states = layers.Activation('relu')(net_states) <NEW_LINE> net_states = layers.Dense(units=300, kernel_regularizer=layers.regularizers.l2(1e-6))(net_states) <NEW_LINE> net_states = layers.BatchNormalization()(net_states) <NEW_LINE> net_states = layers.Activation('relu')(net_states) <NEW_LINE> net_actions = layers.Dense(units=300, kernel_regularizer=layers.regularizers.l2(1e-6))(actions) <NEW_LINE> net_actions = layers.BatchNormalization()(net_actions) <NEW_LINE> net_actions = layers.Activation('relu')(net_actions) <NEW_LINE> net = layers.Add()([net_states, net_actions]) <NEW_LINE> net = layers.Activation('relu')(net) <NEW_LINE> Q_values = layers.Dense(units=1, name='q_values', kernel_initializer=layers.initializers.RandomUniform(minval=-0.003, maxval=0.003))(net) <NEW_LINE> self.model = models.Model(inputs=[states, actions], outputs=Q_values) <NEW_LINE> optimizer = optimizers.Adam(lr=0.001) <NEW_LINE> self.model.compile(optimizer=optimizer, loss='mse') <NEW_LINE> action_gradients = K.gradients(Q_values, actions) <NEW_LINE> self.get_action_gradients = K.function( inputs=[*self.model.input, K.learning_phase()], outputs=action_gradients) | Critic (Value) Model. | 625990613cc13d1c6d466e04 |
class Car(): <NEW_LINE> <INDENT> def __init__(self, make, model, year): <NEW_LINE> <INDENT> self.make = make <NEW_LINE> self.model = model <NEW_LINE> self.year = year <NEW_LINE> self.odometer_reading = 0 <NEW_LINE> <DEDENT> def get_descriptive_name(self): <NEW_LINE> <INDENT> long_name = str(self.year) + ' ' + self.make + ' ' + self.model <NEW_LINE> return long_name.title() <NEW_LINE> <DEDENT> def read_odometer(self): <NEW_LINE> <INDENT> print("This car has "+str(self.odometer_reading)+" miles on it") <NEW_LINE> <DEDENT> def update_odometer(self,mileage): <NEW_LINE> <INDENT> self.odometer_reading = mileage <NEW_LINE> <DEDENT> def increment_odometer(self,miles): <NEW_LINE> <INDENT> self.odometer_reading+=miles | A simple attempt to represent a car. | 625990613539df3088ecd95f |
class GeneSequenceGenerator(object): <NEW_LINE> <INDENT> def __init__(self, max_size=None): <NEW_LINE> <INDENT> self.max_size = max_size if max_size is not None else float('inf') <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> from classes import Gene <NEW_LINE> i = 0 <NEW_LINE> while i < self.max_size: <NEW_LINE> <INDENT> gene = '' <NEW_LINE> for _ in range(GENE_LENGTH): <NEW_LINE> <INDENT> gene += random.choice('acgt') <NEW_LINE> <DEDENT> yield Gene(gene) <NEW_LINE> i += 1 | Generate a sequence of genes
| 625990613eb6a72ae038bd22 |
class SwingViableDay: <NEW_LINE> <INDENT> day_date: date <NEW_LINE> prev_day_volume: int <NEW_LINE> avg_volume_50: int <NEW_LINE> range_75: float <NEW_LINE> prev_day_high: float <NEW_LINE> def __init__(self, day_date: date, prev_day_volume: int, avg_volume_50: int, range_75: float, prev_day_high: float): <NEW_LINE> <INDENT> self.day_date = day_date <NEW_LINE> self.prev_day_volume = prev_day_volume <NEW_LINE> self.avg_volume_50 = avg_volume_50 <NEW_LINE> self.range_75 = range_75 <NEW_LINE> self.prev_day_high = prev_day_high <NEW_LINE> <DEDENT> def to_json(self) -> Dict[str, any]: <NEW_LINE> <INDENT> return { 'day_date': self.day_date.strftime(DATE_FORMAT), 'prev_day_volume': self.prev_day_volume, 'avg_volume_50': self.avg_volume_50, 'range_75': self.range_75, 'prev_day_high': self.prev_day_high } | Contains json-serializable info on a single day that was viable for SwingStrategy. | 62599061498bea3a75a59160 |
class CancelAggregationTask(IcontrolRestCommand): <NEW_LINE> <INDENT> def __init__(self, name=None, itemid=None, timeout=60, *args, **kwargs): <NEW_LINE> <INDENT> super(CancelAggregationTask, self).__init__(*args, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.itemid = itemid <NEW_LINE> self.timeout = timeout <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> LOG.info("Canceling Aggregation Task '{0}'...".format(self.name or self.itemid)) <NEW_LINE> payload = None <NEW_LINE> if self.itemid: <NEW_LINE> <INDENT> payload = self.api.get(EventAggregationTasks.ITEM_URI % self.itemid) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for item in self.api.get(EventAggregationTasks.URI)['items']: <NEW_LINE> <INDENT> if item.name: <NEW_LINE> <INDENT> if item.name == self.name: <NEW_LINE> <INDENT> payload = item <NEW_LINE> self.itemid = item.id <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if payload.status != "CANCELED": <NEW_LINE> <INDENT> payload['status'] = "CANCEL_REQUESTED" <NEW_LINE> self.api.patch(EventAggregationTasks.ITEM_URI % payload.id, payload) <NEW_LINE> <DEDENT> self.resp = None <NEW_LINE> def is_status_canceled(): <NEW_LINE> <INDENT> self.resp = self.api.get(EventAggregationTasks.ITEM_URI % self.itemid) <NEW_LINE> if self.resp.status == "CANCELED": <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> wait(is_status_canceled, progress_cb=lambda x: "Task Status: {0}".format(self.resp.status), timeout=self.timeout, interval=2, timeout_message="Task is not Canceled after {0}s") <NEW_LINE> return self.resp | Cancels an Aggregation Task via the icontrol rest api using task id or name
Type: PATCH
@param name: name
@type name: string
@param itemid: the item id
@type itemid: string
@return: the api resp
@rtype: attr dict json | 6259906145492302aabfdb9d |
class Hand(object): <NEW_LINE> <INDENT> def __init__(self, cards): <NEW_LINE> <INDENT> self.cards = cards <NEW_LINE> self.rank = None <NEW_LINE> <DEDENT> def by_suit(self): <NEW_LINE> <INDENT> self.cards.sort(key=lambda card: card.suit) <NEW_LINE> <DEDENT> def by_rank(self): <NEW_LINE> <INDENT> self.cards.sort(key=lambda card: card.rank) <NEW_LINE> <DEDENT> def beats(self, hand): <NEW_LINE> <INDENT> return self.rank > hand.rank <NEW_LINE> <DEDENT> def find_type(self): <NEW_LINE> <INDENT> if self.is_royal(): <NEW_LINE> <INDENT> return "RF" <NEW_LINE> <DEDENT> elif self.is_a_straight(): <NEW_LINE> <INDENT> return "SR" <NEW_LINE> <DEDENT> elif self.is_a_flush(): <NEW_LINE> <INDENT> return "FL" <NEW_LINE> <DEDENT> elif self.straight_flush(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def is_royal(self): <NEW_LINE> <INDENT> return reduce(ans, (c.face() for c in self.cards)) <NEW_LINE> <DEDENT> def is_a_straight(self): <NEW_LINE> <INDENT> lin = sorted(R.suit for R in self.cards) <NEW_LINE> return lin == range(min(lin), max(lin) + 1) <NEW_LINE> <DEDENT> def is_a_flush(self): <NEW_LINE> <INDENT> return reduce(eq,(cd.suit for cd in self.cards)) <NEW_LINE> <DEDENT> def straight_flush(self): <NEW_LINE> <INDENT> return self.is_a_straight() and self.is_a_flush() <NEW_LINE> <DEDENT> def royal_flush(self): <NEW_LINE> <INDENT> return self.is_a_straight() and self.is_a_flush() and self.is_royal() | a poker hand | 625990617d847024c075da98 |
class AccountPage(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> logger.info("Initializing Account Page's Element") <NEW_LINE> self.home_path = BuiltIn().get_variable_value("${globalTestBed}")["AutomationServer"]["HOME_PATH"] <NEW_LINE> self.driver = BuiltIn().get_variable_value("${cur_session}")["session"] <NEW_LINE> self.dashNav = self.driver.find_element_by_css_selector("#dashnav >ul") <NEW_LINE> <DEDENT> def clickAccountNavigation(self, AccountNavItem): <NEW_LINE> <INDENT> WebDriverWait(self.driver,20).until(EC.visibility_of(self.dashNav)) <NEW_LINE> self.anchorList = self.dashNav.find_elements_by_tag_name("a") <NEW_LINE> for anchor in self.anchorList: <NEW_LINE> <INDENT> if(AccountNavItem.lower() in anchor.get_attribute("href")): <NEW_LINE> <INDENT> logger.info("Clicked on %s" % (str(AccountNavItem))) <NEW_LINE> anchor.click() <NEW_LINE> logger.info("Opening Page: " + self.home_path+"/PageObjects/"+str(AccountNavItem)+"Page.py") <NEW_LINE> BuiltIn().import_library(self.home_path+"/PageObjects/"+str(AccountNavItem)+"Page.py") <NEW_LINE> break | classdocs
Created on Oct 23, 2016
@author: tarun
Provides elements of the Account page and the associated methods. | 625990614a966d76dd5f05b7 |
class ToFieldNameTest(TestCase): <NEW_LINE> <INDENT> def test_filter(self): <NEW_LINE> <INDENT> self.assertEqual( di_tags.to_field_name('TextField'), 'Text' ) <NEW_LINE> self.assertEqual( di_tags.to_field_name('NumericField'), 'Numeric' ) <NEW_LINE> self.assertEqual( di_tags.to_field_name('DateTimeField'), 'Date and Time' ) <NEW_LINE> self.assertEqual( di_tags.to_field_name('DateField'), 'Date' ) <NEW_LINE> self.assertEqual( di_tags.to_field_name('TimeField'), 'Time' ) <NEW_LINE> self.assertEqual( di_tags.to_field_name('LookupField'), 'Select box' ) <NEW_LINE> self.assertEqual( di_tags.to_field_name('MultipleLookupField'), 'Multiple select' ) <NEW_LINE> self.assertEqual( di_tags.to_field_name('NonExistingField'), 'NonExistingField' ) | Test to_field_name filter. | 62599061009cb60464d02bfa |
class FamilyName(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=120, help_text="The family name") <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.name | Represents a given family name (surname) | 625990618e71fb1e983bd18e |
class FoolAgent: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def act(self, state): <NEW_LINE> <INDENT> return np.random.choice(state.allowed_actions) | This agent selects the next action at random.
| 6259906167a9b606de547603 |
class CheckoutSerializer(serializers.Serializer): <NEW_LINE> <INDENT> customer_tag = SerializeFormAsTextField('CustomerForm') <NEW_LINE> shipping_address_tag = SerializeFormAsTextField('ShippingAddressForm') <NEW_LINE> billing_address_tag = SerializeFormAsTextField('BillingAddressForm') <NEW_LINE> shipping_method_tag = SerializeFormAsTextField('ShippingMethodForm') <NEW_LINE> payment_method_tag = SerializeFormAsTextField('PaymentMethodForm') <NEW_LINE> extra_annotation_tag = SerializeFormAsTextField('ExtraAnnotationForm') <NEW_LINE> def get_customer_tag(self, form_class, cart): <NEW_LINE> <INDENT> form = form_class(instance=cart.customer) <NEW_LINE> return form.as_text() <NEW_LINE> <DEDENT> def get_shipping_address_tag(self, form_class, cart): <NEW_LINE> <INDENT> form = form_class(instance=cart.shipping_address, cart=cart) <NEW_LINE> return form.as_text() <NEW_LINE> <DEDENT> def get_billing_address_tag(self, form_class, cart): <NEW_LINE> <INDENT> form = form_class(instance=cart.billing_address, cart=cart) <NEW_LINE> return form.as_text() <NEW_LINE> <DEDENT> def get_shipping_method_tag(self, form_class, cart): <NEW_LINE> <INDENT> form = form_class(initial=cart.extra, cart=cart) <NEW_LINE> return form.as_text() <NEW_LINE> <DEDENT> def get_payment_method_tag(self, form_class, cart): <NEW_LINE> <INDENT> form = form_class(initial=cart.extra, cart=cart) <NEW_LINE> return form.as_text() <NEW_LINE> <DEDENT> def get_extra_annotation_tag(self, form_class, cart): <NEW_LINE> <INDENT> form = form_class(initial=cart.extra, cart=cart) <NEW_LINE> return form.as_text() | Serializer to digest a summary of data required for the checkout. | 62599061d486a94d0ba2d68b |
class DisplayPlugin(capture_gui.plugin.Plugin): <NEW_LINE> <INDENT> id = "Display Options" <NEW_LINE> label = "Display Options" <NEW_LINE> section = "config" <NEW_LINE> order = 70 <NEW_LINE> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(DisplayPlugin, self).__init__(parent=parent) <NEW_LINE> self._colors = dict() <NEW_LINE> self._layout = QtWidgets.QVBoxLayout() <NEW_LINE> self._layout.setContentsMargins(0, 0, 0, 0) <NEW_LINE> self.setLayout(self._layout) <NEW_LINE> self.override = QtWidgets.QCheckBox("Override Display Options") <NEW_LINE> self.display_type = QtWidgets.QComboBox() <NEW_LINE> self.display_type.addItems(["Solid", "Gradient"]) <NEW_LINE> self._color_layout = QtWidgets.QHBoxLayout() <NEW_LINE> for label, default in COLORS.items(): <NEW_LINE> <INDENT> self.add_color_picker(self._color_layout, label, default) <NEW_LINE> <DEDENT> self._layout.addWidget(self.override) <NEW_LINE> self._layout.addWidget(self.display_type) <NEW_LINE> self._layout.addLayout(self._color_layout) <NEW_LINE> self.on_toggle_override() <NEW_LINE> self.connections() <NEW_LINE> <DEDENT> def connections(self): <NEW_LINE> <INDENT> self.override.toggled.connect(self.on_toggle_override) <NEW_LINE> self.override.toggled.connect(self.options_changed) <NEW_LINE> self.display_type.currentIndexChanged.connect(self.options_changed) <NEW_LINE> <DEDENT> def add_color_picker(self, layout, label, default): <NEW_LINE> <INDENT> column = QtWidgets.QVBoxLayout() <NEW_LINE> label_widget = QtWidgets.QLabel(LABELS[label]) <NEW_LINE> color_picker = colorpicker.ColorPicker() <NEW_LINE> color_picker.color = default <NEW_LINE> column.addWidget(label_widget) <NEW_LINE> column.addWidget(color_picker) <NEW_LINE> column.setAlignment(label_widget, QtCore.Qt.AlignCenter) <NEW_LINE> layout.addLayout(column) <NEW_LINE> color_picker.valueChanged.connect(self.options_changed) <NEW_LINE> self._colors[label] = color_picker <NEW_LINE> return color_picker <NEW_LINE> <DEDENT> def on_toggle_override(self): <NEW_LINE> <INDENT> state = self.override.isChecked() <NEW_LINE> self.display_type.setEnabled(state) <NEW_LINE> for widget in self._colors.values(): <NEW_LINE> <INDENT> widget.setEnabled(state) <NEW_LINE> <DEDENT> <DEDENT> def display_gradient(self): <NEW_LINE> <INDENT> return self.display_type.currentText() == "Gradient" <NEW_LINE> <DEDENT> def apply_inputs(self, settings): <NEW_LINE> <INDENT> for label, widget in self._colors.items(): <NEW_LINE> <INDENT> default = COLORS.get(label, [0, 0, 0]) <NEW_LINE> value = settings.get(label, default) <NEW_LINE> widget.color = value <NEW_LINE> <DEDENT> override = settings.get("override_display", False) <NEW_LINE> self.override.setChecked(override) <NEW_LINE> <DEDENT> def get_inputs(self, as_preset): <NEW_LINE> <INDENT> inputs = {"override_display": self.override.isChecked()} <NEW_LINE> for label, widget in self._colors.items(): <NEW_LINE> <INDENT> inputs[label] = widget.color <NEW_LINE> <DEDENT> return inputs <NEW_LINE> <DEDENT> def get_outputs(self): <NEW_LINE> <INDENT> outputs = {} <NEW_LINE> if self.override.isChecked(): <NEW_LINE> <INDENT> outputs["displayGradient"] = self.display_gradient() <NEW_LINE> for label, widget in self._colors.items(): <NEW_LINE> <INDENT> outputs[label] = widget.color <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> outputs["displayGradient"] = cmds.displayPref(query=True, displayGradient=True) <NEW_LINE> for key in COLORS.keys(): <NEW_LINE> <INDENT> color = cmds.displayRGBColor(key, query=True) <NEW_LINE> outputs[key] = color <NEW_LINE> <DEDENT> <DEDENT> return {"display_options": outputs} | Plugin to apply viewport visibilities and settings | 625990618a43f66fc4bf3852 |
class Box(box.Box): <NEW_LINE> <INDENT> def update(self, item=None, **kwargs): <NEW_LINE> <INDENT> source = Box(item) <NEW_LINE> if kwargs: <NEW_LINE> <INDENT> source.update(kwargs) <NEW_LINE> <DEDENT> for key, value in source.items(): <NEW_LINE> <INDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> node = self.setdefault(key, Box()) <NEW_LINE> node.update(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[key] = value | - Merges on update instead of overriding.
- Supports loading from TOML | 6259906199cbb53fe68325a5 |
class Config(object): <NEW_LINE> <INDENT> SECRET_KEY = '9hQaY2nGqS9YQbs_b033vA' <NEW_LINE> WTF_CSRF_SECRET_KEY = 'lCgqy2NPRYY5NYkk25bhuQ' <NEW_LINE> CWD = dirname(abspath(__file__)) <NEW_LINE> SQLALCHEMY_DATABASE_URI = 'sqlite:///' + join(CWD, 'spaceless.db') <NEW_LINE> UPLOAD_FOLDER = join(CWD, 'static/profile_pictures') <NEW_LINE> POST_UPLOAD = join(CWD, 'static/post_pictures') <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False | Set Flask base configuration | 625990618e7ae83300eea750 |
class RunParameters(typing.NamedTuple): <NEW_LINE> <INDENT> freq_max: float <NEW_LINE> freq_min: float = 0.05 <NEW_LINE> Rayleigh_or_Love: str = 'Rayleigh' <NEW_LINE> phase_or_group_velocity: str = 'ph' <NEW_LINE> l_min: int = 0 <NEW_LINE> l_max: int = 7000 <NEW_LINE> l_increment_standard: int = 2 <NEW_LINE> l_increment_failed: int = 2 <NEW_LINE> max_run_N: int = 500 <NEW_LINE> qmod_path: str = './data/earth_models/qmod_highQ' <NEW_LINE> bin_path: str = '../MINEOS/bin' | Parameters needed to run MINEOS.
Fields:
Rayleigh_or_Love:
- str
- 'Rayleigh' or 'Love' for Rayleigh or Love
- Default value = 'Rayleigh'
phase_or_group_velocity:
- str
- 'ph' or 'gr' for phase or group velocity
- Default value = 'ph'
l_min:
- int
- Minimum angular order for calculations
- Default value = 0
l_max:
- int
- Expected max angular order for calculations
- Default value = 3500.
freq_min:
- float
- Units: mHz
- Minimum frequency for calculations.
- Default value = 0.05 mHz (i.e. 20,000 s)
freq_max:
- float
- Units: mHz
- Maximum frequency - should be set to 1000/min(sw_periods) + 1
need to compute a little bit beyond the ideal minimum period
l_increment_standard:
- int
- When MINEOS breaks and has to be restarted with a higher lmin,
it is normally restarted at l_min = the last successfully
calculated l (l_last) + l_increment_standard.
- Default value = 2
l_increment_failed:
- int
- When MINEOS breaks and has to be restarted with a higher lmin,
if the last attempt produced no successful calculations, l_min
is instead l_last + l_increment_failed.
- Default value = 5 (how much to increment lmin by if broken)
max_run_N:
- int
- When MINEOS breaks and has to be restarted with a higher lmin,
if it tries to restart more than max_run_N times, it will
return an error instead.
- Default value = 5e2
qmod_path:
- str
- Path to the standard qmod file for attenuation corrections.
- Default value = './data/earth_models/qmod_highQ'
- Default value has such high Q throughout the Earth that it is
equivalent to not doing a Q correction
bin_path:
- str
- Path to the FORTRAN executables for MINEOS
- Default value = '../MINEOS/bin' | 62599061627d3e7fe0e0854e |
class ReferendumStaticViewSitemap(Sitemap): <NEW_LINE> <INDENT> priority = 0.5 <NEW_LINE> changefreq = 'never' <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return ['referendum_create', 'legal'] <NEW_LINE> <DEDENT> def location(self, obj): <NEW_LINE> <INDENT> return reverse(obj) | Never updated pages sitemap | 62599061ac7a0e7691f73ba7 |
class ProjectMiddleware(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> if request.session.get('CURRENT_PROJECT'): <NEW_LINE> <INDENT> request.project = Project.objects.get(pk=request.session['CURRENT_PROJECT']) <NEW_LINE> return <NEW_LINE> <DEDENT> if request.path.startswith('/designer/'): <NEW_LINE> <INDENT> return redirect('manage-projects') | Stores current project in request object
and redirects if the project is not set | 62599061a17c0f6771d5d706 |
class P2TRConst: <NEW_LINE> <INDENT> FIELD_SIZE: int = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F <NEW_LINE> TAP_TWEAK_SHA256: bytes = BytesUtils.FromHexString( "e80fe1639c9ca050e3af1b39c143c63e429cbceb15d940fbb5c5a1f4af57c5e9" ) <NEW_LINE> WITNESS_VER: int = 1 | Class container for P2TR constants. | 62599061e64d504609df9f30 |
class File_toolbar(QToolBar, object): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.setObjectName("File Toolbar") <NEW_LINE> self.setMovable(False) <NEW_LINE> self.setFloatable(False) <NEW_LINE> self.new_button = self.addAction(self.style().standardIcon( QStyle.SP_FileIcon), 'New model') <NEW_LINE> self.open_button = self.addAction(self.style().standardIcon( QStyle.SP_DialogOpenButton), 'Open model') <NEW_LINE> self.save_button = self.addAction(self.style().standardIcon( QStyle.SP_DialogSaveButton), 'Save model') <NEW_LINE> self.check_button = self.addAction(self.style().standardIcon( QStyle.SP_DialogApplyButton), 'Check model') <NEW_LINE> self.addSeparator() <NEW_LINE> self.up_button = self.addAction(self.style().standardIcon( QStyle.SP_ArrowUp), 'Go one level above') <NEW_LINE> self.up_button.setEnabled(False) | Toolbar with file open, save, etc | 62599061460517430c432bb5 |
class SteelFusionLUNIOReport(BaseStatsReport): <NEW_LINE> <INDENT> resource = 'granite_lun_io' <NEW_LINE> link = 'report' <NEW_LINE> data_key = 'response_data' <NEW_LINE> required_fields = ['device', 'start_time', 'end_time'] <NEW_LINE> non_required_fields = ['traffic_type', 'lun_subclass_id'] | Report class to return the SteelFusion lun io timeseries | 62599061a8370b77170f1a92 |
class InternationalMelonOrder(AbstractMelonOrder): <NEW_LINE> <INDENT> def __init__(self, species, qty, country_code): <NEW_LINE> <INDENT> super(InternationalMelonOrder, self).__init__(species, qty, country_code, "international", 0.17) | An international (non-US) melon order. | 62599061009cb60464d02bfb |
class FeedStats: <NEW_LINE> <INDENT> cfg = None <NEW_LINE> def __init__(self,cfg): <NEW_LINE> <INDENT> self.cfg = cfg <NEW_LINE> return <NEW_LINE> <DEDENT> def incr(self,statname): <NEW_LINE> <INDENT> if self.cfg.has_section("stats") == False: <NEW_LINE> <INDENT> self.cfg.add_section("stats") <NEW_LINE> <DEDENT> statval = 0 <NEW_LINE> if self.cfg.has_option("stats",statname) == True: <NEW_LINE> <INDENT> statval = self.cfg.get("stats",statname) <NEW_LINE> <DEDENT> self.cfg.set("stats",statname,int(statval) + 1) <NEW_LINE> <DEDENT> def set(self,statname,statval): <NEW_LINE> <INDENT> if self.cfg.has_section("stats") == False: <NEW_LINE> <INDENT> self.cfg.add_section("stats") <NEW_LINE> <DEDENT> self.cfg.set("stats",statname,int(statval)) <NEW_LINE> self.cfg.redis_set(("stats_%s" % statname),int(statval)) | Handles counters etc for drawing nice graphs | 62599061e76e3b2f99fda0c4 |
class Logger(logging.Logger): <NEW_LINE> <INDENT> def __init__(self, name, level): <NEW_LINE> <INDENT> logging.Logger.__init__(self, name, level) <NEW_LINE> self.nomFichierLogs = dateExecution + ".log" <NEW_LINE> logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s") <NEW_LINE> fileHandler = logging.FileHandler("{0}/{1}.log".format(pathDstConfig, self.nomFichierLogs)).setFormatter(logFormatter) <NEW_LINE> self.addHandler(fileHandler) <NEW_LINE> logFormatter = logging.Formatter("[%levelname) -5.5s] %(message)s") <NEW_LINE> consoleHandler = logging.StreamHandler().setFormatter(logFormatter) <NEW_LINE> self.addHandler(consoleHandler) | classdocs | 62599061dd821e528d6da4e3 |
@total_ordering <NEW_LINE> class Package(object): <NEW_LINE> <INDENT> def __init__(self, name, version, filename, last_modified=None, **kwargs): <NEW_LINE> <INDENT> self.name = normalize_name(name) <NEW_LINE> self.version = version <NEW_LINE> self.filename = filename <NEW_LINE> if last_modified is not None: <NEW_LINE> <INDENT> self.last_modified = last_modified <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.last_modified = datetime.utcnow() <NEW_LINE> <DEDENT> self.data = kwargs <NEW_LINE> <DEDENT> def get_url(self, request): <NEW_LINE> <INDENT> return request.db.get_url(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_prerelease(self): <NEW_LINE> <INDENT> return re.match(r'^\d+(\.\d+)*$', self.version) is None <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.name) + hash(self.version) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.name == other.name and self.version == other.version <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return ((self.name, pkg_resources.parse_version(self.version)) < (other.name, pkg_resources.parse_version(other.version))) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return unicode(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return unicode(self).encode('utf-8') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'Package(%s)' % (self.filename) <NEW_LINE> <DEDENT> def __json__(self, request): <NEW_LINE> <INDENT> return { 'name': self.name, 'filename': self.filename, 'last_modified': self.last_modified, 'version': self.version, 'url': self.get_url(request), } | Representation of a versioned package
Parameters
----------
name : str
The name of the package (will be normalized)
version : str
The version number of the package
filename : str
The name of the package file
last_modified : datetime, optional
The datetime when this package was uploaded (default now)
**kwargs : dict
Metadata about the package | 62599061f548e778e596cc4d |
class BaseShippingBackend(BaseBackend): <NEW_LINE> <INDENT> def __init__(self, shop=ShippingBackendAPI()): <NEW_LINE> <INDENT> self.shop = shop <NEW_LINE> super(BaseShippingBackend, self).__init__() <NEW_LINE> <DEDENT> def finished(self): <NEW_LINE> <INDENT> return HttpResponseRedirect('checkout_shipping') | This is the base class for all shipping backends to implement.
Class members:
url_namespace
backend_name
shop | 6259906155399d3f05627be4 |
class NumpyGenericOperator(Operator): <NEW_LINE> <INDENT> def __init__(self, mapping, adjoint_mapping=None, dim_source=1, dim_range=1, linear=False, parameters={}, source_id=None, range_id=None, solver_options=None, name=None): <NEW_LINE> <INDENT> self.__auto_init(locals()) <NEW_LINE> self.source = NumpyVectorSpace(dim_source, source_id) <NEW_LINE> self.range = NumpyVectorSpace(dim_range, range_id) <NEW_LINE> self.parameters_own = parameters <NEW_LINE> <DEDENT> def apply(self, U, mu=None): <NEW_LINE> <INDENT> assert U in self.source <NEW_LINE> assert self.parameters.assert_compatible(mu) <NEW_LINE> if self.parametric: <NEW_LINE> <INDENT> return self.range.make_array(self.mapping(U.to_numpy(), mu=mu)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.range.make_array(self.mapping(U.to_numpy())) <NEW_LINE> <DEDENT> <DEDENT> def apply_adjoint(self, V, mu=None): <NEW_LINE> <INDENT> if self.adjoint_mapping is None: <NEW_LINE> <INDENT> raise ValueError('NumpyGenericOperator: adjoint mapping was not defined.') <NEW_LINE> <DEDENT> assert V in self.range <NEW_LINE> assert self.parameters.assert_compatible(mu) <NEW_LINE> V = V.to_numpy() <NEW_LINE> if self.parametric: <NEW_LINE> <INDENT> return self.source.make_array(self.adjoint_mapping(V, mu=mu)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.source.make_array(self.adjoint_mapping(V)) | Wraps an arbitrary Python function between |NumPy arrays| as an |Operator|.
Parameters
----------
mapping
The function to wrap. If `parameters` is `None`, the function is of
the form `mapping(U)` and is expected to be vectorized. In particular::
mapping(U).shape == U.shape[:-1] + (dim_range,).
If `parameters` is not `None`, the function has to have the signature
`mapping(U, mu)`.
adjoint_mapping
The adjoint function to wrap. If `parameters` is `None`, the function is of
the form `adjoint_mapping(U)` and is expected to be vectorized. In particular::
adjoint_mapping(U).shape == U.shape[:-1] + (dim_source,).
If `parameters` is not `None`, the function has to have the signature
`adjoint_mapping(U, mu)`.
dim_source
Dimension of the operator's source.
dim_range
Dimension of the operator's range.
linear
Set to `True` if the provided `mapping` and `adjoint_mapping` are linear.
parameters
The |Parameters| the operator depends on.
solver_options
The |solver_options| for the operator.
name
Name of the operator. | 62599061d7e4931a7ef3d6e7 |
class ProtocolLayer4Registry(Registry): <NEW_LINE> <INDENT> entry_point = 'dhcpkit_vpp.protocols.layer4' | Registry for Protocols | 625990617cff6e4e811b710a |
class ExpressRouteServiceProvider(Resource): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'peering_locations': {'key': 'properties.peeringLocations', 'type': '[str]'}, 'bandwidths_offered': {'key': 'properties.bandwidthsOffered', 'type': '[ExpressRouteServiceProviderBandwidthsOffered]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ExpressRouteServiceProvider, self).__init__(**kwargs) <NEW_LINE> self.peering_locations = kwargs.get('peering_locations', None) <NEW_LINE> self.bandwidths_offered = kwargs.get('bandwidths_offered', None) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) | A ExpressRouteResourceProvider object.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param peering_locations: Get a list of peering locations.
:type peering_locations: list[str]
:param bandwidths_offered: Gets bandwidths offered.
:type bandwidths_offered:
list[~azure.mgmt.network.v2019_06_01.models.ExpressRouteServiceProviderBandwidthsOffered]
:param provisioning_state: Gets the provisioning state of the resource.
:type provisioning_state: str | 62599061d268445f2663a6bf |
class PrivateIngredientsAPITest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> self.user = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> self.client.force_authenticate(self.user) <NEW_LINE> <DEDENT> def test_retrieve_ingredient_list(self): <NEW_LINE> <INDENT> Ingredient.objects.create(user=self.user, name='Kale') <NEW_LINE> Ingredient.objects.create(user=self.user, name='Salt') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> ingredients = Ingredient.objects.all().order_by('-name') <NEW_LINE> serializer = IngredientSerializer(ingredients, many=True) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(res.data, serializer.data) <NEW_LINE> <DEDENT> def test_ingredients_limited_to_user(self): <NEW_LINE> <INDENT> user2 = get_user_model().objects.create_user( '[email protected]', 'testpass' ) <NEW_LINE> Ingredient.objects.create(user=user2, name='Vinegar') <NEW_LINE> ingredient = Ingredient.objects.create(user=self.user, name='Tumeric') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(len(res.data), 1) <NEW_LINE> self.assertEqual(res.data[0]['name'], ingredient.name) <NEW_LINE> <DEDENT> def test_create_ingredient_successful(self): <NEW_LINE> <INDENT> payload = {'name': 'Cabbage'} <NEW_LINE> self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> exists = Ingredient.objects.filter( user=self.user, name=payload['name'], ).exists() <NEW_LINE> self.assertTrue(exists) <NEW_LINE> <DEDENT> def test_create_ingredient_invalid(self): <NEW_LINE> <INDENT> payload = {'name': ''} <NEW_LINE> res = self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_retrieve_ingredients_assigned_to_recipes(self): <NEW_LINE> <INDENT> ingredient1 = Ingredient.objects.create( user=self.user, name='Apples' ) <NEW_LINE> ingredient2 = Ingredient.objects.create( user=self.user, name='Turkey' ) <NEW_LINE> recipe = Recipe.objects.create( title='Apple crumble', time_minutes=5, price=10, user=self.user ) <NEW_LINE> recipe.ingredients.add(ingredient1) <NEW_LINE> res = self.client.get(INGREDIENTS_URL, {'assigned_only': 1}) <NEW_LINE> serializer1 = IngredientSerializer(ingredient1) <NEW_LINE> serializer2 = IngredientSerializer(ingredient2) <NEW_LINE> self.assertIn(serializer1.data, res.data) <NEW_LINE> self.assertNotIn(serializer2.data, res.data) <NEW_LINE> <DEDENT> def test_retrieve_ingredients_assigned_unique(self): <NEW_LINE> <INDENT> ingredient = Ingredient.objects.create(user=self.user, name='Eggs') <NEW_LINE> Ingredient.objects.create(user=self.user, name='Cheese') <NEW_LINE> recipe1 = Recipe.objects.create( title='Eggs benedict', time_minutes=30, price=12.00, user=self.user ) <NEW_LINE> recipe1.ingredients.add(ingredient) <NEW_LINE> recipe2 = Recipe.objects.create( title='Corlander eggs on toast', time_minutes=20, price=5.00, user=self.user ) <NEW_LINE> recipe2.ingredients.add(ingredient) <NEW_LINE> res = self.client.get(INGREDIENTS_URL, {'assigned_only': 1}) <NEW_LINE> self.assertEqual(len(res.data), 1) | Test the private ingredients API | 625990618e71fb1e983bd190 |
class CardDeck(object): <NEW_LINE> <INDENT> def __init__(self, decks=1): <NEW_LINE> <INDENT> self.deck = deque() <NEW_LINE> self.deck_count = int(decks) <NEW_LINE> self.shuffle_count = self.deck_count * 7 <NEW_LINE> self.suits = ( 'Clubs', 'Diamonds', 'Hearts', 'Spades', ) <NEW_LINE> self.names = ( 'Ace', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine', 'Ten', 'Jack', 'Queen', 'King', ) <NEW_LINE> self.shuffle() <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.deck) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.deck) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self.deck[index] <NEW_LINE> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> if self.deck: <NEW_LINE> <INDENT> self.deck = deque() <NEW_LINE> <DEDENT> max_decks = self.deck_count + 1 <NEW_LINE> for deck in range(1, max_decks): <NEW_LINE> <INDENT> for suit in self.suits: <NEW_LINE> <INDENT> for num, name in enumerate(self.names, start=1): <NEW_LINE> <INDENT> card = PlayingCard() <NEW_LINE> card.set_attributes(name, suit, num) <NEW_LINE> self.deck.append(card) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for deck_shuffle in range(self.shuffle_count): <NEW_LINE> <INDENT> random.shuffle(self.deck) <NEW_LINE> <DEDENT> <DEDENT> def draw(self): <NEW_LINE> <INDENT> return self.deck.popleft() | Contains 52 or more playing cards and the methods for using them. | 62599061fff4ab517ebceeec |
class ScoreHandler(PermissionHandler, StarkHandler): <NEW_LINE> <INDENT> model_form_class = ScoreModelForm <NEW_LINE> list_display = ['content', 'score', 'user'] <NEW_LINE> def get_list_display(self, request, *args, **kwargs): <NEW_LINE> <INDENT> value = [] <NEW_LINE> if self.list_display: <NEW_LINE> <INDENT> value.extend(self.list_display) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def get_urls(self): <NEW_LINE> <INDENT> patterns = [ re_path(r'^list/(?P<student_id>\d+)/$', self.wrapper(self.list_view), name=self.get_list_url_name), re_path(r'^add/(?P<student_id>\d+)/$', self.wrapper(self.add_view), name=self.get_add_url_name), ] <NEW_LINE> patterns.extend(self.extra_urls()) <NEW_LINE> return patterns <NEW_LINE> <DEDENT> def get_queryset(self, request, *args, **kwargs): <NEW_LINE> <INDENT> student_id = kwargs.get('student_id') <NEW_LINE> return self.model_class.objects.filter(student_id=student_id) <NEW_LINE> <DEDENT> def save(self, request, form, is_update, *args, **kwargs): <NEW_LINE> <INDENT> student_id = kwargs.get('student_id') <NEW_LINE> current_user_id = request.session['user_info']['id'] <NEW_LINE> form.instance.student_id = student_id <NEW_LINE> form.instance.user_id = current_user_id <NEW_LINE> form.save() <NEW_LINE> score = form.instance.score <NEW_LINE> if score > 0: <NEW_LINE> <INDENT> form.instance.student.score += abs(score) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form.instance.student.score -= abs(score) <NEW_LINE> <DEDENT> form.instance.student.save() | stark configuration: score record table | 6259906115baa72349463658 |
class ImageConfiguration(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "Port": (str, False), "RuntimeEnvironmentVariables": ([KeyValuePair], False), "StartCommand": (str, False), } | `ImageConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apprunner-service-imageconfiguration.html>`__ | 62599061a17c0f6771d5d707 |
class EndpointNotFoundError(RpcError): <NEW_LINE> <INDENT> def convert(self): <NEW_LINE> <INDENT> return _EndpointNotFoundError(self.message) | Exception class for endpoint not found error
| 625990618da39b475be048ae |
class Param(NamedTuple): <NEW_LINE> <INDENT> data: torch.Tensor <NEW_LINE> optim_args: Dict[str, Any] = {} | Data structure for model parameters | 62599061f548e778e596cc4e |
class DayWeatherForecastParser: <NEW_LINE> <INDENT> def __init__(self, period_weather_forecast_parsers: list[PeriodWeatherForecastParser]) -> None: <NEW_LINE> <INDENT> self._period_parsers = period_weather_forecast_parsers <NEW_LINE> <DEDENT> @property <NEW_LINE> def period_parsers(self) -> list[PeriodWeatherForecastParser]: <NEW_LINE> <INDENT> return self._period_parsers <NEW_LINE> <DEDENT> @property <NEW_LINE> def temperature_min_of_the_day(self) -> float: <NEW_LINE> <INDENT> temperature_min_of_the_day = min( self._period_parsers, key=operator.attrgetter('temperature_min') ).temperature_min <NEW_LINE> return temperature_min_of_the_day <NEW_LINE> <DEDENT> @property <NEW_LINE> def temperature_max_of_the_day(self) -> float: <NEW_LINE> <INDENT> temperature_max_of_the_day = max( self._period_parsers, key=operator.attrgetter('temperature_max') ).temperature_max <NEW_LINE> return temperature_max_of_the_day | Implements daily weather forecast parser.
Gathers all `PeriodWeatherForecastParser` instances that contain information for the same day
(but for different time periods).
Used by the other weather-parser classes via composition. | 625990616e29344779b01d15 |
class ProximalConvexConjKLCrossEntropy(Operator): <NEW_LINE> <INDENT> def __init__(self, sigma): <NEW_LINE> <INDENT> self.sigma = float(sigma) <NEW_LINE> super(ProximalConvexConjKLCrossEntropy, self).__init__( domain=space, range=space, linear=False) <NEW_LINE> <DEDENT> def _call(self, x, out): <NEW_LINE> <INDENT> import scipy.special <NEW_LINE> if g is None: <NEW_LINE> <INDENT> lambw = scipy.special.lambertw( (self.sigma / lam) * np.exp(x / lam)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lambw = scipy.special.lambertw( (self.sigma / lam) * g * np.exp(x / lam)) <NEW_LINE> <DEDENT> if not np.issubsctype(self.domain.dtype, np.complexfloating): <NEW_LINE> <INDENT> lambw = lambw.real <NEW_LINE> <DEDENT> out.lincomb(1, x, -lam, lambw) | Proximal operator of conjugate of cross entropy KL divergence. | 62599061a8370b77170f1a94 |
class Part(object): <NEW_LINE> <INDENT> def __init__(self, shape, name): <NEW_LINE> <INDENT> self._shape = shape <NEW_LINE> self._part_transformation_matrices = [] <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def shape(self): <NEW_LINE> <INDENT> return self._shape <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def add_matrix(self, m): <NEW_LINE> <INDENT> assert np.shape(m) == (4, 4) <NEW_LINE> self._part_transformation_matrices.append(m) <NEW_LINE> <DEDENT> @property <NEW_LINE> def combined_matrix(self): <NEW_LINE> <INDENT> from functools import reduce <NEW_LINE> if self._part_transformation_matrices: <NEW_LINE> <INDENT> return reduce(np.dot, self._part_transformation_matrices) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return identity_matrix() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def transformed_shape(self): <NEW_LINE> <INDENT> from OCC.Core.gp import gp_Trsf <NEW_LINE> from OCC.Core.BRepBuilderAPI import BRepBuilderAPI_Transform <NEW_LINE> trsf = gp_Trsf() <NEW_LINE> m = self.combined_matrix <NEW_LINE> trsf.SetValues(m[0, 0], m[0, 1], m[0, 2], m[0, 3], m[1, 0], m[1, 1], m[1, 2], m[1, 3], m[2, 0], m[2, 1], m[2, 2], m[2, 3]) <NEW_LINE> transformed = BRepBuilderAPI_Transform(self.shape, trsf) <NEW_LINE> return transformed.Shape() | A Part is the simplest possible element
Parameters
----------
shape : OCC shape
name : str | 62599061e64d504609df9f31 |
class HOCMethod(HOCObject): <NEW_LINE> <INDENT> option_spec: OptionSpec = HOCObject.option_spec.copy() <NEW_LINE> option_spec.update({ 'abstractmethod': directives.flag, 'async': directives.flag, 'classmethod': directives.flag, 'final': directives.flag, 'property': directives.flag, 'staticmethod': directives.flag, }) <NEW_LINE> def needs_arglist(self) -> bool: <NEW_LINE> <INDENT> if 'property' in self.options: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def get_signature_prefix(self, sig: str) -> List[nodes.Node]: <NEW_LINE> <INDENT> prefix: List[nodes.Node] = [] <NEW_LINE> if 'final' in self.options: <NEW_LINE> <INDENT> prefix.append(nodes.Text('final')) <NEW_LINE> prefix.append(addnodes.desc_sig_space()) <NEW_LINE> <DEDENT> if 'abstractmethod' in self.options: <NEW_LINE> <INDENT> prefix.append(nodes.Text('abstract')) <NEW_LINE> prefix.append(addnodes.desc_sig_space()) <NEW_LINE> <DEDENT> if 'async' in self.options: <NEW_LINE> <INDENT> prefix.append(nodes.Text('async')) <NEW_LINE> prefix.append(addnodes.desc_sig_space()) <NEW_LINE> <DEDENT> if 'classmethod' in self.options: <NEW_LINE> <INDENT> prefix.append(nodes.Text('classmethod')) <NEW_LINE> prefix.append(addnodes.desc_sig_space()) <NEW_LINE> <DEDENT> if 'property' in self.options: <NEW_LINE> <INDENT> prefix.append(nodes.Text('property')) <NEW_LINE> prefix.append(addnodes.desc_sig_space()) <NEW_LINE> <DEDENT> if 'staticmethod' in self.options: <NEW_LINE> <INDENT> prefix.append(nodes.Text('static')) <NEW_LINE> prefix.append(addnodes.desc_sig_space()) <NEW_LINE> <DEDENT> return prefix <NEW_LINE> <DEDENT> def get_index_text(self, modname: str, name_cls: Tuple[str, str]) -> str: <NEW_LINE> <INDENT> name, cls = name_cls <NEW_LINE> try: <NEW_LINE> <INDENT> clsname, methname = name.rsplit('.', 1) <NEW_LINE> if modname and self.env.config.add_module_names: <NEW_LINE> <INDENT> clsname = '.'.join([modname, clsname]) <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> if modname: <NEW_LINE> <INDENT> return _('%s() (in module %s)') % (name, modname) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '%s()' % name <NEW_LINE> <DEDENT> <DEDENT> if 'classmethod' in self.options: <NEW_LINE> <INDENT> return _('%s() (HOC %s class method)') % (methname, clsname) <NEW_LINE> <DEDENT> elif 'property' in self.options: <NEW_LINE> <INDENT> return _('%s (HOC %s property)') % (methname, clsname) <NEW_LINE> <DEDENT> elif 'staticmethod' in self.options: <NEW_LINE> <INDENT> return _('%s() (HOC %s static method)') % (methname, clsname) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return _('%s() (HOC %s method)') % (methname, clsname) | Description of a method. | 6259906176e4537e8c3f0c53 |
class MoveZeroesTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_case_1(self): <NEW_LINE> <INDENT> numbers = [0, 1, 0, 3, 12] <NEW_LINE> move_zeroes(numbers) <NEW_LINE> self.assertEqual(numbers, [1, 3, 12, 0, 0]) <NEW_LINE> <DEDENT> def test_case_2(self): <NEW_LINE> <INDENT> numbers = [0, 0, 0, 0, 0] <NEW_LINE> move_zeroes(numbers) <NEW_LINE> self.assertEqual(numbers, [0, 0, 0, 0, 0]) <NEW_LINE> <DEDENT> def test_case_3(self): <NEW_LINE> <INDENT> numbers = [1, 2, 3, 4, 5] <NEW_LINE> move_zeroes(numbers) <NEW_LINE> self.assertEqual(numbers, [1, 2, 3, 4, 5]) <NEW_LINE> <DEDENT> def test_case_4(self): <NEW_LINE> <INDENT> numbers = [1, 0, 0, 0, 0] <NEW_LINE> move_zeroes(numbers) <NEW_LINE> self.assertEqual(numbers, [1, 0, 0, 0, 0]) <NEW_LINE> <DEDENT> def test_case_5(self): <NEW_LINE> <INDENT> numbers = [0, 0, 0, 0, 1] <NEW_LINE> move_zeroes(numbers) <NEW_LINE> self.assertEqual(numbers, [1, 0, 0, 0, 0]) <NEW_LINE> <DEDENT> def test_case_6(self): <NEW_LINE> <INDENT> numbers = [0, 1, 0, 2, 0] <NEW_LINE> move_zeroes(numbers) <NEW_LINE> self.assertEqual(numbers, [1, 2, 0, 0, 0]) <NEW_LINE> <DEDENT> def test_case_7(self): <NEW_LINE> <INDENT> numbers = [1, 0, 2, 0, 3] <NEW_LINE> move_zeroes(numbers) <NEW_LINE> self.assertEqual(numbers, [1, 2, 3, 0, 0]) | Tests for move zeroes challenge. | 625990613d592f4c4edbc5a3 |
class DotDict(dict): <NEW_LINE> <INDENT> def __init__(self, value=None): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for key in value: <NEW_LINE> <INDENT> self.__setitem__(key, value[key]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if '.' in key: <NEW_LINE> <INDENT> myKey, restOfKey = key.split('.', 1) <NEW_LINE> target = self.setdefault(myKey, DotDict()) <NEW_LINE> target[restOfKey] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(value, dict) and not isinstance(value, DotDict): <NEW_LINE> <INDENT> value = DotDict(value) <NEW_LINE> <DEDENT> dict.__setitem__(self, key, value) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if '.' not in key: <NEW_LINE> <INDENT> return dict.__getitem__(self, key) <NEW_LINE> <DEDENT> myKey, restOfKey = key.split('.', 1) <NEW_LINE> target = dict.__getitem__(self, myKey) <NEW_LINE> return target[restOfKey] <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> if '.' not in key: <NEW_LINE> <INDENT> return dict.__contains__(self, key) <NEW_LINE> <DEDENT> myKey, restOfKey = key.split('.', 1) <NEW_LINE> target = dict.__getitem__(self, myKey) <NEW_LINE> return restOfKey in target <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> return DotDict(copy.deepcopy(dict(self))) <NEW_LINE> <DEDENT> __setattr__ = __setitem__ <NEW_LINE> __getattr__ = __getitem__ | Wrapper dict that allows to get dotted attributes | 625990614e4d562566373acd |
class Min(FunctionBase): <NEW_LINE> <INDENT> def __init__(self, name, pos): <NEW_LINE> <INDENT> FunctionBase.__init__(self, name, pos) <NEW_LINE> self.shapeImage = 'min.png' <NEW_LINE> self.minValue = 1e38 <NEW_LINE> <DEDENT> def drawShape(self, gc): <NEW_LINE> <INDENT> gc.setPen(QPen(self.shapeColor, 0.6)) <NEW_LINE> grad = QLinearGradient(0, -25, 0, 50) <NEW_LINE> grad.setColorAt(0, Color.white) <NEW_LINE> grad.setColorAt(1, Color.yellow) <NEW_LINE> gc.setBrush(QBrush(grad)) <NEW_LINE> gc.drawRoundedRect(-25, -25, 50, 50, 5, 5) <NEW_LINE> self.drawIcon(gc, -20, -10) <NEW_LINE> <DEDENT> def sim(self, flag, value, time, step): <NEW_LINE> <INDENT> inp = self.terminal[1].value <NEW_LINE> if flag == SIM_INIT: <NEW_LINE> <INDENT> self.minValue = 1e38 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if (type(inp) == ndarray) or (type(inp) == list): <NEW_LINE> <INDENT> value = min(inp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = inp <NEW_LINE> <DEDENT> if value < self.minValue: <NEW_LINE> <INDENT> self.minValue = value <NEW_LINE> <DEDENT> self.terminal[2].value = self.minValue | !
@if English
Minimum value of the input. If the input value is a vector,
the minimum of the vector's items is taken.
@endif
@if Slovak
Minimalna hodnota vstupnej hodnoty. Ak je vstupná hodnota vektor,
berie sa minimálna hodnota z položiek vektora.
@endif | 62599061cc0a2c111447c632 |
class EmptyError(Exception): <NEW_LINE> <INDENT> pass | Exception class for empty queue ADT | 62599061d7e4931a7ef3d6e8 |
class IpRange(models.Model): <NEW_LINE> <INDENT> start_ip = models.BigIntegerField(_('Ip range block beginning, as integer'), db_index=True) <NEW_LINE> end_ip = models.BigIntegerField(_('Ip range block ending, as integer'), db_index=True) <NEW_LINE> country = models.ForeignKey(Country) <NEW_LINE> region = models.ForeignKey(Region, null=True) <NEW_LINE> city = models.ForeignKey(City, null=True) <NEW_LINE> objects = IpRangeManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('IP range') <NEW_LINE> verbose_name_plural = _("IP ranges") | IP ranges are stored in separate table, one row for each ip range.
Each range might be associated with either country (for IP ranges outside of Russia and Ukraine)
or country, region and city together.
Ip range borders are `stored as long integers
<http://publibn.boulder.ibm.com/doc_link/en_US/a_doc_lib/libs/commtrf2/inet_addr.htm>`_ | 6259906191af0d3eaad3b4ee |
class WM_OT_studiolight_uninstall(Operator): <NEW_LINE> <INDENT> bl_idname = 'wm.studiolight_uninstall' <NEW_LINE> bl_label = "Uninstall Studio Light" <NEW_LINE> index: bpy.props.IntProperty() <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> import os <NEW_LINE> prefs = context.preferences <NEW_LINE> for studio_light in prefs.studio_lights: <NEW_LINE> <INDENT> if studio_light.index == self.index: <NEW_LINE> <INDENT> for filepath in ( studio_light.path, studio_light.path_irr_cache, studio_light.path_sh_cache, ): <NEW_LINE> <INDENT> if filepath and os.path.exists(filepath): <NEW_LINE> <INDENT> os.unlink(filepath) <NEW_LINE> <DEDENT> <DEDENT> prefs.studio_lights.remove(studio_light) <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> <DEDENT> return {'CANCELLED'} | Delete Studio Light | 625990613539df3088ecd963 |
class Section(ABC): <NEW_LINE> <INDENT> def __init__(self, section_name, parser): <NEW_LINE> <INDENT> self._section_name = section_name <NEW_LINE> self._parser = parser <NEW_LINE> <DEDENT> def _has_option(self, option_name): <NEW_LINE> <INDENT> return self._parser.has_option(self._section_name, option_name) <NEW_LINE> <DEDENT> def _get_option(self, option_name, converter=None): <NEW_LINE> <INDENT> return get_option(self._parser, self._section_name, option_name, converter) <NEW_LINE> <DEDENT> def _set_option(self, option_name, value): <NEW_LINE> <INDENT> set_option(self._parser, self._section_name, option_name, value) | A base class for representation of a configuration section. | 6259906191f36d47f22319f2 |
class ProviderResourceType(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'default_api_version': {'readonly': True}, 'api_profiles': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'resource_type': {'key': 'resourceType', 'type': 'str'}, 'locations': {'key': 'locations', 'type': '[str]'}, 'aliases': {'key': 'aliases', 'type': '[Alias]'}, 'api_versions': {'key': 'apiVersions', 'type': '[str]'}, 'default_api_version': {'key': 'defaultApiVersion', 'type': 'str'}, 'zone_mappings': {'key': 'zoneMappings', 'type': '[ZoneMapping]'}, 'api_profiles': {'key': 'apiProfiles', 'type': '[ApiProfile]'}, 'capabilities': {'key': 'capabilities', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, } <NEW_LINE> def __init__( self, *, resource_type: Optional[str] = None, locations: Optional[List[str]] = None, aliases: Optional[List["Alias"]] = None, api_versions: Optional[List[str]] = None, zone_mappings: Optional[List["ZoneMapping"]] = None, capabilities: Optional[str] = None, properties: Optional[Dict[str, str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ProviderResourceType, self).__init__(**kwargs) <NEW_LINE> self.resource_type = resource_type <NEW_LINE> self.locations = locations <NEW_LINE> self.aliases = aliases <NEW_LINE> self.api_versions = api_versions <NEW_LINE> self.default_api_version = None <NEW_LINE> self.zone_mappings = zone_mappings <NEW_LINE> self.api_profiles = None <NEW_LINE> self.capabilities = capabilities <NEW_LINE> self.properties = properties | Resource type managed by the resource provider.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar resource_type: The resource type.
:vartype resource_type: str
:ivar locations: The collection of locations where this resource type can be created.
:vartype locations: list[str]
:ivar aliases: The aliases that are supported by this resource type.
:vartype aliases: list[~azure.mgmt.resource.resources.v2020_06_01.models.Alias]
:ivar api_versions: The API version.
:vartype api_versions: list[str]
:ivar default_api_version: The default API version.
:vartype default_api_version: str
:ivar zone_mappings:
:vartype zone_mappings: list[~azure.mgmt.resource.resources.v2020_06_01.models.ZoneMapping]
:ivar api_profiles: The API profiles for the resource provider.
:vartype api_profiles: list[~azure.mgmt.resource.resources.v2020_06_01.models.ApiProfile]
:ivar capabilities: The additional capabilities offered by this resource type.
:vartype capabilities: str
:ivar properties: The properties.
:vartype properties: dict[str, str] | 625990614a966d76dd5f05bb |
class Solution(object): <NEW_LINE> <INDENT> def isSameTree(self, p, q): <NEW_LINE> <INDENT> if not p and not q: return True <NEW_LINE> if not p or not q: return False <NEW_LINE> if p.val == q.val: <NEW_LINE> <INDENT> return self.isSameTree(p.left, q.left) and self.isSameTree(p.right, q.right) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | https://leetcode-cn.com/problems/same-tree/ | 6259906167a9b606de547605 |
class SerialPort(object): <NEW_LINE> <INDENT> def __init__( self, portName, baudRate, numDataBits, parity, numStopBits, readTimeout=None, writeTimeout=None, exceptionClass=None): <NEW_LINE> <INDENT> self._exceptionClass = exceptionClass <NEW_LINE> byteSize = _getByteSize(numDataBits) <NEW_LINE> parity = _getParity(parity) <NEW_LINE> stopBits = _getStopBits(numStopBits) <NEW_LINE> self._serialPort = self._try('initialization', Serial, *[], **{ 'port': None, 'baudrate': baudRate, 'bytesize': byteSize, 'parity': parity, 'stopbits': stopBits, 'timeout': readTimeout, 'writeTimeout': writeTimeout }) <NEW_LINE> self._try('initialization', self._serialPort.setPort, portName) <NEW_LINE> <DEDENT> def _try(self, name, callable_, *args, **kwds): <NEW_LINE> <INDENT> if self._exceptionClass is None: <NEW_LINE> <INDENT> return callable_(*args, **kwds) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return callable_(*args, **kwds) <NEW_LINE> <DEDENT> except SerialException as e: <NEW_LINE> <INDENT> raise self._exceptionClass( 'Serial port {:s} failed with message: {:s}'.format(name, str(e))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def open(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._try('open', self._serialPort.open) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> raise self._exceptionClass('Serial port open failed with message: {:s}'.format(str(e))) <NEW_LINE> <DEDENT> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._try('write', self._serialPort.write, data) <NEW_LINE> <DEDENT> except SerialTimeoutException: <NEW_LINE> <INDENT> if self._exceptionClass is None: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise self._exceptionClass('Serial port write timed out.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> return self._try('flush', self._serialPort.flush) <NEW_LINE> <DEDENT> def flushOutput(self): <NEW_LINE> <INDENT> return self._try('output flush', self._serialPort.flushOutput) <NEW_LINE> <DEDENT> def flushInput(self): <NEW_LINE> <INDENT> return self._try('input flush', self._serialPort.flushInput) <NEW_LINE> <DEDENT> def read(self, numBytes): <NEW_LINE> <INDENT> return self._try('read', self._serialPort.read, numBytes) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> return self._try('close', self._serialPort.close) | Serial communication port.
An instance of this class wraps an instance of the `Serial` class of the PySerial
serial communications Python extension, optionally transforming exceptions raised
by `Serial` methods into exceptions of another type.
Note that the functionality of this class is quite limited. It does not support
reading or modifying the port configuration, for example. It is intended only to
simplify the implementation of certain Maka input device classes, mainly by
supporting the transformation of exceptions as mentioned above. | 625990611b99ca4002290099 |
class HeadRecordVisualCompass(AbstractActionElement): <NEW_LINE> <INDENT> def perform(self): <NEW_LINE> <INDENT> self.blackboard.blackboard.set_head_duty(HeadMode.RECORD_VISUAL_COMPASS) <NEW_LINE> return self.pop() | Record ground truth for the visual compass | 625990618e71fb1e983bd192 |
class RandomTranslateWithReflect: <NEW_LINE> <INDENT> def __init__(self, max_translation): <NEW_LINE> <INDENT> if not _PIL_AVAILABLE: <NEW_LINE> <INDENT> raise ModuleNotFoundError("You want to use `Pillow` which is not installed yet.") <NEW_LINE> <DEDENT> self.max_translation = max_translation <NEW_LINE> <DEDENT> def __call__(self, old_image): <NEW_LINE> <INDENT> xtranslation, ytranslation = np.random.randint(-self.max_translation, self.max_translation + 1, size=2) <NEW_LINE> xpad, ypad = abs(xtranslation), abs(ytranslation) <NEW_LINE> xsize, ysize = old_image.size <NEW_LINE> flipped_lr = old_image.transpose(Image.FLIP_LEFT_RIGHT) <NEW_LINE> flipped_tb = old_image.transpose(Image.FLIP_TOP_BOTTOM) <NEW_LINE> flipped_both = old_image.transpose(Image.ROTATE_180) <NEW_LINE> new_image = Image.new("RGB", (xsize + 2 * xpad, ysize + 2 * ypad)) <NEW_LINE> new_image.paste(old_image, (xpad, ypad)) <NEW_LINE> new_image.paste(flipped_lr, (xpad + xsize - 1, ypad)) <NEW_LINE> new_image.paste(flipped_lr, (xpad - xsize + 1, ypad)) <NEW_LINE> new_image.paste(flipped_tb, (xpad, ypad + ysize - 1)) <NEW_LINE> new_image.paste(flipped_tb, (xpad, ypad - ysize + 1)) <NEW_LINE> new_image.paste(flipped_both, (xpad - xsize + 1, ypad - ysize + 1)) <NEW_LINE> new_image.paste(flipped_both, (xpad + xsize - 1, ypad - ysize + 1)) <NEW_LINE> new_image.paste(flipped_both, (xpad - xsize + 1, ypad + ysize - 1)) <NEW_LINE> new_image.paste(flipped_both, (xpad + xsize - 1, ypad + ysize - 1)) <NEW_LINE> new_image = new_image.crop( (xpad - xtranslation, ypad - ytranslation, xpad + xsize - xtranslation, ypad + ysize - ytranslation) ) <NEW_LINE> return new_image | Translate image randomly
Translate vertically and horizontally by n pixels where
n is an integer drawn uniformly and independently for each axis
from [-max_translation, max_translation].
Fill the uncovered blank area with reflect padding. | 625990615166f23b2e244a99 |
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0): <NEW_LINE> <INDENT> if isinstance(size, int): <NEW_LINE> <INDENT> if size >= 0: <NEW_LINE> <INDENT> self._Square__size = size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("size must be >= 0") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("size must be an integer") | Square class | 625990612c8b7c6e89bd4eb6 |
class RibmosaicRender(bpy.types.RenderEngine): <NEW_LINE> <INDENT> bl_use_preview = True <NEW_LINE> bl_idname = rm.ENGINE <NEW_LINE> bl_label = rm.ENGINE <NEW_LINE> compile_library = "" <NEW_LINE> preview_samples = 2 <NEW_LINE> preview_shading = 2.0 <NEW_LINE> preview_compile = True <NEW_LINE> preview_optimize = True <NEW_LINE> def render(self, scene): <NEW_LINE> <INDENT> rmv = rm.ENGINE + " " + rm.VERSION <NEW_LINE> try: <NEW_LINE> <INDENT> c = scene.frame_current <NEW_LINE> i = scene.frame_step <NEW_LINE> s = scene.frame_start <NEW_LINE> print("Export Frame: ", rm.export_manager.export_frame) <NEW_LINE> if c == s or not ((c - i) == rm.export_manager.export_frame): <NEW_LINE> <INDENT> self.update_stats("", rmv + ": Preparing export...") <NEW_LINE> rm.export_manager.prepare_export(active_scene=scene, shader_library=self.compile_library) <NEW_LINE> self.update_stats("", rmv + ": Processing shaders...") <NEW_LINE> rm.export_manager.export_shaders(render_object=self, shader_library=self.compile_library) <NEW_LINE> if not self.compile_library: <NEW_LINE> <INDENT> self.update_stats("", rmv + ": Processing textures...") <NEW_LINE> rm.export_manager.export_textures(render_object=self) <NEW_LINE> <DEDENT> <DEDENT> if scene.name == "preview" and rm.export_manager.active_pass: <NEW_LINE> <INDENT> ap = rm.export_manager.active_pass <NEW_LINE> ap.pass_shadingrate = self.preview_shading <NEW_LINE> ap.pass_samples_x = self.preview_samples <NEW_LINE> ap.pass_samples_y = self.preview_samples <NEW_LINE> <DEDENT> if not self.compile_library: <NEW_LINE> <INDENT> self.update_stats("", rmv + ": Processing RIB...") <NEW_LINE> rm.export_manager.export_rib(render_object=self) <NEW_LINE> <DEDENT> self.update_stats("", rmv + ": Executing commands...") <NEW_LINE> rm.export_manager.execute_commands() <NEW_LINE> if not self.compile_library: <NEW_LINE> <INDENT> self.update_stats("", rmv + ": Post processing...") <NEW_LINE> x = rm.export_manager.display_output['x'] <NEW_LINE> y = rm.export_manager.display_output['y'] <NEW_LINE> result = self.begin_result(0, 0, x, y) <NEW_LINE> for p in rm.export_manager.display_output['passes']: <NEW_LINE> <INDENT> if self.test_break(): <NEW_LINE> <INDENT> raise rm_error.RibmosaicError( "RibmosaicRender.render: Export canceled") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if p['multilayer']: <NEW_LINE> <INDENT> result.load_from_file(p['file']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for l in result.layers: <NEW_LINE> <INDENT> if not p['layer'] or p['layer'] == l.name: <NEW_LINE> <INDENT> l.load_from_file(p['file']) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> rm.RibmosaicInfo("RibmosaicRender.render:" " Could not load " + p['file'] + " into layer") <NEW_LINE> <DEDENT> <DEDENT> self.end_result(result) <NEW_LINE> <DEDENT> self.update_stats("", rmv + ": Process complete") <NEW_LINE> <DEDENT> except rm_error.RibmosaicError as err: <NEW_LINE> <INDENT> self.update_stats("", rmv + ": Process terminated") <NEW_LINE> err.ReportError() | The render engine class, used for scene and preview renders | 625990619c8ee82313040ced |
class HorizontalAsymptotes(Asymptotes): <NEW_LINE> <INDENT> def __init__(self, info): <NEW_LINE> <INDENT> Asymptotes.__init__(self, info) <NEW_LINE> self.scale = self.yscale <NEW_LINE> <DEDENT> def value_from_spline(self, spline): <NEW_LINE> <INDENT> px = spline[0][1] <NEW_LINE> y = self.px_to_yval(px) <NEW_LINE> return px, y | Horizontal Asymptote.
Note:
Use this class to interact with any horizontal asymptotes in the
function you are grading. | 625990617d43ff2487427f73 |
class mbType16(): <NEW_LINE> <INDENT> def __init__(self,fmt,endian): <NEW_LINE> <INDENT> self.fmt = fmt <NEW_LINE> self.endian= endian <NEW_LINE> <DEDENT> def pack(self,value): <NEW_LINE> <INDENT> assert type(value)==int <NEW_LINE> if self.endian=='big': <NEW_LINE> <INDENT> return struct.pack(">"+self.fmt,value) <NEW_LINE> <DEDENT> elif self.endian=='little': <NEW_LINE> <INDENT> return struct.pack("<"+self.fmt,value) <NEW_LINE> <DEDENT> <DEDENT> def unpack(self,value): <NEW_LINE> <INDENT> if self.endian=='big': <NEW_LINE> <INDENT> return struct.unpack(">"+self.fmt,value) <NEW_LINE> <DEDENT> elif self.endian=='little': <NEW_LINE> <INDENT> return struct.unpack("<"+self.fmt,value) | fmt : H , h
suffixes: > , <
(optional-default <) | 625990614e4d562566373ace |
class FeatureExtractor(object): <NEW_LINE> <INDENT> def __init__(self, pooling=False, device='cpu', dtype=torch.float32): <NEW_LINE> <INDENT> self.preprocess = transforms.Compose([ transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), ]) <NEW_LINE> self.device, self.dtype = device, dtype <NEW_LINE> self.mobilenet = models.mobilenet_v2(pretrained=True).to(device) <NEW_LINE> self.mobilenet = nn.Sequential(*list(self.mobilenet.children())[:-1]) <NEW_LINE> if pooling: <NEW_LINE> <INDENT> self.mobilenet.add_module('LastAvgPool', nn.AvgPool2d(4, 4)) <NEW_LINE> <DEDENT> self.mobilenet.eval() <NEW_LINE> <DEDENT> def extract_mobilenet_feature(self, img): <NEW_LINE> <INDENT> num_img = img.shape[0] <NEW_LINE> img_prepro = [] <NEW_LINE> for i in range(num_img): <NEW_LINE> <INDENT> img_prepro.append(self.preprocess(img[i].type(self.dtype).div(255.))) <NEW_LINE> <DEDENT> img_prepro = torch.stack(img_prepro).to(self.device) <NEW_LINE> with torch.no_grad(): <NEW_LINE> <INDENT> feat = [] <NEW_LINE> process_batch = 500 <NEW_LINE> for b in range(math.ceil(num_img / process_batch)): <NEW_LINE> <INDENT> feat.append(self.mobilenet(img_prepro[b * process_batch:(b + 1) * process_batch] ).squeeze(-1).squeeze(-1)) <NEW_LINE> <DEDENT> feat = torch.cat(feat) <NEW_LINE> F.normalize(feat, p=2, dim=1) <NEW_LINE> <DEDENT> return feat | Image feature extraction with MobileNet. | 62599061462c4b4f79dbd0cd |
class AutoDiscoverPyLibMCCache(PyLibMCCache): <NEW_LINE> <INDENT> def __init__(self, server, params): <NEW_LINE> <INDENT> super(AutoDiscoverPyLibMCCache, self).__init__(server, params) <NEW_LINE> for method_name in ('set', 'set_many', 'get', 'get_many', 'delete'): <NEW_LINE> <INDENT> method = getattr(self, method_name) <NEW_LINE> setattr(self, method_name, clear_client_on_error(method)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def _cache(self): <NEW_LINE> <INDENT> this = getattr(self, '_local', self) <NEW_LINE> client = getattr(this, '_client', None) <NEW_LINE> if client is None: <NEW_LINE> <INDENT> client = self._lib.Client(get_addresses(self._servers)) <NEW_LINE> if self._options: <NEW_LINE> <INDENT> client.behaviors = self._options.get( 'behaviors', self._options) <NEW_LINE> <DEDENT> setattr(this, '_client', client) <NEW_LINE> <DEDENT> return client | Handle multiple servers in a single A record.
Simple auto-discover for use with Kubernetes. | 6259906163d6d428bbee3dec |
class BaseGeometry: <NEW_LINE> <INDENT> def area(self): <NEW_LINE> <INDENT> raise Exception("area() is not implemented") <NEW_LINE> <DEDENT> """Validate as positive number""" <NEW_LINE> def integer_validator(self, name, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError("{} must be an integer".format(name)) <NEW_LINE> <DEDENT> if value <= 0: <NEW_LINE> <INDENT> raise ValueError("{} must be greater than 0".format(name)) | Class base geometry | 62599061f548e778e596cc50 |
class HTTPTimeoutException(httplib.HTTPException): <NEW_LINE> <INDENT> pass | A timeout occurred while waiting on the server. | 62599061435de62698e9d4cf |
class Algorithm(object): <NEW_LINE> <INDENT> def __init__(self, chromo_length, pop_size, crossover_rate, mutation_rate): <NEW_LINE> <INDENT> self.chromo_length = chromo_length <NEW_LINE> self.pop_size = pop_size <NEW_LINE> self.crossover_rate = crossover_rate <NEW_LINE> self.mutation_rate = mutation_rate <NEW_LINE> self.generation = 1 <NEW_LINE> self.size = pop_size <NEW_LINE> self.chromosomes = [] <NEW_LINE> for _ in range(self.size): <NEW_LINE> <INDENT> weights = [random_clamped() for _ in range(chromo_length)] <NEW_LINE> self.chromosomes.append(Chromosome(weights, mutation_rate)) <NEW_LINE> <DEDENT> <DEDENT> def roulette(self): <NEW_LINE> <INDENT> total_fitness = sum([c.fitness for c in self.chromosomes]) <NEW_LINE> rand_value = random.random() * total_fitness <NEW_LINE> value = 0 <NEW_LINE> for c in self.chromosomes: <NEW_LINE> <INDENT> value += c.fitness <NEW_LINE> if value >= rand_value: <NEW_LINE> <INDENT> return c <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def crossover(self, papa, mama): <NEW_LINE> <INDENT> if random.random() > self.crossover_rate: <NEW_LINE> <INDENT> return papa, mama <NEW_LINE> <DEDENT> x = random.randint(0,len(papa.weights)-1) <NEW_LINE> baby1 = Chromosome(papa.weights[0:x] + mama.weights[x:], self.mutation_rate) <NEW_LINE> baby2 = Chromosome(mama.weights[0:x] + papa.weights[x:], self.mutation_rate) <NEW_LINE> return baby1, baby2 <NEW_LINE> <DEDENT> def epoch(self): <NEW_LINE> <INDENT> chromosomes = sorted( self.chromosomes, key = lambda c: c.fitness, reverse = True ) <NEW_LINE> new_chromosomes = [] <NEW_LINE> while len(new_chromosomes) < self.size: <NEW_LINE> <INDENT> papa = self.roulette() <NEW_LINE> mama = self.roulette() <NEW_LINE> if papa and mama: <NEW_LINE> <INDENT> baby1, baby2 = self.crossover(papa, mama) <NEW_LINE> baby1.mutate(1.0/self.generation) <NEW_LINE> baby2.mutate(1.0/self.generation) <NEW_LINE> new_chromosomes.append(baby1) <NEW_LINE> new_chromosomes.append(baby2) <NEW_LINE> <DEDENT> <DEDENT> self.chromosomes = new_chromosomes <NEW_LINE> self.generation += 1 | Genetic Algorithm class | 6259906145492302aabfdba2 |
class _FilterHandlerMixIn: <NEW_LINE> <INDENT> def initfilters(self, filters: Optional[List]) -> None: <NEW_LINE> <INDENT> self._filters = filters or [] <NEW_LINE> <DEDENT> async def prepare(self) -> Awaitable[None]: <NEW_LINE> <INDENT> super().prepare() <NEW_LINE> for filt in self._filters: <NEW_LINE> <INDENT> await filt.apply( self ) | Handle filter handlers
| 625990611f5feb6acb1642b3 |
class _NotNaRowFunc(object): <NEW_LINE> <INDENT> def __init__( self, label: object, ) -> None: <NEW_LINE> <INDENT> self.label = label <NEW_LINE> self.__doc__ = f"df[{label}] is not NA" <NEW_LINE> <DEDENT> def __call__(self, df: pandas.DataFrame) -> pandas.Series: <NEW_LINE> <INDENT> return df[self.label].notna() | A pickle-able notna callable class. | 62599061cb5e8a47e493cce9 |
class ElementsException(Exception): <NEW_LINE> <INDENT> pass | An exception for element formats. | 62599061379a373c97d9a6ec |
class CartDiscountKeyReference(KeyReference): <NEW_LINE> <INDENT> def __init__(self, *, key: str): <NEW_LINE> <INDENT> super().__init__(key=key, type_id=ReferenceType.CART_DISCOUNT) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deserialize( cls, data: typing.Dict[str, typing.Any] ) -> "CartDiscountKeyReference": <NEW_LINE> <INDENT> from ._schemas.common import CartDiscountKeyReferenceSchema <NEW_LINE> return CartDiscountKeyReferenceSchema().load(data) <NEW_LINE> <DEDENT> def serialize(self) -> typing.Dict[str, typing.Any]: <NEW_LINE> <INDENT> from ._schemas.common import CartDiscountKeyReferenceSchema <NEW_LINE> return CartDiscountKeyReferenceSchema().dump(self) | References a cart discount by key. | 62599061d7e4931a7ef3d6e9 |
class Actor(object): <NEW_LINE> <INDENT> name_only_regex = re.compile( r'<(.+)>' ) <NEW_LINE> name_email_regex = re.compile( r'(.*) <(.+?)>' ) <NEW_LINE> def __init__(self, name, email): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.email = email <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.name == other.name and self.email == other.email <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash((self.name, self.email)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<git.Actor "%s <%s>">' % (self.name, self.email) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_string(cls, string): <NEW_LINE> <INDENT> m = cls.name_email_regex.search(string) <NEW_LINE> if m: <NEW_LINE> <INDENT> name, email = m.groups() <NEW_LINE> return Actor(name, email) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m = cls.name_only_regex.search(string) <NEW_LINE> if m: <NEW_LINE> <INDENT> return Actor(m.group(1), None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Actor(string, None) | Actors hold information about a person acting on the repository. They
can be committers and authors or anything with a name and an email as
mentioned in the git log entries. | 6259906191af0d3eaad3b4f0 |
class Producer: <NEW_LINE> <INDENT> existing_topics = set([]) <NEW_LINE> def __init__( self, topic_name, key_schema, value_schema=None, num_partitions=1, num_replicas=1, ): <NEW_LINE> <INDENT> self.topic_name = topic_name <NEW_LINE> self.key_schema = key_schema <NEW_LINE> self.value_schema = value_schema <NEW_LINE> self.num_partitions = num_partitions <NEW_LINE> self.num_replicas = num_replicas <NEW_LINE> self.broker_properties = { "bootstrap.servers": "PLAINTEXT://localhost:9092,PLAINTEXT://localhost:9093,PLAINTEXT://localhost:9094", } <NEW_LINE> if self.topic_name not in Producer.existing_topics: <NEW_LINE> <INDENT> self.create_topic() <NEW_LINE> Producer.existing_topics.add(self.topic_name) <NEW_LINE> <DEDENT> schema_registry = CachedSchemaRegistryClient({"url": "http://localhost:8081"}) <NEW_LINE> self.producer = AvroProducer( self.broker_properties, default_key_schema=key_schema, default_value_schema=value_schema, schema_registry=schema_registry, ) <NEW_LINE> <DEDENT> def create_topic(self): <NEW_LINE> <INDENT> client = AdminClient(self.broker_properties) <NEW_LINE> topic_metadata = client.list_topics() <NEW_LINE> if topic_metadata.topics.get(self.topic_name) is not None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> futures = client.create_topics( [ NewTopic( topic=self.topic_name, num_partitions=self.num_partitions, replication_factor=self.num_replicas, config={ "cleanup.policy": "compact", "compression.type": "lz4", "delete.retention.ms": 100, "file.delete.delay.ms": 100, }, ) ] ) <NEW_LINE> <DEDENT> def time_millis(self): <NEW_LINE> <INDENT> return int(round(time.time() * 1000)) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.producer is not None: <NEW_LINE> <INDENT> self.producer.flush() <NEW_LINE> <DEDENT> logger.info("producer close incomplete - skipping") <NEW_LINE> <DEDENT> def time_millis(self): <NEW_LINE> <INDENT> return int(round(time.time() * 1000)) | Defines and provides common functionality amongst Producers | 625990610c0af96317c578c3 |
class _ArtifactoryFlavour(pathlib._Flavour): <NEW_LINE> <INDENT> sep = '/' <NEW_LINE> altsep = '/' <NEW_LINE> has_drv = True <NEW_LINE> pathmod = pathlib.posixpath <NEW_LINE> is_supported = (True) <NEW_LINE> def parse_parts(self, parts): <NEW_LINE> <INDENT> drv, root, parsed = super(_ArtifactoryFlavour, self).parse_parts(parts) <NEW_LINE> return drv, root, parsed <NEW_LINE> <DEDENT> def splitroot(self, part, sep=sep): <NEW_LINE> <INDENT> drv = '' <NEW_LINE> root = '' <NEW_LINE> base = get_global_base_url(part) <NEW_LINE> if base and without_http_prefix(part).startswith(without_http_prefix(base)): <NEW_LINE> <INDENT> mark = without_http_prefix(base).rstrip(sep)+sep <NEW_LINE> parts = part.split(mark) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mark = sep+'artifactory'+sep <NEW_LINE> parts = part.split(mark) <NEW_LINE> <DEDENT> if len(parts) >= 2: <NEW_LINE> <INDENT> drv = parts[0] + mark.rstrip(sep) <NEW_LINE> rest = sep + mark.join(parts[1:]) <NEW_LINE> <DEDENT> elif part.endswith(mark.rstrip(sep)): <NEW_LINE> <INDENT> drv = part <NEW_LINE> rest = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rest = part <NEW_LINE> <DEDENT> if not rest: <NEW_LINE> <INDENT> return drv, '', '' <NEW_LINE> <DEDENT> if rest == sep: <NEW_LINE> <INDENT> return drv, '', '' <NEW_LINE> <DEDENT> if rest.startswith(sep): <NEW_LINE> <INDENT> root, _, part = rest[1:].partition(sep) <NEW_LINE> root = sep + root + sep <NEW_LINE> <DEDENT> return drv, root, part <NEW_LINE> <DEDENT> def casefold(self, string): <NEW_LINE> <INDENT> return string <NEW_LINE> <DEDENT> def casefold_parts(self, parts): <NEW_LINE> <INDENT> return parts <NEW_LINE> <DEDENT> def resolve(self, path): <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> def is_reserved(self, _): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def make_uri(self, path): <NEW_LINE> <INDENT> return path | Implements Artifactory-specific pure path manipulations.
I.e. what is 'drive', 'root' and 'path' and how to split full path into
components.
See 'pathlib' documentation for explanation how those are used.
drive: in context of artifactory, it's the base URI like
http://mysite/artifactory
root: repository, e.g. 'libs-snapshot-local' or 'ext-release-local'
path: relative artifact path within the repository | 625990614f88993c371f1083 |
class TestGeneralFunctions(unittest.TestCase): <NEW_LINE> <INDENT> def test_dummy(self): <NEW_LINE> <INDENT> d = Dummy() <NEW_LINE> d.foo = 'bar' <NEW_LINE> d.bar = 'baz' <NEW_LINE> self.assertIs(d.foo, 'bar') <NEW_LINE> self.assertIs(d.bar, 'baz') <NEW_LINE> <DEDENT> def test_json_encoder(self): <NEW_LINE> <INDENT> je = JSONEncoder() <NEW_LINE> _id = uuid.uuid4() <NEW_LINE> self.assertEqual(je.default(_id), 'urn:uuid:' + str(_id)) <NEW_LINE> <DEDENT> def test_json_decoder(self): <NEW_LINE> <INDENT> jd = JSONDecoder() <NEW_LINE> self.assertEqual(jd.decode('"urn:uuid:b6ea2918-e2f1-4c0a-aa50-948edb9120fa"'), uuid.UUID('b6ea2918-e2f1-4c0a-aa50-948edb9120fa')) <NEW_LINE> self.assertEqual(jd.decode('"foo"'), 'foo') <NEW_LINE> self.assertEqual(jd.decode('true'), True) <NEW_LINE> self.assertEqual(jd.decode('false'), False) <NEW_LINE> self.assertEqual(jd.decode('123'), 123) <NEW_LINE> self.assertEqual(jd.decode('123.123'), 123.123) <NEW_LINE> <DEDENT> def test_dump_dict(self): <NEW_LINE> <INDENT> d = {'foo': 'bar', 'boo': True, 'boop': False, 'baz': 123.123} <NEW_LINE> expect = {'boo': 'true', 'baz': '123.123', 'foo': '"bar"', 'boop': 'false'} <NEW_LINE> self.assertEqual(dump_dict(d), expect) <NEW_LINE> <DEDENT> def test_load_dict(self): <NEW_LINE> <INDENT> l = {b'boo': b'true', b'baz': b'123.123', b'foo': b'"bar"', b'boop': b'false'} <NEW_LINE> expect = {'foo': 'bar', 'boo': True, 'boop': False, 'baz': 123.123} <NEW_LINE> self.assertEqual(load_dict(l), expect) | Test general functions and objects.
(Dummy, JSONEncoder, JSONDecoder, dump_dict, load_dict) | 625990611b99ca400229009a |
class MainObjectListView(generics.ListAPIView): <NEW_LINE> <INDENT> serializer_class = MainObjectSerializer <NEW_LINE> filter_backends = (DjangoFilterBackend,) <NEW_LINE> filterset_class = MainObjectFilter <NEW_LINE> permission_classes = [permissions.IsAuthenticatedOrReadOnly] <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> objects = MainObject.objects.filter(draft=False).annotate( rank_user=models.Count( "ranks", filter=models.Q( ranks__ip=get_client_ip(self.request)))).annotate( middle_value=models.Sum(models.F('ranks__value')) / models.Count(models.F('ranks'))) <NEW_LINE> return objects | object list | 625990617b25080760ed8845 |
class OutputTypeDescription(StorableMixin): <NEW_LINE> <INDENT> def __init__(self, filename=None, stride=1, selection=None): <NEW_LINE> <INDENT> super(OutputTypeDescription, self).__init__() <NEW_LINE> if filename is None: <NEW_LINE> <INDENT> filename = 'stride-%d.dcd' % stride <NEW_LINE> <DEDENT> self.filename = filename <NEW_LINE> self.stride = stride <NEW_LINE> self.selection = selection | A description of a general trajectory type
Attributes
----------
filename : str
a filename to store these type of trajectory in
stride : int
the stride to be used relative to native engine timesteps
selection : str
a :meth:`mdtraj.Topology.select` like selection of an atom subset | 62599061435de62698e9d4d0
class HeatStacks(utils.HeatScenario): <NEW_LINE> <INDENT> RESOURCE_NAME_PREFIX = "rally_stack_" <NEW_LINE> RESOURCE_NAME_LENGTH = 7 <NEW_LINE> @staticmethod <NEW_LINE> def _get_template_from_file(template_path): <NEW_LINE> <INDENT> template = None <NEW_LINE> if template_path: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(template_path, "r") as f: <NEW_LINE> <INDENT> template = f.read() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> raise IOError("Provided path '%(template_path)s' is not valid" % {"template_path": template_path}) <NEW_LINE> <DEDENT> <DEDENT> return template <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.HEAT) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @base.scenario(context={"cleanup": ["heat"]}) <NEW_LINE> def create_and_list_stack(self, template_path=None): <NEW_LINE> <INDENT> template = self._get_template_from_file(template_path) <NEW_LINE> self._create_stack(template) <NEW_LINE> self._list_stacks() <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.HEAT) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @base.scenario(context={"cleanup": ["heat"]}) <NEW_LINE> def create_and_delete_stack(self, template_path=None): <NEW_LINE> <INDENT> template = self._get_template_from_file(template_path) <NEW_LINE> stack = self._create_stack(template) <NEW_LINE> self._delete_stack(stack) <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.HEAT) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @base.scenario(context={"cleanup": ["heat"]}) <NEW_LINE> def create_update_delete_stack(self, template_path=None, updated_template_path=None): <NEW_LINE> <INDENT> template = self._get_template_from_file(template_path) <NEW_LINE> stack = self._create_stack(template) <NEW_LINE> updated_template = self._get_template_from_file(updated_template_path) <NEW_LINE> self._update_stack(stack, updated_template) <NEW_LINE> self._delete_stack(stack) | Benchmark scenarios for Heat stacks. | 62599061a79ad1619776b621 |
class PTRansition(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "PTRansition" <NEW_LINE> args = ["1"] | `STATus:OPERation:PTRansition
<http://www.rohde-schwarz.com/webhelp/smb100a_webhelp/Content/19ddd521fccc4c10.htm#ID_7baf119671e812690a00206a0185198a-e4673fbb71e812690a00206a012bc823-en-US>`_
Arguments: 1 | 62599061379a373c97d9a6ed |
class TestPersonalInfoResponse(SimpleTestCase): <NEW_LINE> <INDENT> def test_equality(self): <NEW_LINE> <INDENT> pr_rimmer = PersonalInfoResponse('smeg_head', 'H', sentinel.request_type, 'rimmer', sentinel.custom_email, sentinel.confirmation_method) <NEW_LINE> pr_clone = PersonalInfoResponse('smeg_head', 'H', sentinel.request_type, 'rimmer', sentinel.custom_email, sentinel.confirmation_method) <NEW_LINE> pr_kryten = PersonalInfoResponse('android', 'K', sentinel.request_type, 'kryten', sentinel.other_email, sentinel.confirmation_method) <NEW_LINE> self.assertTrue(pr_rimmer == pr_rimmer) <NEW_LINE> self.assertTrue(pr_rimmer == pr_clone) <NEW_LINE> self.assertFalse(pr_rimmer == pr_kryten) | Test `PersonalInfoResponse` class. | 62599061442bda511e95d8be |
class Solution1: <NEW_LINE> <INDENT> def romanToInt(self, string): <NEW_LINE> <INDENT> prev_char = string[-1] <NEW_LINE> total_value = mapping[prev_char] <NEW_LINE> string = string[:-1] <NEW_LINE> while string: <NEW_LINE> <INDENT> current_char = string[-1] <NEW_LINE> current_value = mapping[current_char] <NEW_LINE> string = string[:-1] <NEW_LINE> if mapping[prev_char] > current_value: <NEW_LINE> <INDENT> total_value -= current_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> total_value += current_value <NEW_LINE> <DEDENT> prev_char = current_char <NEW_LINE> <DEDENT> return total_value | Approach: directly apply the list-reverse idea, comparing the numeric value of the current character with that of the previous character.
Advantage: the idea is the same as the Stack-based approach, but it saves time | 6259906116aa5153ce401ba5
class KNearestNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.X_train = X <NEW_LINE> self.y_train = y <NEW_LINE> <DEDENT> def predict(self, X, k=1, num_loops=0): <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> dists = self.compute_distances_no_loops(X) <NEW_LINE> <DEDENT> elif num_loops == 1: <NEW_LINE> <INDENT> dists = self.compute_distances_one_loop(X) <NEW_LINE> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> dists = self.compute_distances_two_loops(X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid value %d for num_loops' % num_loops) <NEW_LINE> <DEDENT> return self.predict_labels(dists, k=k) <NEW_LINE> <DEDENT> def compute_distances_two_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> for j in range(num_train): <NEW_LINE> <INDENT> dists[i,j] = np.sqrt(np.sum((X[i]-self.X_train[j])**2)) <NEW_LINE> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_one_loop(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> dists[i, :] = np.sqrt(np.sum((X[i] - self.X_train) ** 2, axis = 1)) <NEW_LINE> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_no_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> dists = np.sqrt(np.sum(X ** 2, axis = 1)[:, np.newaxis] + np.sum(self.X_train ** 2, axis = 1) -2 * np.dot(X, self.X_train.T)) <NEW_LINE> return dists <NEW_LINE> <DEDENT> def predict_labels(self, dists, k=1): <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> closest_y = [] <NEW_LINE> closest_y = self.y_train[np.argsort(dists[i])[:k]] <NEW_LINE> y_pred[i] = np.amin(np.argmax(np.bincount(closest_y))) <NEW_LINE> <DEDENT> return y_pred | a kNN classifier with L2 distance | 625990619c8ee82313040cee |
class NewReleaseMessageStatus (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'NewReleaseMessageStatus') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('http://ddex.net/xml/20110630/ddex.xsd', 1423, 4) <NEW_LINE> _Documentation = 'A status of a NewReleaseMessage.' | A status of a NewReleaseMessage. | 62599061b7558d5895464a92 |
class Season(Entity): <NEW_LINE> <INDENT> def __init__(self, hass, hemisphere, season_tracking_type): <NEW_LINE> <INDENT> self.hass = hass <NEW_LINE> self.hemisphere = hemisphere <NEW_LINE> self.datetime = datetime.now() <NEW_LINE> self.type = season_tracking_type <NEW_LINE> self.season = get_season(self.datetime, self.hemisphere, self.type) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "Season" <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self.season <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return SEASON_ICONS.get(self.season, "mdi:cloud") <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.datetime = datetime.utcnow() <NEW_LINE> self.season = get_season(self.datetime, self.hemisphere, self.type) | Representation of the current season. | 625990614e4d562566373ad0 |
class Battery(): <NEW_LINE> <INDENT> def __init__(self, battery_size=70): <NEW_LINE> <INDENT> self.battery_size = battery_size <NEW_LINE> <DEDENT> def describe_battery(self): <NEW_LINE> <INDENT> print("This car has a " + str(self.battery_size)) + "-kWh battery." <NEW_LINE> <DEDENT> def get_range(self): <NEW_LINE> <INDENT> if self.battery_size == 70: <NEW_LINE> <INDENT> range = 240 <NEW_LINE> <DEDENT> elif self.battery_size == 85: <NEW_LINE> <INDENT> range = 270 <NEW_LINE> <DEDENT> message = "This car can go approximately " + str(range) <NEW_LINE> message += " miles on a full charge" <NEW_LINE> print(message) | A simple attempt to model a battery for an electric car. | 6259906138b623060ffaa3b5 |
class GroupUserListApiView(generics.ListAPIView): <NEW_LINE> <INDENT> queryset = GroupUser.objects.all() <NEW_LINE> serializer_class = GroupUserModelSerializer <NEW_LINE> permission_classes = (IsAuthenticated, ) <NEW_LINE> filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) <NEW_LINE> search_fields = ("group__title", "group__code", "user__username") <NEW_LINE> filter_fields = ("group", "group__code", "group_id", "user", "user__username", "permission") <NEW_LINE> ordering_fields = ("group", "user", "id", "permission") <NEW_LINE> ordering = ("id",) | Group user list | 62599061d6c5a102081e37ee
class PickleSerialize: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def save(file_name, data): <NEW_LINE> <INDENT> with open("./databases/" + file_name + ".pickle", "wb") as f: <NEW_LINE> <INDENT> pickle.dump(data, f) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def load(file_name): <NEW_LINE> <INDENT> if not os.path.isfile("./databases/" + file_name + ".pickle"): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> with open("./databases/" + file_name + ".pickle", "rb") as f: <NEW_LINE> <INDENT> res = pickle.load(f) <NEW_LINE> return res | Serialize database to pickle file | 625990616e29344779b01d19 |
class Section(FigureCanvas): <NEW_LINE> <INDENT> def __init__(self, well, dpi=90): <NEW_LINE> <INDENT> self.well = well <NEW_LINE> self.figure = Figure(dpi=dpi, facecolor="white") <NEW_LINE> super().__init__(self.figure) <NEW_LINE> self.refresh() <NEW_LINE> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> self.figure.clear() <NEW_LINE> if not self.well.las: <NEW_LINE> <INDENT> self.hide() <NEW_LINE> return <NEW_LINE> <DEDENT> checked_curves = list( filter(lambda curve: curve.mnemonic != "DEPT" and curve.qt_item.checkState(0), self.well.las.curves)) <NEW_LINE> if len(checked_curves) < 1: <NEW_LINE> <INDENT> self.hide() <NEW_LINE> return <NEW_LINE> <DEDENT> self.setFixedWidth(0) <NEW_LINE> self.figure.set_figwidth(0) <NEW_LINE> self.figure.suptitle('"' + self.well.name + '"', fontsize=10, fontweight='normal') <NEW_LINE> number_of_curves = len(checked_curves) <NEW_LINE> for i, curve in enumerate(checked_curves): <NEW_LINE> <INDENT> if curve.qt_item.checkState(0): <NEW_LINE> <INDENT> self.setFixedWidth(self.geometry().width() + 200) <NEW_LINE> self.figure.set_figwidth(self.figure.get_figwidth() + 200 / self.figure.get_dpi()) <NEW_LINE> ax = self.figure.add_subplot(1, number_of_curves, i + 1) <NEW_LINE> ax.set_title(curve.mnemonic, fontsize=8, fontweight='normal') <NEW_LINE> ax.tick_params(axis='both', which='major', labelsize=7) <NEW_LINE> ax.tick_params(axis='both', which='minor', labelsize=7) <NEW_LINE> ax.spines['right'].set_visible(False) <NEW_LINE> ax.spines['top'].set_visible(False) <NEW_LINE> ax.spines['left'].set_visible(False) <NEW_LINE> ax.spines['bottom'].set_visible(False) <NEW_LINE> ax.yaxis.set_ticks_position('left') <NEW_LINE> ax.xaxis.set_ticks_position('bottom') <NEW_LINE> ax.invert_yaxis() <NEW_LINE> ax.grid(color="gray") <NEW_LINE> ax.set_ylabel('depth (m)', fontsize=7) <NEW_LINE> ax.plot(curve.data, self.well.las["DEPT"]) <NEW_LINE> <DEDENT> <DEDENT> self.figure.tight_layout(rect=(0, 0, 1, 0.98)) | Object of this class displays one section | 625990614428ac0f6e659bfc |
class AverageSpeakerScoreMetricAnnotator(SpeakerScoreQuerySetMetricAnnotator): <NEW_LINE> <INDENT> key = "average" <NEW_LINE> name = _("average") <NEW_LINE> abbr = _("Avg") <NEW_LINE> function = Avg | Metric annotator for average speaker score. | 62599061a8370b77170f1a98 |
class ArithmeticDecoder(ArithmeticCoder): <NEW_LINE> <INDENT> def __init__(self, numbits, bitin): <NEW_LINE> <INDENT> super(ArithmeticDecoder, self).__init__(numbits) <NEW_LINE> self.input = bitin <NEW_LINE> self.code = 0 <NEW_LINE> for _ in range(self.num_state_bits): <NEW_LINE> <INDENT> self.code = self.code << 1 | self.read_code_bit() <NEW_LINE> <DEDENT> <DEDENT> def read(self, freqs): <NEW_LINE> <INDENT> total = freqs.get_total() <NEW_LINE> if total > self.maximum_total: <NEW_LINE> <INDENT> raise ValueError("Cannot decode symbol because total is too large") <NEW_LINE> <DEDENT> rng = self.high - self.low + 1 <NEW_LINE> offset = self.code - self.low <NEW_LINE> value = ((offset + 1) * total - 1) // rng <NEW_LINE> start = 0 <NEW_LINE> end = freqs.get_symbol_limit() <NEW_LINE> while end - start > 1: <NEW_LINE> <INDENT> middle = (start + end) >> 1 <NEW_LINE> if freqs.get_low(middle) > value: <NEW_LINE> <INDENT> end = middle <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = middle <NEW_LINE> <DEDENT> <DEDENT> symbol = start <NEW_LINE> self.update(freqs, symbol) <NEW_LINE> if not (self.low <= self.code <= self.high): <NEW_LINE> <INDENT> raise AssertionError("Code out of range") <NEW_LINE> <DEDENT> return symbol <NEW_LINE> <DEDENT> def shift(self): <NEW_LINE> <INDENT> self.code = ((self.code << 1) & self.state_mask) | self.read_code_bit() <NEW_LINE> <DEDENT> def underflow(self): <NEW_LINE> <INDENT> self.code = (self.code & self.half_range) | ((self.code << 1) & (self.state_mask >> 1)) | self.read_code_bit() <NEW_LINE> <DEDENT> def read_code_bit(self): <NEW_LINE> <INDENT> temp = self.input.read_bit() <NEW_LINE> if temp == -1: <NEW_LINE> <INDENT> temp = 0 <NEW_LINE> <DEDENT> return temp | Reads from an arithmetic-coded bit stream and decodes symbols. | 62599061f7d966606f74941e |
class ContainsEagerMultipleOfType( fixtures.DeclarativeMappedTest, testing.AssertsCompiledSQL ): <NEW_LINE> <INDENT> __dialect__ = "default" <NEW_LINE> @classmethod <NEW_LINE> def setup_classes(cls): <NEW_LINE> <INDENT> Base = cls.DeclarativeBasic <NEW_LINE> class X(Base): <NEW_LINE> <INDENT> __tablename__ = "x" <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> a_id = Column(Integer, ForeignKey("a.id")) <NEW_LINE> a = relationship("A", back_populates="x") <NEW_LINE> <DEDENT> class A(Base): <NEW_LINE> <INDENT> __tablename__ = "a" <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> b = relationship("B", back_populates="a") <NEW_LINE> kind = Column(String(30)) <NEW_LINE> x = relationship("X", back_populates="a") <NEW_LINE> __mapper_args__ = { "polymorphic_identity": "a", "polymorphic_on": kind, "with_polymorphic": "*", } <NEW_LINE> <DEDENT> class B(A): <NEW_LINE> <INDENT> a_id = Column(Integer, ForeignKey("a.id")) <NEW_LINE> a = relationship( "A", back_populates="b", uselist=False, remote_side=A.id ) <NEW_LINE> __mapper_args__ = {"polymorphic_identity": "b"} <NEW_LINE> <DEDENT> <DEDENT> def test_contains_eager_multi_alias(self): <NEW_LINE> <INDENT> X, B, A = self.classes("X", "B", "A") <NEW_LINE> s = Session() <NEW_LINE> a_b_alias = aliased(B, name="a_b") <NEW_LINE> b_x_alias = aliased(X, name="b_x") <NEW_LINE> q = ( s.query(A) .outerjoin(A.b.of_type(a_b_alias)) .outerjoin(a_b_alias.x.of_type(b_x_alias)) .options( contains_eager(A.b.of_type(a_b_alias)).contains_eager( a_b_alias.x.of_type(b_x_alias) ) ) ) <NEW_LINE> self.assert_compile( q, "SELECT b_x.id AS b_x_id, b_x.a_id AS b_x_a_id, a_b.id AS a_b_id, " "a_b.kind AS a_b_kind, a_b.a_id AS a_b_a_id, a.id AS a_id_1, " "a.kind AS a_kind, a.a_id AS a_a_id FROM a " "LEFT OUTER JOIN a AS a_b ON a.id = a_b.a_id AND a_b.kind IN " "([POSTCOMPILE_kind_1]) LEFT OUTER JOIN x AS b_x " "ON a_b.id = b_x.a_id", ) | test for #5107 | 62599061dd821e528d6da4e6 |
class NamePaginator(object): <NEW_LINE> <INDENT> def __init__(self, object_list, on=None, per_page=25): <NEW_LINE> <INDENT> self.object_list = object_list <NEW_LINE> self.count = len(object_list) <NEW_LINE> self.pages = [] <NEW_LINE> chunks = {} <NEW_LINE> for obj in self.object_list: <NEW_LINE> <INDENT> if on: obj_str = str(getattr(obj, on)) <NEW_LINE> else: obj_str = str(obj) <NEW_LINE> letter = str.upper(obj_str[0]) <NEW_LINE> if letter not in chunks: chunks[letter] = [] <NEW_LINE> chunks[letter].append(obj) <NEW_LINE> <DEDENT> current_page = NamePage(self) <NEW_LINE> for letter in string.ascii_uppercase: <NEW_LINE> <INDENT> if letter not in chunks: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> sub_list = chunks[letter] <NEW_LINE> new_page_count = len(sub_list) + current_page.count <NEW_LINE> if new_page_count > per_page and abs(per_page - current_page.count) < abs(per_page - new_page_count) and current_page.count > 0: <NEW_LINE> <INDENT> self.pages.append(current_page) <NEW_LINE> current_page = NamePage(self) <NEW_LINE> <DEDENT> current_page.add(sub_list, letter) <NEW_LINE> <DEDENT> if current_page.count > 0: self.pages.append(current_page) <NEW_LINE> <DEDENT> def page(self, num): <NEW_LINE> <INDENT> if len(self.pages) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif num > 0 and num <= len(self.pages): <NEW_LINE> <INDENT> return self.pages[num-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidPage <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def num_pages(self): <NEW_LINE> <INDENT> return len(self.pages) | Pagination for string-based objects | 6259906132920d7e50bc7710 |
class DataProvider(object): <NEW_LINE> <INDENT> def __init__(self, source_id): <NEW_LINE> <INDENT> self._source_id = source_id <NEW_LINE> <DEDENT> def setup_data(self): <NEW_LINE> <INDENT> raise NotImplementedError("Method must be redefined") <NEW_LINE> <DEDENT> def iteration_data(self): <NEW_LINE> <INDENT> raise NotImplementedError("Method must be redefined") <NEW_LINE> <DEDENT> def tear_down_data(self): <NEW_LINE> <INDENT> raise NotImplementedError("Method must be redefined") | Interface to be implemented by any data provider. | 6259906132920d7e50bc7711 |
class ProjectWizardPage(WizardPage): <NEW_LINE> <INDENT> project_name = Str <NEW_LINE> location = Directory(auto_set=True) <NEW_LINE> abs_path = Property(Str, depends_on=["project_name", "location"]) <NEW_LINE> use_default = Bool(True) <NEW_LINE> _label = Property(Str("Create a new project resource."), depends_on=["project_name", "location"]) <NEW_LINE> _named = Bool(False) <NEW_LINE> traits_view = View( Group(Heading("Project")), Group( Item(name="_label", style="readonly", show_label=False), "_", ), Group(Item(name="project_name")), Group( Item(name="use_default", label="Use default location"), show_left=False ), Item(name="location", enabled_when="use_default==False") ) <NEW_LINE> def create_page(self, parent): <NEW_LINE> <INDENT> ui = self.edit_traits(parent=parent, kind='subpanel') <NEW_LINE> return ui.control <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _get_abs_path(self): <NEW_LINE> <INDENT> return join(self.location, self.project_name) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _get__label(self): <NEW_LINE> <INDENT> if (exists(self.abs_path)) and (len(self.project_name) != 0): <NEW_LINE> <INDENT> l = "A project with that name already exists." <NEW_LINE> self.complete = False <NEW_LINE> <DEDENT> elif len(self.project_name) == 0 and self._named: <NEW_LINE> <INDENT> l = "Project name must be specified" <NEW_LINE> self.complete = False <NEW_LINE> <DEDENT> elif not exists(self.location): <NEW_LINE> <INDENT> l = "Project location directory does not exist" <NEW_LINE> self.complete = False <NEW_LINE> <DEDENT> elif len(self.location) == 0: <NEW_LINE> <INDENT> l = "Project location directory must be specified" <NEW_LINE> self.complete = False <NEW_LINE> <DEDENT> elif len(self.project_name) == 0: <NEW_LINE> <INDENT> l = "Create a new project resource." <NEW_LINE> self.complete = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l = "Create a new project resource." <NEW_LINE> self.complete = True <NEW_LINE> <DEDENT> return l <NEW_LINE> <DEDENT> def _project_name_changed(self): <NEW_LINE> <INDENT> self._named = True | Wizard page for project creation.
| 625990615166f23b2e244a9d |
class cmdPasswd: <NEW_LINE> <INDENT> spec = { 'name':'Passwd', 'params':[ {'name':'username','req':1,'type':'unicode'}, {'name':'oldPassword','req':1,'type':'unicode'}, {'name':'newPassword','req':1,'type':'unicode'}, {'name':'newPasswordConfirm','req':1,'type':'unicode'} ]} <NEW_LINE> def executeCommand(self,command): <NEW_LINE> <INDENT> username = command.getValue("username") <NEW_LINE> oldpassword= command.getValue("oldPassword") <NEW_LINE> newpassword = command.getValue("newPassword") <NEW_LINE> newpasswordConfirm = command.getValue("newPasswordConfirm") <NEW_LINE> log("IN command %s" % (command.name)) <NEW_LINE> from src.framework.core import UserHelper <NEW_LINE> s = UserHelper().passwd(username, oldpassword, newpassword, newpasswordConfirm) <NEW_LINE> replyCmd = Command('Alert') <NEW_LINE> replyCmd.addParameter('msg', Utils().pack(s) ) <NEW_LINE> command.outCommands.append(replyCmd) | change user password | 62599061d486a94d0ba2d693 |
class ChunkIterator(object): <NEW_LINE> <INDENT> def __init__(self, dset, source_sel=None): <NEW_LINE> <INDENT> self._shape = dset.shape <NEW_LINE> rank = len(dset.shape) <NEW_LINE> if not dset.chunks: <NEW_LINE> <INDENT> raise TypeError("Chunked dataset required") <NEW_LINE> <DEDENT> if isinstance(dset.chunks, dict): <NEW_LINE> <INDENT> self._layout = dset.chunks["dims"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._layout = dset.chunks <NEW_LINE> <DEDENT> if source_sel is None: <NEW_LINE> <INDENT> slices = [] <NEW_LINE> for dim in range(rank): <NEW_LINE> <INDENT> slices.append(slice(0, self._shape[dim])) <NEW_LINE> <DEDENT> self._sel = tuple(slices) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(source_sel, slice): <NEW_LINE> <INDENT> self._sel = (source_sel,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._sel = source_sel <NEW_LINE> <DEDENT> <DEDENT> if len(self._sel) != rank: <NEW_LINE> <INDENT> raise ValueError("Invalid selection - selection region must have same rank as dataset") <NEW_LINE> <DEDENT> self._chunk_index = [] <NEW_LINE> for dim in range(rank): <NEW_LINE> <INDENT> s = self._sel[dim] <NEW_LINE> if s.start < 0 or s.stop > self._shape[dim] or s.stop <= s.start: <NEW_LINE> <INDENT> raise ValueError("Invalid selection - selection region must be within dataset space") <NEW_LINE> <DEDENT> index = s.start // self._layout[dim] <NEW_LINE> self._chunk_index.append(index) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> rank = len(self._shape) <NEW_LINE> slices = [] <NEW_LINE> if rank == 0 or self._chunk_index[0] * self._layout[0] >= self._sel[0].stop: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> for dim in range(rank): <NEW_LINE> <INDENT> s = self._sel[dim] <NEW_LINE> start = self._chunk_index[dim] * self._layout[dim] <NEW_LINE> stop = (self._chunk_index[dim] + 1) * self._layout[dim] <NEW_LINE> if start < s.start: <NEW_LINE> <INDENT> start = s.start <NEW_LINE> <DEDENT> if stop > s.stop: <NEW_LINE> <INDENT> stop = s.stop <NEW_LINE> <DEDENT> s = slice(start, stop, 1) <NEW_LINE> slices.append(s) <NEW_LINE> <DEDENT> dim = rank - 1 <NEW_LINE> while dim >= 0: <NEW_LINE> <INDENT> s = self._sel[dim] <NEW_LINE> self._chunk_index[dim] += 1 <NEW_LINE> chunk_end = self._chunk_index[dim] * self._layout[dim] <NEW_LINE> if chunk_end < s.stop: <NEW_LINE> <INDENT> return tuple(slices) <NEW_LINE> <DEDENT> if dim > 0: <NEW_LINE> <INDENT> self._chunk_index[dim] = 0 <NEW_LINE> <DEDENT> dim -= 1 <NEW_LINE> <DEDENT> return tuple(slices) | Class to iterate through list of chunks of a given dataset | 625990611b99ca400229009b |
class TLSSNI01ServerTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.certs = {b'localhost': ( test_util.load_pyopenssl_private_key('rsa512_key.pem'), test_util.load_cert('cert.pem'), )} <NEW_LINE> from acme.standalone import TLSSNI01Server <NEW_LINE> self.server = TLSSNI01Server(("", 0), certs=self.certs) <NEW_LINE> self.thread = threading.Thread(target=self.server.serve_forever) <NEW_LINE> self.thread.start() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.server.shutdown() <NEW_LINE> self.thread.join() <NEW_LINE> <DEDENT> def test_it(self): <NEW_LINE> <INDENT> host, port = self.server.socket.getsockname()[:2] <NEW_LINE> cert = crypto_util.probe_sni( b'localhost', host=host, port=port, timeout=1) <NEW_LINE> self.assertEqual(jose.ComparableX509(cert), jose.ComparableX509(self.certs[b'localhost'][1])) | Test for acme.standalone.TLSSNI01Server. | 6259906156ac1b37e630384c |