code: string, lengths 4 – 4.48k
docstring: string, lengths 1 – 6.45k
_id: string, length 24
class ALIEN_COLLECTION_EL(NamedTuple): <NEW_LINE> <INDENT> name: str = '' <NEW_LINE> aclId: str = '' <NEW_LINE> broken: str = '' <NEW_LINE> ctime: str = '' <NEW_LINE> dir: str = '' <NEW_LINE> entryId: str = '' <NEW_LINE> expiretime: str = '' <NEW_LINE> gowner: str = '' <NEW_LINE> guid: str = '' <NEW_LINE> guidtime: str = '' <NEW_LINE> jobid: str = '' <NEW_LINE> lfn: str = '' <NEW_LINE> md5: str = '' <NEW_LINE> owner: str = '' <NEW_LINE> perm: str = '' <NEW_LINE> replicated: str = '' <NEW_LINE> size: str = '' <NEW_LINE> turl: str = '' <NEW_LINE> type: str = ''
AliEn-style XML collection element structure
62599079fff4ab517ebcf1f5
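A minimal usage sketch for the NamedTuple above; the field values are made up for illustration.
el = ALIEN_COLLECTION_EL(name='somefile.root', lfn='/alice/cern.ch/user/a/someuser/somefile.root', size='1024')
print(el.lfn)                      # fields are accessed by name; unset fields default to ''
print(el._replace(size='2048'))    # NamedTuple instances are immutable; _replace returns a modified copy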
class cooked_mode(raw_mode): <NEW_LINE> <INDENT> def _patch(self): <NEW_LINE> <INDENT> ENABLE_ECHO_INPUT = 0x0004 <NEW_LINE> ENABLE_LINE_INPUT = 0x0002 <NEW_LINE> ENABLE_PROCESSED_INPUT = 0x0001 <NEW_LINE> windll.kernel32.SetConsoleMode( self.handle, self.original_mode.value | (ENABLE_ECHO_INPUT | ENABLE_LINE_INPUT | ENABLE_PROCESSED_INPUT))
Usage example:: with cooked_mode(stdin): '''The pseudo-terminal stdin is now used in cooked mode.'''
6259907923849d37ff852a95
class V1DaemonSetUpdateStrategy(object): <NEW_LINE> <INDENT> openapi_types = { 'rolling_update': 'V1RollingUpdateDaemonSet', 'type': 'str' } <NEW_LINE> attribute_map = { 'rolling_update': 'rollingUpdate', 'type': 'type' } <NEW_LINE> def __init__(self, rolling_update=None, type=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._rolling_update = None <NEW_LINE> self._type = None <NEW_LINE> self.discriminator = None <NEW_LINE> if rolling_update is not None: <NEW_LINE> <INDENT> self.rolling_update = rolling_update <NEW_LINE> <DEDENT> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def rolling_update(self): <NEW_LINE> <INDENT> return self._rolling_update <NEW_LINE> <DEDENT> @rolling_update.setter <NEW_LINE> def rolling_update(self, rolling_update): <NEW_LINE> <INDENT> self._rolling_update = rolling_update <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> self._type = type <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1DaemonSetUpdateStrategy): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1DaemonSetUpdateStrategy): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259907901c39578d7f14423
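A hedged usage sketch for the generated model above, assuming the official kubernetes Python client where this class normally lives; the field values are illustrative only.
from kubernetes.client import V1DaemonSetUpdateStrategy, V1RollingUpdateDaemonSet

strategy = V1DaemonSetUpdateStrategy(
    type="RollingUpdate",
    rolling_update=V1RollingUpdateDaemonSet(max_unavailable=1),
)
print(strategy.to_dict())   # nested models are serialized recursively via their own to_dict()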
class IObjectInvitation(IInvitation): <NEW_LINE> <INDENT> oid = interface.Attribute('Object id') <NEW_LINE> object = interface.Attribute('Object')
object invitation
62599079442bda511e95da46
class HtmlPageParsedRegion(HtmlPageRegion): <NEW_LINE> <INDENT> def __new__(cls, htmlpage, start_index, end_index): <NEW_LINE> <INDENT> text_start = htmlpage.parsed_body[start_index].start <NEW_LINE> text_end = htmlpage.parsed_body[end_index or -1].end <NEW_LINE> text = htmlpage.body[text_start:text_end] <NEW_LINE> return HtmlPageRegion.__new__(cls, htmlpage, text) <NEW_LINE> <DEDENT> def __init__(self, htmlpage, start_index, end_index): <NEW_LINE> <INDENT> self.htmlpage = htmlpage <NEW_LINE> self.start_index = start_index <NEW_LINE> self.end_index = end_index <NEW_LINE> <DEDENT> @property <NEW_LINE> def parsed_fragments(self): <NEW_LINE> <INDENT> end = self.end_index + 1 if self.end_index is not None else None <NEW_LINE> return self.htmlpage.parsed_body[self.start_index:end]
A region of an HtmlPage that has been extracted. It has a parsed_fragments property that contains the parsed HTML fragments contained within this region.
625990797d847024c075ddba
class PODaggrKriging(ot.OpenTURNSPythonFunction): <NEW_LINE> <INDENT> def __init__(self, krigingPOD, dim, defectSizes, detection): <NEW_LINE> <INDENT> super(PODaggrKriging, self).__init__(dim, defectSizes.shape[0]) <NEW_LINE> self.krigingResult = krigingPOD.getKrigingResult() <NEW_LINE> self.defectNumber = len(defectSizes) <NEW_LINE> self.defectSizes = defectSizes <NEW_LINE> self.detection = detection <NEW_LINE> <DEDENT> def _exec(self, X): <NEW_LINE> <INDENT> x = np.array(X, ndmin=2) <NEW_LINE> x = x.repeat(self.defectNumber, axis=0) <NEW_LINE> xWitha = np.concatenate((np.vstack(self.defectSizes), x), axis=1) <NEW_LINE> mean = np.array(self.krigingResult.getConditionalMean(xWitha)) <NEW_LINE> var = np.array([self.krigingResult.getConditionalCovariance(p)[0, 0] for p in xWitha]) <NEW_LINE> if (var < 0).all(): <NEW_LINE> <INDENT> logging.warning("Warning : some variance values are negatives, " + "the kriging model may not be accurate enough.") <NEW_LINE> if (var[var<0] < 1e-2).all(): <NEW_LINE> <INDENT> raise ValueError("Variance values are lower than -1e-2. Please " + "check the validity of the kriging model.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> var = np.abs(var) <NEW_LINE> <DEDENT> <DEDENT> quantile = np.vstack((self.detection - mean) / np.sqrt(var)) <NEW_LINE> prob = 1. - np.array([ot.DistFunc.pNormal(q[0]) for q in quantile]) <NEW_LINE> return prob
Aggregate function that computes the POD at a given point for all defect sizes given as parameters. Parameters ---------- krigingPOD : :class:`KrigingPOD` or :class:`AdaptiveSignalPOD` The kriging POD object obtained after building the POD. dim : integer The number of input parameters of the function without the defect. defectSizes : sequence of float The defect size values for which the POD is computed. detection : float Detection value of the signal after Box Cox if it was enabled: must be "detectionBoxCox" from the POD object.
6259907960cbc95b06365a5c
class Language(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200, help_text="Enter the book's natural language (e.g. English, French, Japanese etc.)") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Model representing a Language (e.g. English, French, Japanese, etc.)
625990797cff6e4e811b741d
class Base: <NEW_LINE> <INDENT> __nb_object = 0 <NEW_LINE> def __init__(self, id=None): <NEW_LINE> <INDENT> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Base.__nb_object += 1 <NEW_LINE> self.id = Base.__nb_object <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def to_json_string(list_dictionaries): <NEW_LINE> <INDENT> if list_dictionaries is None or len(list_dictionaries) is 0: <NEW_LINE> <INDENT> return "[]" <NEW_LINE> <DEDENT> return json.dumps(list_dictionaries) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def save_to_file(cls, list_objs): <NEW_LINE> <INDENT> name = cls.__name__ + ".json" <NEW_LINE> ret_list = [] <NEW_LINE> if list_objs is not None: <NEW_LINE> <INDENT> for i in list_objs: <NEW_LINE> <INDENT> ret_list.append(i.to_dictionary()) <NEW_LINE> <DEDENT> <DEDENT> with open(name, 'w') as f: <NEW_LINE> <INDENT> f.write(cls.to_json_string(ret_list)) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def from_json_string(json_string): <NEW_LINE> <INDENT> if json_string is None or json_string is "": <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return json.loads(json_string) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create(cls, **dictionary): <NEW_LINE> <INDENT> new_instance = cls(1, 1) <NEW_LINE> new_instance.update(**dictionary) <NEW_LINE> return new_instance <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load_from_file(cls): <NEW_LINE> <INDENT> name = cls.__name__ + ".json" <NEW_LINE> ret_list = [] <NEW_LINE> with open(name) as f: <NEW_LINE> <INDENT> file_list = cls.from_json_string(f.read()) <NEW_LINE> <DEDENT> for i in file_list: <NEW_LINE> <INDENT> n = cls.create(**i) <NEW_LINE> ret_list.append(n) <NEW_LINE> <DEDENT> return ret_list
Base class that the other classes inherit from. Manages __nb_objects and the public instance attribute id.
625990799c8ee82313040e76
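A hedged sketch of how the JSON helpers above are typically exercised, using a hypothetical minimal subclass; the real project presumably defines richer shapes with their own to_dictionary()/update().
class Square(Base):
    def __init__(self, size, id=None):
        super().__init__(id)
        self.size = size

    def to_dictionary(self):
        return {"id": self.id, "size": self.size}

    def update(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

Square.save_to_file([Square(3), Square(5)])   # writes Square.json next to the script
squares = Square.load_from_file()             # rebuilds instances via create()/update()
print([s.to_dictionary() for s in squares])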
class GroveSlidePotentiometer(ADC): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.channel = channel <NEW_LINE> self.adc = ADC() <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self.adc.read(self.channel)
Grove Slide Potentiometer sensor class. Args: pin(int): number of the analog pin/channel the sensor is connected to.
625990792c8b7c6e89bd51c9
class WeibullFitter(KnownModelParametricUnivariateFitter): <NEW_LINE> <INDENT> lambda_: float <NEW_LINE> rho_: float <NEW_LINE> _fitted_parameter_names = ["lambda_", "rho_"] <NEW_LINE> _compare_to_values = np.array([1.0, 1.0]) <NEW_LINE> _scipy_fit_options = {"ftol": 1e-14} <NEW_LINE> def _create_initial_point(self, Ts, E, entry, weights): <NEW_LINE> <INDENT> return np.array([utils.coalesce(*Ts).mean(), 1.0]) <NEW_LINE> <DEDENT> def _cumulative_hazard(self, params, times): <NEW_LINE> <INDENT> lambda_, rho_ = params <NEW_LINE> return safe_exp(rho_ * (np.log(np.clip(times, 1e-25, np.inf)) - np.log(lambda_))) <NEW_LINE> <DEDENT> def _log_hazard(self, params, times): <NEW_LINE> <INDENT> lambda_, rho_ = params <NEW_LINE> return np.log(rho_) - np.log(lambda_) + (rho_ - 1) * (np.log(times) - np.log(lambda_)) <NEW_LINE> <DEDENT> def percentile(self, p) -> float: <NEW_LINE> <INDENT> return self.lambda_ * (np.log(1.0 / p) ** (1.0 / self.rho_))
This class implements a Weibull model for univariate data. The model has parameterized form: .. math:: S(t) = \exp\left(-\left(\frac{t}{\lambda}\right)^\rho\right), \lambda > 0, \rho > 0, The :math:`\lambda` (scale) parameter has an applicable interpretation: it represents the time when 63.2% of the population has died. The :math:`\rho` (shape) parameter controls if the cumulative hazard (see below) is convex or concave, representing accelerating or decelerating hazards. .. image:: /images/weibull_parameters.png The cumulative hazard rate is .. math:: H(t) = \left(\frac{t}{\lambda}\right)^\rho, and the hazard rate is: .. math:: h(t) = \frac{\rho}{\lambda}\left(\frac{t}{\lambda}\right)^{\rho-1} After calling the ``.fit`` method, you have access to properties like: ``cumulative_hazard_``, ``survival_function_``, ``lambda_`` and ``rho_``. A summary of the fit is available with the method ``print_summary()``. Parameters ----------- alpha: float, optional (default=0.05) the level in the confidence intervals. Examples -------- .. code:: python from lifelines import WeibullFitter from lifelines.datasets import load_waltons waltons = load_waltons() wbf = WeibullFitter() wbf.fit(waltons['T'], waltons['E']) wbf.plot() print(wbf.lambda_) Attributes ---------- cumulative_hazard_ : DataFrame The estimated cumulative hazard (with custom timeline if provided) hazard_ : DataFrame The estimated hazard (with custom timeline if provided) survival_function_ : DataFrame The estimated survival function (with custom timeline if provided) cumulative_density_ : DataFrame The estimated cumulative density function (with custom timeline if provided) density_: DataFrame The estimated density function (PDF) (with custom timeline if provided) variance_matrix_ : DataFrame The variance matrix of the coefficients median_survival_time_: float The median time to event lambda_: float The fitted parameter in the model rho_: float The fitted parameter in the model durations: array The durations provided event_observed: array The event_observed variable provided timeline: array The time line to use for plotting and indexing entry: array or None The entry array provided, or None Notes ---------- Looking for a 3-parameter Weibull model? See notes `here <https://lifelines.readthedocs.io/en/latest/jupyter_notebooks/Piecewise%20Exponential%20Models%20and%20Creating%20Custom%20Models.html#3-parameter-Weibull-distribution>`_.
62599079bf627c535bcb2ead
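A short numeric check of the lambda_ interpretation stated above, assuming lifelines and its bundled waltons dataset are available.
import numpy as np
from lifelines import WeibullFitter
from lifelines.datasets import load_waltons

waltons = load_waltons()
wbf = WeibullFitter().fit(waltons['T'], waltons['E'])
# S(lambda_) = exp(-1) ~ 0.368, i.e. ~63.2% of the population has died by t = lambda_,
# so the survival-probability-1/e quantile coincides with lambda_ itself.
print(wbf.percentile(np.exp(-1)), wbf.lambda_)   # the two numbers should agree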
class IMultiLayoutPossibleFacetedNavigable(IPossibleFacetedNavigable): <NEW_LINE> <INDENT> pass
Marker interface for all objects that should have the ability to be faceted navigable
6259907967a9b606de547795
class TextInfo: <NEW_LINE> <INDENT> def __init__(self, face="Arial", bold=False, size=8, padding=3, color=(0, 0, 0)): <NEW_LINE> <INDENT> self.face = (face, cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_BOLD if bold else cairo.FONT_WEIGHT_NORMAL) <NEW_LINE> self.size = size <NEW_LINE> self.padding = padding <NEW_LINE> self.color = color
Class to hold text font and padding information. Public methods: __init__()
62599079fff4ab517ebcf1f7
class ManagementGroupChildInfo(Model): <NEW_LINE> <INDENT> _attribute_map = { 'child_type': {'key': 'childType', 'type': 'str'}, 'child_id': {'key': 'childId', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'children': {'key': 'children', 'type': '[ManagementGroupChildInfo]'}, } <NEW_LINE> def __init__(self, child_type=None, child_id=None, display_name=None, children=None): <NEW_LINE> <INDENT> super(ManagementGroupChildInfo, self).__init__() <NEW_LINE> self.child_type = child_type <NEW_LINE> self.child_id = child_id <NEW_LINE> self.display_name = display_name <NEW_LINE> self.children = children
The child information of a management group. :param child_type: The type of child resource. Possible values include: 'ManagementGroup', 'Subscription' :type child_type: str or ~azure.mgmt.managementgroups.models.enum :param child_id: The fully qualified ID for the child resource (management group or subscription). For example, /providers/Microsoft.Management/managementGroups/0000000-0000-0000-0000-000000000000 :type child_id: str :param display_name: The friendly name of the child resource. :type display_name: str :param children: The list of children. :type children: list[~azure.mgmt.managementgroups.models.ManagementGroupChildInfo]
625990794f88993c371f1211
class FilesystemResolver(etree.Resolver): <NEW_LINE> <INDENT> def resolve(self, system_url, public_id, context): <NEW_LINE> <INDENT> if not '://' in system_url and os.path.exists(system_url): <NEW_LINE> <INDENT> return self.resolve_filename(system_url, context) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Resolver for filesystem paths
625990794f6381625f19a19c
class TestGatewayOption(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testGatewayOption(self): <NEW_LINE> <INDENT> pass
GatewayOption unit test stubs
6259907960cbc95b06365a5d
class LTIJohnProvider(tornado.web.RequestHandler): <NEW_LINE> <INDENT> eventDispatcherURL = 'http://mono.stanford.edu:6969/ltiResponse' <NEW_LINE> def post(self): <NEW_LINE> <INDENT> postBodyForm = self.request.body <NEW_LINE> postBodyDict = eval(postBodyForm) <NEW_LINE> self.echoParmsToEventDispatcher(postBodyDict) <NEW_LINE> <DEDENT> def echoParmsToEventDispatcher(self, paramDict): <NEW_LINE> <INDENT> paramNames = paramDict.keys() <NEW_LINE> paramNames.sort() <NEW_LINE> request = httpclient.HTTPRequest(LTIJohnProvider.eventDispatcherURL, method='POST', body=str(paramDict)) <NEW_LINE> http_client = httpclient.AsyncHTTPClient() <NEW_LINE> ltiResult = http_client.fetch(request, callback=lambda result: None) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def makeApp(self): <NEW_LINE> <INDENT> application = tornado.web.Application([ (r"/john", LTIJohnProvider), ]) <NEW_LINE> return application
This class is a Web service that listens to POST requests from an LTI consumer. The module simply echoes all the parameters that the consumer passes in. This class differs from the corresponding classes in lti_candace_provider.py and lti_dill_provider.py in that this service participates in a service registration scheme (see lti_event_dispatcher.py). Consumers direct all requests to the event dispatcher LTI, which forwards to the proper final provider. Results from this class are returned to the dispatcher via POST. The dispatcher returns the results to the originally requesting consumer. That is: requests to this provider will originate not from a browser, but from the event dispatcher LTI. The service listens on port 7070 on the server it runs on. If running on mono.stanford.edu, the following URL lets you exercise the service: https://lagunita.stanford.edu/courses/DavidU/DC1/David_Course/courseware/918c99bd432c4a83ac14e03cbe774fa0/3cdfb888a5bf480a9f17fc0ca1feb53a/2 If you run it on your own server, and you have a sandbox course on Lagunita, you can create an LTI component as described at http://edx.readthedocs.org/projects/edx-partner-course-staff/en/latest/exercises_tools/lti_component.html
62599079baa26c4b54d50c91
class CaveSurveyDialogTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_icon_png(self): <NEW_LINE> <INDENT> path = ':/plugins/CaveSurvey/icon.png' <NEW_LINE> icon = QIcon(path) <NEW_LINE> self.assertFalse(icon.isNull())
Test that resources work.
62599079283ffb24f3cf5280
class CustomResponseXMLFactory(ResponseXMLFactory): <NEW_LINE> <INDENT> def create_response_element(self, **kwargs): <NEW_LINE> <INDENT> cfn = kwargs.get('cfn', None) <NEW_LINE> expect = kwargs.get('expect', None) <NEW_LINE> answer_attr = kwargs.get('answer_attr', None) <NEW_LINE> answer = kwargs.get('answer', None) <NEW_LINE> options = kwargs.get('options', None) <NEW_LINE> cfn_extra_args = kwargs.get('cfn_extra_args', None) <NEW_LINE> response_element = etree.Element("customresponse") <NEW_LINE> if cfn: <NEW_LINE> <INDENT> response_element.set('cfn', str(cfn)) <NEW_LINE> <DEDENT> if expect: <NEW_LINE> <INDENT> response_element.set('expect', str(expect)) <NEW_LINE> <DEDENT> if answer_attr: <NEW_LINE> <INDENT> response_element.set('answer', str(answer_attr)) <NEW_LINE> <DEDENT> if answer: <NEW_LINE> <INDENT> answer_element = etree.SubElement(response_element, "answer") <NEW_LINE> answer_element.text = str(answer) <NEW_LINE> <DEDENT> if options: <NEW_LINE> <INDENT> response_element.set('options', str(options)) <NEW_LINE> <DEDENT> if cfn_extra_args: <NEW_LINE> <INDENT> response_element.set('cfn_extra_args', str(cfn_extra_args)) <NEW_LINE> <DEDENT> return response_element <NEW_LINE> <DEDENT> def create_input_element(self, **kwargs): <NEW_LINE> <INDENT> return ResponseXMLFactory.textline_input_xml(**kwargs)
Factory for producing <customresponse> XML trees
6259907af9cc0f698b1c5fbc
class UQSpecification(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__filename = 'uq_setting.gz' <NEW_LINE> self.__preprocessor = None <NEW_LINE> self.__simulation = None <NEW_LINE> def postprocessor(x, *args, **kws): <NEW_LINE> <INDENT> return {'_': [x]} <NEW_LINE> <DEDENT> self.__postprocessor = postprocessor <NEW_LINE> self.__reachesSteadyState = False <NEW_LINE> self.__save = False <NEW_LINE> self.__interpolants = {} <NEW_LINE> def interpolate(*args, **kws): <NEW_LINE> <INDENT> return interp1d(*args, kind='linear', **kws) <NEW_LINE> <DEDENT> self.__interp1d = interpolate <NEW_LINE> self.__t0 = -1 <NEW_LINE> self.__tn = -1 <NEW_LINE> self.__dt = -1 <NEW_LINE> <DEDENT> def getStartTime(self): <NEW_LINE> <INDENT> return self.__t0 <NEW_LINE> <DEDENT> def setStartTime(self, t0): <NEW_LINE> <INDENT> self.__t0 = t0 <NEW_LINE> <DEDENT> def getEndTime(self): <NEW_LINE> <INDENT> return self.__tn <NEW_LINE> <DEDENT> def setEndTime(self, tn): <NEW_LINE> <INDENT> self.__tn = tn <NEW_LINE> <DEDENT> def getTimeStep(self): <NEW_LINE> <INDENT> return self.__dt <NEW_LINE> <DEDENT> def setTimeStep(self, dt): <NEW_LINE> <INDENT> self.__dt = dt <NEW_LINE> <DEDENT> def getPreprocessor(self): <NEW_LINE> <INDENT> return self.__preprocessor <NEW_LINE> <DEDENT> def setPreprocessor(self, preprocessor): <NEW_LINE> <INDENT> if isinstance(preprocessor, Transformation): <NEW_LINE> <INDENT> self.__preprocessor = preprocessor <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('The preprocessor has to be an ' + 'instance of Transformation') <NEW_LINE> <DEDENT> <DEDENT> def getSimulation(self): <NEW_LINE> <INDENT> return self.__simulation <NEW_LINE> <DEDENT> def setSimulation(self, simulation): <NEW_LINE> <INDENT> self.__simulation = simulation <NEW_LINE> <DEDENT> def getPostprocessor(self): <NEW_LINE> <INDENT> return self.__postprocessor <NEW_LINE> <DEDENT> def setPostprocessor(self, postprocessor): <NEW_LINE> <INDENT> self.__postprocessor = postprocessor <NEW_LINE> <DEDENT> def setInterpolationFunction(self, interp1d): <NEW_LINE> <INDENT> self.__interp1d = interp1d <NEW_LINE> <DEDENT> def hasInterpolationFunction(self): <NEW_LINE> <INDENT> return self.__interp1d is not None <NEW_LINE> <DEDENT> def getInterpolationFunction(self, p, ts, results): <NEW_LINE> <INDENT> if p in self.__interpolants: <NEW_LINE> <INDENT> return self.__interpolants[p] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f = self.__interp1d(ts, results) <NEW_LINE> self.__interpolants[p] = f <NEW_LINE> return f <NEW_LINE> <DEDENT> <DEDENT> def setReachesSteadyState(self, reachesSteadyState): <NEW_LINE> <INDENT> self.__reachesSteadyState = reachesSteadyState <NEW_LINE> <DEDENT> def reachesSteadyState(self): <NEW_LINE> <INDENT> return self.__reachesSteadyState <NEW_LINE> <DEDENT> def setFilename(self, filename): <NEW_LINE> <INDENT> self.__filename = filename <NEW_LINE> <DEDENT> def getFilename(self): <NEW_LINE> <INDENT> return self.__filename <NEW_LINE> <DEDENT> def setSaveAfterEachRun(self, save): <NEW_LINE> <INDENT> self.__save = save <NEW_LINE> <DEDENT> def getSaveAfterEachRun(self): <NEW_LINE> <INDENT> return self.__save
UQ specification object
6259907a76e4537e8c3f0f5f
class FileUtils: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def open(filePath, permArgs): <NEW_LINE> <INDENT> return open(filePath, permArgs)
Contains utilities related to file system operations.
6259907a7047854f46340d9b
class HomeView(web.View): <NEW_LINE> <INDENT> @aiohttp_jinja2.template('radio/landing.html') <NEW_LINE> async def get(self): <NEW_LINE> <INDENT> return {'filter_class': get_day_time(), 'DEBUG': settings.DEBUG, 'GA': settings.GOOGLE_ANALYTICS}
Base view that returns the index page with all the fancy JS stuff on it
6259907a796e427e5385015b
class PointerInput: <NEW_LINE> <INDENT> def __init__(self, pos, length): <NEW_LINE> <INDENT> self.pos = pos <NEW_LINE> self.length = length
Base class for various pointer input types.
6259907af548e778e596cf72
class ItemAttribute(): <NEW_LINE> <INDENT> ATTRIB_EXCLUSION_LIST = 'copy' <NEW_LINE> def __repr__(self, indent=2): <NEW_LINE> <INDENT> result = self.__class__.__name__ + '\n' <NEW_LINE> for k,v in self._attribs(): <NEW_LINE> <INDENT> result += ' '*indent + k + ': ' + str(v) <NEW_LINE> <DEDENT> result += '\n' <NEW_LINE> return result <NEW_LINE> <DEDENT> def _attribs(self): <NEW_LINE> <INDENT> return [(k, getattr(self, k)) for k in dir(self) if (not k.startswith('_') and k not in ItemAttribute.ATTRIB_EXCLUSION_LIST)] <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> attrib = ItemAttribute() <NEW_LINE> for k, v in self._attribs(): <NEW_LINE> <INDENT> setattr(attrib, k, v.copy() if hasattr(v, 'copy') else v) <NEW_LINE> <DEDENT> return attrib
An attribute about an item which, in turn, contains attributes in the form of Python attributes, set and retrieved using setattr() and getattr()
6259907a32920d7e50bc7a29
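A small usage sketch for the attribute container above; the attribute names and values are made up.
attrib = ItemAttribute()
setattr(attrib, 'colour', 'red')
setattr(attrib, 'sizes', ['S', 'M'])
print(attrib)                      # __repr__ lists the dynamically set attributes
clone = attrib.copy()              # values with a .copy() method (like lists) are copied
clone.sizes.append('L')
print(attrib.sizes)                # ['S', 'M'] -- the original list is untouched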
class Categories(Resource): <NEW_LINE> <INDENT> @jwt_required <NEW_LINE> @admin_only <NEW_LINE> def get(self): <NEW_LINE> <INDENT> return category.get_all_categories() <NEW_LINE> <DEDENT> @jwt_required <NEW_LINE> @admin_only <NEW_LINE> @expects_json(category_schema) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> cat_name = request.get_json("cat_name")["cat_name"].strip(" ") <NEW_LINE> description = request.get_json("desc")["desc"].strip(" ") <NEW_LINE> if not cat_name or not description: <NEW_LINE> <INDENT> return jsonify({ "message": "Category name and description are required", "status": 400 }) <NEW_LINE> <DEDENT> return category.save_category(cat_name, description)
Creates the endpoint for categories
6259907a16aa5153ce401eba
class ExecutorError(Exception): <NEW_LINE> <INDENT> def __init__(self, executor, reason): <NEW_LINE> <INDENT> self.executor = executor <NEW_LINE> self.reason = reason <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Executor {0} failed due to {1}".format(self.executor, self.reason) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__()
Base class for all exceptions. Only to be invoked when a more specific error is not available.
6259907aadb09d7d5dc0bf4a
class ConnectionStringAuthentication(ConnectionString, Authentication): <NEW_LINE> <INDENT> def __init__(self, connection_string): <NEW_LINE> <INDENT> super(ConnectionStringAuthentication, self).__init__( connection_string ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_with_parsed_values(cls, host_name, shared_access_key_name, shared_access_key): <NEW_LINE> <INDENT> connection_string = ( HOST_NAME + "=" + host_name + ";" + SHARED_ACCESS_KEY_NAME + "=" + shared_access_key_name + ";" + SHARED_ACCESS_KEY + "=" + shared_access_key ) <NEW_LINE> return cls(connection_string) <NEW_LINE> <DEDENT> def signed_session(self, session=None): <NEW_LINE> <INDENT> session = super(ConnectionStringAuthentication, self).signed_session(session) <NEW_LINE> sastoken = SasToken(self[HOST_NAME], self[SHARED_ACCESS_KEY], self[SHARED_ACCESS_KEY_NAME]) <NEW_LINE> session.headers[self.header] = str(sastoken) <NEW_LINE> return session
ConnectionString class that can be used with msrest to provide SasToken authentication :param connection_string: The connection string to generate SasToken with
6259907a7d847024c075ddbe
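A hedged construction sketch for the class above; the host name, policy name and key shown are placeholders, not real credentials.
auth = ConnectionStringAuthentication.create_with_parsed_values(
    host_name="myhub.azure-devices.net",
    shared_access_key_name="iothubowner",
    shared_access_key="<base64-key>",
)
# With a valid key, auth.signed_session() returns a requests session whose headers
# carry a freshly generated SAS token.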
class cm_shell_slurm: <NEW_LINE> <INDENT> def activate_cm_shell_slurm(self): <NEW_LINE> <INDENT> self.register_command_topic('platform', 'slurm') <NEW_LINE> pass <NEW_LINE> <DEDENT> @command <NEW_LINE> def do_slurm(self, args, arguments): <NEW_LINE> <INDENT> log.info(arguments) <NEW_LINE> if(arguments["deploy"] and arguments["GROUPNAME"] and arguments["LOGINNAME"]): <NEW_LINE> <INDENT> virtual_slurm().DeploySlurm("{GROUPNAME}".format(**arguments), "{LOGINNAME}".format(**arguments), "india") <NEW_LINE> return <NEW_LINE> <DEDENT> if(arguments["delete"] and arguments["GROUPNAME"]): <NEW_LINE> <INDENT> virtual_slurm().Delete("{GROUPNAME}".format(**arguments), "india") <NEW_LINE> return <NEW_LINE> <DEDENT> if(arguments["info"] and arguments["GROUPNAME"]): <NEW_LINE> <INDENT> virtual_slurm().Info("{GROUPNAME}".format(**arguments)) <NEW_LINE> return <NEW_LINE> <DEDENT> return
Creating a slurm cluster
6259907a656771135c48ad20
class EAP_TTLS(EAP): <NEW_LINE> <INDENT> name = "EAP-TTLS" <NEW_LINE> fields_desc = [ ByteEnumField("code", 1, eap_codes), ByteField("id", 0), FieldLenField("len", None, fmt="H", length_of="data", adjust=lambda p, x: x + 10 if p.L == 1 else x + 6), ByteEnumField("type", 21, eap_types), BitField("L", 0, 1), BitField("M", 0, 1), BitField("S", 0, 1), BitField("reserved", 0, 2), BitField("version", 0, 3), ConditionalField(IntField("message_len", 0), lambda pkt: pkt.L == 1), XStrLenField("data", "", length_from=lambda pkt: 0 if pkt.len is None else pkt.len - (6 + 4 * pkt.L)) ]
RFC 5281 - "Extensible Authentication Protocol Tunneled Transport Layer Security Authenticated Protocol Version 0 (EAP-TTLSv0)"
6259907a4a966d76dd5f08c7
class RootViaSudoExecutionController( CheckBoxDifferentialExecutionController): <NEW_LINE> <INDENT> def __init__(self, session_dir, provider_list): <NEW_LINE> <INDENT> super().__init__(session_dir, provider_list) <NEW_LINE> try: <NEW_LINE> <INDENT> in_sudo_group = grp.getgrnam("sudo").gr_gid in posix.getgroups() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> in_sudo_group = False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> in_admin_group = grp.getgrnam("admin").gr_gid in posix.getgroups() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> in_admin_group = False <NEW_LINE> <DEDENT> self.user_can_sudo = in_sudo_group or in_admin_group <NEW_LINE> <DEDENT> def get_execution_command(self, job, config, nest_dir): <NEW_LINE> <INDENT> cmd = ['sudo', '-u', job.user, 'env'] <NEW_LINE> env = self.get_differential_execution_environment( job, config, nest_dir) <NEW_LINE> cmd += ["{key}={value}".format(key=key, value=value) for key, value in sorted(env.items())] <NEW_LINE> cmd += ['bash', '-c', job.command] <NEW_LINE> return cmd <NEW_LINE> <DEDENT> def get_checkbox_score(self, job): <NEW_LINE> <INDENT> if job.user is not None and self.user_can_sudo: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1
Execution controller that gains root by using sudo. This controller should be used for jobs that need root but cannot be executed by the plainbox-trusted-launcher-1. This happens whenever the job is not in the system-wide provider location. In practice it is used when working with the special 'checkbox-in-source-tree' provider as well as for jobs that need to run as root from the non-system-wide location. Using this controller is preferable to pkexec if running on command line as unlike pkexec, it retains 'memory' and doesn't ask for the password over and over again.
6259907a460517430c432d4a
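For illustration only, the command list returned by get_execution_command() has roughly this shape; the environment entries and job command below are hypothetical.
cmd = ['sudo', '-u', 'root', 'env',
       'CHECKBOX_DATA=/tmp/session/CHECKBOX_DATA',    # sorted differential environment entries
       'PLAINBOX_SESSION_SHARE=/tmp/session/share',
       'bash', '-c', 'lspci -vvnn']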
class FixHeaderApplyTransforms(ApplyTransforms): <NEW_LINE> <INDENT> def _run_interface(self, runtime, correct_return_codes=(0,)): <NEW_LINE> <INDENT> runtime = super(FixHeaderApplyTransforms, self)._run_interface( runtime, correct_return_codes) <NEW_LINE> _copyxform(self.inputs.reference_image, os.path.abspath(self._gen_filename('output_image')), message='%s (niworkflows v%s)' % ( self.__class__.__name__, __version__)) <NEW_LINE> return runtime
A replacement for nipype.interfaces.ants.resampling.ApplyTransforms that fixes the resampled image header to match the xform of the reference image
6259907ae1aae11d1e7cf501
class WiktionaryTerm: <NEW_LINE> <INDENT> def __init__(self, text: AccentedText): <NEW_LINE> <INDENT> self.text = AccentedText(text) <NEW_LINE> self.etymology = AccentedText() <NEW_LINE> self.words = {} <NEW_LINE> self.audio_sources = {} <NEW_LINE> self.download_timestamp = time.time() <NEW_LINE> <DEDENT> def get_audio_url(self, extension="ogg") -> str: <NEW_LINE> <INDENT> return self.audio_sources.get(extension, None) <NEW_LINE> <DEDENT> def get_word_data(self, word_type: WordType): <NEW_LINE> <INDENT> return self.words.get(word_type, None) <NEW_LINE> <DEDENT> def serialize(self) -> dict: <NEW_LINE> <INDENT> data = { "text": self.text.raw, "download_timestamp": self.download_timestamp, "etymology": self.etymology.raw, "words": {}, } <NEW_LINE> if self.audio_sources: <NEW_LINE> <INDENT> data["audio_sources"] = {} <NEW_LINE> for extension, url in self.audio_sources.items(): <NEW_LINE> <INDENT> data["audio_sources"][extension] = url <NEW_LINE> <DEDENT> <DEDENT> for word_type, word_data in self.words.items(): <NEW_LINE> <INDENT> data["words"][word_type.name.lower()] = word_data.serialize() <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def deserialize(self, data: dict): <NEW_LINE> <INDENT> self.text = AccentedText(data["text"]) <NEW_LINE> self.download_timestamp = data["download_timestamp"] <NEW_LINE> self.etymology = AccentedText(data["etymology"]) <NEW_LINE> self.words.clear() <NEW_LINE> self.audio_sources.clear() <NEW_LINE> self.audio_sources.update(data.get("audio_sources", {})) <NEW_LINE> for word_type_name, word_data in data.get("words", {}).items(): <NEW_LINE> <INDENT> word_type = types.parse_word_type(word_type_name) <NEW_LINE> word = WiktionaryWordData(word_type=word_type) <NEW_LINE> word.deserialize(word_data) <NEW_LINE> self.words[word_type] = word
Data representing a single page on Wiktionary for a term.
6259907a4527f215b58eb691
class MigrateServer(command.Command): <NEW_LINE> <INDENT> log = logging.getLogger(__name__ + '.MigrateServer') <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(MigrateServer, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'server', metavar='<server>', help=_('Server (name or ID)'), ) <NEW_LINE> parser.add_argument( '--live', metavar='<hostname>', help=_('Target hostname'), ) <NEW_LINE> migration_group = parser.add_mutually_exclusive_group() <NEW_LINE> migration_group.add_argument( '--shared-migration', dest='shared_migration', action='store_true', default=True, help=_('Perform a shared live migration (default)'), ) <NEW_LINE> migration_group.add_argument( '--block-migration', dest='shared_migration', action='store_false', help=_('Perform a block live migration'), ) <NEW_LINE> disk_group = parser.add_mutually_exclusive_group() <NEW_LINE> disk_group.add_argument( '--disk-overcommit', action='store_true', default=False, help=_('Allow disk over-commit on the destination host'), ) <NEW_LINE> disk_group.add_argument( '--no-disk-overcommit', dest='disk_overcommit', action='store_false', default=False, help=_('Do not over-commit disk on the' ' destination host (default)'), ) <NEW_LINE> parser.add_argument( '--wait', action='store_true', help=_('Wait for resize to complete'), ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> @utils.log_method(log) <NEW_LINE> def take_action(self, parsed_args): <NEW_LINE> <INDENT> compute_client = self.app.client_manager.compute <NEW_LINE> server = utils.find_resource( compute_client.servers, parsed_args.server, ) <NEW_LINE> if parsed_args.live: <NEW_LINE> <INDENT> server.live_migrate( parsed_args.live, parsed_args.shared_migration, parsed_args.disk_overcommit, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> server.migrate() <NEW_LINE> <DEDENT> if parsed_args.wait: <NEW_LINE> <INDENT> if utils.wait_for_status( compute_client.servers.get, server.id, callback=_show_progress, ): <NEW_LINE> <INDENT> sys.stdout.write(_('Complete\n')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sys.stdout.write(_('\nError migrating server')) <NEW_LINE> raise SystemExit
Migrate server to different host
6259907aaad79263cf43019b
class MyApp(wx.App): <NEW_LINE> <INDENT> def OnInit(self): <NEW_LINE> <INDENT> img = wx.Image(util.resource_path('resources/f22.jpg'), wx.BITMAP_TYPE_JPEG) <NEW_LINE> self.frame = MyFrame(image=img) <NEW_LINE> self.frame.Show(True) <NEW_LINE> self.SetTopWindow(self.frame) <NEW_LINE> return True
Application class.
6259907a5fc7496912d48f5b
class AttackBase(object): <NEW_LINE> <INDENT> name = None <NEW_LINE> targeted = False <NEW_LINE> def __init__(self, model, classifier, options): <NEW_LINE> <INDENT> assert self.name, 'Each attack must define a name attribute.' <NEW_LINE> self.session = model.session <NEW_LINE> self.model = model <NEW_LINE> self.classifier = classifier <NEW_LINE> self.options = options <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_options(cls, parser): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def adversarial_noise_op(self, x, y): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def adversarial_examples(self, x, y, intensity=0.1): <NEW_LINE> <INDENT> if hasattr(self, '_adversarial'): <NEW_LINE> <INDENT> adversarial_input, adversarial_labels, adversarial_op = self._adversarial <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with tf.name_scope(self.name): <NEW_LINE> <INDENT> adversarial_input = self.classifier._get_input_placeholder() <NEW_LINE> adversarial_labels = self.classifier._get_labels_placeholder() <NEW_LINE> adversarial_op = self.adversarial_noise_op(adversarial_input, adversarial_labels) <NEW_LINE> <DEDENT> self._adversarial = adversarial_input, adversarial_labels, adversarial_op <NEW_LINE> <DEDENT> noise, targets = self.classifier.batch_apply(adversarial_op, feed_dict=self.classifier._set_training({ adversarial_input: x, adversarial_labels: y, }, False)) <NEW_LINE> return (x + intensity * noise, targets) <NEW_LINE> <DEDENT> def get_target_examples(self): <NEW_LINE> <INDENT> pass
Abstract attack.
6259907a56b00c62f0fb42b5
class UNG_917t1: <NEW_LINE> <INDENT> activate = Buff(TARGET, "UNG_917e")
Dinomancy - (HeroPower) Hero Power Give a Beast +2/+2. https://hearthstone.gamepedia.com/Dinomancy
6259907abf627c535bcb2eb1
class PyramidalFlowFilter(FlowFilter): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self._H = kwargs.pop('levels', 1) <NEW_LINE> pIter = kwargs.pop('propIterations', 1) <NEW_LINE> sIter = kwargs.pop('smoothIterations', 1) <NEW_LINE> gamma = kwargs.pop('gamma', 1.0) <NEW_LINE> self._maxflow = kwargs.pop('maxflow', 0.25) <NEW_LINE> self._propIterations = list() <NEW_LINE> self._smoothIterations = list() <NEW_LINE> self._gamma = list() <NEW_LINE> for h in range(self._H): <NEW_LINE> <INDENT> if isinstance(pIter, Iterable): <NEW_LINE> <INDENT> self._propIterations.append(pIter[h]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._propIterations.append(int(math.ceil(pIter / math.pow(2, h)))) <NEW_LINE> <DEDENT> self._smoothIterations.append(sIter[h] if isinstance(sIter, Iterable) else sIter) <NEW_LINE> self._gamma.append(gamma[h] if isinstance(gamma, Iterable) else gamma) <NEW_LINE> <DEDENT> self._filterTop = SimpleFlowFilter(propIterations=self._propIterations[self._H-1], smoothIterations=self._smoothIterations[self._H-1], gamma=self._gamma[self._H-1]) <NEW_LINE> self._lowLevelFilters = list() <NEW_LINE> for h in range(self._H-1): <NEW_LINE> <INDENT> filterLow = DeltaFlowFilter(proptIterations=self._propIterations[h], smoothIterations=self._smoothIterations[h], gamma=self._gamma[h], maxflow=self._maxflow) <NEW_LINE> self._lowLevelFilters.append(filterLow) <NEW_LINE> <DEDENT> self._firstLoad = True <NEW_LINE> self._elapsedTime = 0.0 <NEW_LINE> <DEDENT> def loadImage(self, img): <NEW_LINE> <INDENT> self._img = img <NEW_LINE> if self._firstLoad: <NEW_LINE> <INDENT> self._flow = np.zeros((self._img.shape[0], self._img.shape[1], 2), dtype=np.float32) <NEW_LINE> self._firstLoad = False <NEW_LINE> <DEDENT> <DEDENT> def compute(self): <NEW_LINE> <INDENT> start = time.clock() <NEW_LINE> imgPyr = fmisc.imagePyramid(self._img, self._H) <NEW_LINE> self._filterTop.loadImage(imgPyr[-1]) <NEW_LINE> flowOld = self._filterTop.getFlow() <NEW_LINE> self._filterTop.compute() <NEW_LINE> self._flow = self._filterTop.getFlow() <NEW_LINE> if self._H > 1: <NEW_LINE> <INDENT> for h in range(self._H-2, -1, -1): <NEW_LINE> <INDENT> filterLow = self._lowLevelFilters[h] <NEW_LINE> filterLow.loadImage(imgPyr[h]) <NEW_LINE> flowOld = 2.0*fmisc.imageUp(flowOld) + filterLow.getDeltaFlow() <NEW_LINE> filterLow.setFlow(flowOld) <NEW_LINE> filterLow.compute() <NEW_LINE> self._flow = 2.0*fmisc.imageUp(self._flow) + filterLow.getDeltaFlow() <NEW_LINE> self._flow[self._flow > self._propIterations[h]] = self._propIterations[h] <NEW_LINE> self._flow[self._flow < -self._propIterations[h]] = -self._propIterations[h] <NEW_LINE> <DEDENT> <DEDENT> stop = time.clock() <NEW_LINE> self._elapsedTime = (stop - start) * 1000.0 <NEW_LINE> <DEDENT> def elapsedTime(self): <NEW_LINE> <INDENT> return self._elapsedTime <NEW_LINE> <DEDENT> def getFlow(self): <NEW_LINE> <INDENT> return self._flow
Pyramidal optical flow filter See also -------- SimpleFlowFilter : DeltaFlowFilter :
6259907a7047854f46340d9e
class PaidTask(models.Model): <NEW_LINE> <INDENT> type_choices = [ (PaidTaskTypes.TRANSLATION, _("Translation")), (PaidTaskTypes.REVIEW, _("Review")), (PaidTaskTypes.HOURLY_WORK, _("Hourly Work")), (PaidTaskTypes.CORRECTION, _("Correction")), ] <NEW_LINE> task_type = models.PositiveSmallIntegerField( _("Type"), choices=type_choices, null=False, db_index=True, default=PaidTaskTypes.TRANSLATION, ) <NEW_LINE> amount = models.FloatField(_("Amount"), default=0, null=False) <NEW_LINE> rate = models.FloatField(null=False, default=0) <NEW_LINE> datetime = models.DateTimeField(_("Date"), null=False, db_index=True) <NEW_LINE> description = models.TextField(_("Description"), null=True) <NEW_LINE> user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) <NEW_LINE> objects = PaidTaskManager() <NEW_LINE> @classmethod <NEW_LINE> def get_task_type_title(cls, task_type): <NEW_LINE> <INDENT> return dict(cls.type_choices).get(task_type, "") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Task: [id=%s, user=%s, month=%s, " "type=%s, amount=%s, comment=%s]" % ( self.id, self.user.username, self.datetime.strftime("%Y-%m"), PaidTask.get_task_type_title(self.task_type), self.amount, self.description, ) <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> now = timezone.now() <NEW_LINE> if settings.USE_TZ: <NEW_LINE> <INDENT> now = timezone.localtime(now) <NEW_LINE> <DEDENT> if now.month == self.datetime.month and now.year == self.datetime.year: <NEW_LINE> <INDENT> self.datetime = now
The Paid Task. ``task_type``, ``amount`` and ``date`` are required.
6259907a67a9b606de547797
class C3H10N(Fragment): <NEW_LINE> <INDENT> def __init__(self, lipid, adduct, intensity): <NEW_LINE> <INDENT> assert not self.Formula() > lipid.formula <NEW_LINE> super().__init__(lipid, adduct, intensity) <NEW_LINE> <DEDENT> def MZ(self): <NEW_LINE> <INDENT> return 60.080776 <NEW_LINE> <DEDENT> def Formula(self): <NEW_LINE> <INDENT> formula = Counter({'C':3, 'H':10, 'N':1}) <NEW_LINE> return formula <NEW_LINE> <DEDENT> def Charge(self): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> def Comment(self): <NEW_LINE> <INDENT> return '[C3H10N]+'
X-H fragment common to phospholipids under negative ESI. MZ: 60.080776
6259907a23849d37ff852a9b
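A rough cross-check of the stated m/z, using standard monoisotopic reference masses; the numeric constants below are not taken from the source.
C, H, N, e = 12.0, 1.00782503, 14.0030740, 0.00054858
mz = 3 * C + 10 * H + N - e   # [C3H10N]+ carries a +1 charge, so m/z = molecular mass minus one electron
print(round(mz, 6))           # ~60.080776, matching MZ() above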
class Libro: <NEW_LINE> <INDENT> ISBN="" <NEW_LINE> nombre="" <NEW_LINE> anno=0 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.ISBN="" <NEW_LINE> self.nombre="xxx" <NEW_LINE> self.anno=0 <NEW_LINE> <DEDENT> def setISBN(self,pISBN): <NEW_LINE> <INDENT> self.ISBN=pISBN <NEW_LINE> <DEDENT> def setNombre(self,pnombre): <NEW_LINE> <INDENT> self.nombre=pnombre <NEW_LINE> <DEDENT> def setAnno(self,panno): <NEW_LINE> <INDENT> self.anno=panno <NEW_LINE> <DEDENT> def getISBN(self): <NEW_LINE> <INDENT> return self.ISBN <NEW_LINE> <DEDENT> def getNombre(self): <NEW_LINE> <INDENT> return self.nombre <NEW_LINE> <DEDENT> def getAnno(self): <NEW_LINE> <INDENT> return self.anno <NEW_LINE> <DEDENT> def getDatos(self): <NEW_LINE> <INDENT> print("\nISBN: "+self.ISBN) <NEW_LINE> print("Nombre: "+self.nombre) <NEW_LINE> print("Año: "+str(self.anno)) <NEW_LINE> return
Definition of attributes
6259907afff4ab517ebcf1fb
class MetadataFileSystemAdapterTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._osStatResult = SimpleMock() <NEW_LINE> self._osStatResult.st_ctime = 788999998.0 <NEW_LINE> self._osStatResult.st_mtime = 788999998.0 <NEW_LINE> self._osStatResult.st_size = 788999998.0 <NEW_LINE> self._osModuleMock = _OsModuleMock(self._osStatResult) <NEW_LINE> adapter.os = self._osModuleMock <NEW_LINE> self._mimetypesModuleMock = SimpleMock((None, None)) <NEW_LINE> adapter.mimetypes = self._mimetypesModuleMock <NEW_LINE> self._adapter = adapter.MetadataFileSystemAdapter("/identifier", SimpleMock("/identifier")) <NEW_LINE> <DEDENT> def _initValidRetrieveResult(self, mimeType): <NEW_LINE> <INDENT> mappedResult = dict() <NEW_LINE> mappedResult[constants.CREATION_DATETIME] = MetadataValue(str(self._osStatResult.st_ctime)) <NEW_LINE> mappedResult[constants.MODIFICATION_DATETIME] = MetadataValue(str(self._osStatResult.st_mtime)) <NEW_LINE> mappedResult[constants.SIZE] = MetadataValue(str(self._osStatResult.st_size)) <NEW_LINE> mappedResult[constants.MIME_TYPE] = MetadataValue(mimeType) <NEW_LINE> mappedResult[constants.OWNER] = MetadataValue("") <NEW_LINE> return mappedResult <NEW_LINE> <DEDENT> def testRetrieve(self): <NEW_LINE> <INDENT> expectedResult = self._initValidRetrieveResult("") <NEW_LINE> self.assertEquals(self._adapter.retrieve(), expectedResult) <NEW_LINE> result = self._adapter.retrieve([constants.OWNER]) <NEW_LINE> self.assertTrue(len(result) == 1) <NEW_LINE> self.assertTrue(constants.OWNER in result) <NEW_LINE> result = self._adapter.retrieve([constants.OWNER, constants.CREATION_DATETIME]) <NEW_LINE> self.assertTrue(len(result) == 2) <NEW_LINE> self.assertTrue(constants.OWNER in result) <NEW_LINE> self.assertTrue(constants.CREATION_DATETIME in result) <NEW_LINE> self.assertEquals(self._adapter.retrieve([]), expectedResult) <NEW_LINE> self.assertEquals(self._adapter.retrieve(None), expectedResult) <NEW_LINE> self._mimetypesModuleMock.value = ("application/pdf", None) <NEW_LINE> expectedResult = self._initValidRetrieveResult("application/pdf") <NEW_LINE> self.assertEquals(self._adapter.retrieve(), expectedResult) <NEW_LINE> self._osModuleMock.error = OSError() <NEW_LINE> self.assertRaises(PersistenceError, self._adapter.retrieve) <NEW_LINE> <DEDENT> def testUpdate(self): <NEW_LINE> <INDENT> self._adapter.update(dict()) <NEW_LINE> <DEDENT> def testDelete(self): <NEW_LINE> <INDENT> self._adapter.delete(list())
Test cases for the metadata adapter.
6259907aadb09d7d5dc0bf4c
class Command(BaseCommand): <NEW_LINE> <INDENT> help = 'import csv file and create records.' <NEW_LINE> def add_arguments(self, parser: CommandParser) -> None: <NEW_LINE> <INDENT> parser.add_argument('csv', nargs='+', type=str) <NEW_LINE> <DEDENT> def handle(self, *args: Any, **options: Any) -> Optional[str]: <NEW_LINE> <INDENT> with open(options['csv'][0]) as f: <NEW_LINE> <INDENT> reader = csv.DictReader(f) <NEW_LINE> for i, row in enumerate(reader): <NEW_LINE> <INDENT> document = Document.objects.create( description=row['description'], photo=row['photo'] ) <NEW_LINE> print(i, document) <NEW_LINE> <DEDENT> print('completed!')
Read a CSV file and import its data into the database. Usage: python manage.py import_csv [csv file path]. The CSV header needs to contain 'description' and 'photo' columns. description(str): description; photo(str): file path. Args: BaseCommand ([type]): BaseCommand
6259907a1b99ca4002290227
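A hypothetical input file for the management command above, written with the standard csv module; only the 'description' and 'photo' columns the command reads are required.
import csv

with open("photos.csv", "w", newline="") as f:
    writer = csv.DictWriter(f, fieldnames=["description", "photo"])
    writer.writeheader()
    writer.writerow({"description": "Front cover", "photo": "images/cover.jpg"})
    writer.writerow({"description": "Back cover", "photo": "images/back.jpg"})

# Then run:  python manage.py import_csv photos.csv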
class UnknownAnnotationTypeException(Exception): <NEW_LINE> <INDENT> pass
error raised when an annotation type is not found
6259907a44b2445a339b764f
class BitmapBuffer(wx.MemoryDC): <NEW_LINE> <INDENT> def __init__(self, width, height, colour): <NEW_LINE> <INDENT> wx.MemoryDC.__init__(self) <NEW_LINE> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.colour = colour <NEW_LINE> self.bitmap = wx.Bitmap(self.width, self.height) <NEW_LINE> self.SelectObject(self.bitmap) <NEW_LINE> self.SetBackground(wx.Brush(self.colour, wx.BRUSHSTYLE_SOLID)) <NEW_LINE> self.Clear() <NEW_LINE> self.SetMapMode(wx.MM_TEXT) <NEW_LINE> <DEDENT> def GetBitmap(self): <NEW_LINE> <INDENT> return self.bitmap <NEW_LINE> <DEDENT> def GetPixelColour(self, x, y): <NEW_LINE> <INDENT> img = self.GetAsBitmap().ConvertToImage() <NEW_LINE> red = img.GetRed(x, y) <NEW_LINE> green = img.GetGreen(x, y) <NEW_LINE> blue = img.GetBlue(x, y) <NEW_LINE> return wx.Colour(red, green, blue)
A screen buffer class. This class implements a screen output buffer. Data is meant to be drawn in the buffer class and then blitted directly to the output device, or on-screen window.
6259907a97e22403b383c8e5
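A hedged wxPython usage sketch for the buffer above; a wx.App must exist before GDI objects are created, and the drawing calls are illustrative.
import wx

app = wx.App(False)
buf = BitmapBuffer(200, 100, wx.Colour(255, 255, 255))
buf.SetPen(wx.Pen(wx.Colour(255, 0, 0)))
buf.DrawLine(0, 0, 199, 99)        # draw into the off-screen bitmap
print(buf.GetPixelColour(10, 5))   # read back the colour stored at that pixel
bitmap = buf.GetBitmap()           # blit or save this wx.Bitmap elsewhere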
class Or(Operator): <NEW_LINE> <INDENT> def op(self, arg1, arg2): <NEW_LINE> <INDENT> return arg1 or arg2 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '(%s | %s)' % tuple(self)
Propositional calculus Or operator
6259907a460517430c432d4b
class AttrEncoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, outdims=40): <NEW_LINE> <INDENT> super(AttrEncoder, self).__init__() <NEW_LINE> self.resnet = resnet50(pretrained=True) <NEW_LINE> self.reslayers = list(self.resnet.children())[:-2] <NEW_LINE> self.reslayers.append(nn.Conv2d(2048, 2048, 2)) <NEW_LINE> self.model = nn.Sequential(*self.reslayers) <NEW_LINE> self.affine = nn.Linear(2048, outdims) <NEW_LINE> self.act = nn.Sigmoid() <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> if x.dim() == 3: <NEW_LINE> <INDENT> x = x.unsqueeze(0) <NEW_LINE> <DEDENT> resout = self.model(x) <NEW_LINE> resout = resout.view(resout.size(0), -1) <NEW_LINE> out = self.act(self.affine(resout)) <NEW_LINE> return out
Attribute predictor class (encoder)
6259907a283ffb24f3cf5284
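A hedged usage sketch for the encoder above, assuming torch/torchvision are installed and the pretrained resnet50 weights can be fetched; the input size is chosen so the final kernel-2 convolution reduces the feature map to 1x1.
import torch

encoder = AttrEncoder(outdims=40)
images = torch.randn(4, 3, 64, 64)   # 64x64 inputs give a 2x2 map after the ResNet stages
scores = encoder(images)             # sigmoid outputs, shape (4, 40)
print(scores.shape)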
class Atype8(Atype): <NEW_LINE> <INDENT> def __init__(self, tik: int, object_id: int, coal_id: int, task_type_id: int, success: bool, icon_type_id: int, pos: dict): <NEW_LINE> <INDENT> super().__init__(tik) <NEW_LINE> self.object_id = object_id <NEW_LINE> self.coal_id = coal_id <NEW_LINE> self.task_type_id = task_type_id <NEW_LINE> self.success = success <NEW_LINE> self.icon_type_id = icon_type_id <NEW_LINE> self.pos = pos <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'T:{self.tik} AType:8 OBJID:{self.object_id} POS({self.pos}) COAL:{self.coal_id} ' f'TYPE:{self.task_type_id} RES:{self.success} ICTYPE:{self.icon_type_id}'
Mission Objective
6259907aad47b63b2c5a9233
class WrathOfTheStorm(Feature): <NEW_LINE> <INDENT> _name = "Wrath of the Storm" <NEW_LINE> source = "Cleric (Tempest Domain)" <NEW_LINE> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> num_uses = max(1, self.owner.wisdom.modifier) <NEW_LINE> return self._name + ' ({:d}x/LR)'.format(num_uses)
Also at 1st level, you can thunderously rebuke attackers. When a creature within 5 feet of you that you can see hits you with an attack, you can use your reaction to cause the creature to make a Dexterity saving throw. The creature takes 2d8 lightning or thunder damage (your choice) on a failed saving throw, and half as much damage on a successful one. You can use this feature a number of times equal to your Wisdom modifier (a minimum of once). You regain all expended uses when you finish a long rest.
6259907af9cc0f698b1c5fbe
class NoErrors(LogValidator): <NEW_LINE> <INDENT> def parseLogLine(self,proc,date,time,level,msg): <NEW_LINE> <INDENT> if level in ["FATAL","ERROR","EXCEPTION"]: <NEW_LINE> <INDENT> raise ValidationError( "Encountered an %s" % (level,) )
Considers a log valid if there are no errors or exceptions.
6259907aaad79263cf43019d
class ArticleCreateView(LoginRequiredMixin, CustomSuccessMessageMixin, CreateView): <NEW_LINE> <INDENT> login_url = reverse_lazy('login_page') <NEW_LINE> model = Articles <NEW_LINE> template_name = 'edit_page.html' <NEW_LINE> form_class = ArticleForm <NEW_LINE> success_url = reverse_lazy('edit_page') <NEW_LINE> success_msg = 'Статья создана' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> kwargs['list_articles'] = Articles.objects.all().order_by('-id') <NEW_LINE> return super().get_context_data(**kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> self.object = form.save(commit=False) <NEW_LINE> self.object.author = self.request.user <NEW_LINE> self.object.save() <NEW_LINE> return super().form_valid(form)
Article creation view class
6259907abf627c535bcb2eb3
class GoogleSheetsOptions(object): <NEW_LINE> <INDENT> _SOURCE_FORMAT = 'GOOGLE_SHEETS' <NEW_LINE> _RESOURCE_NAME = 'googleSheetsOptions' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._properties = {} <NEW_LINE> <DEDENT> skip_leading_rows = _TypedApiResourceProperty( 'skip_leading_rows', 'skipLeadingRows', six.integer_types) <NEW_LINE> def to_api_repr(self): <NEW_LINE> <INDENT> config = copy.deepcopy(self._properties) <NEW_LINE> slr = config.pop('skipLeadingRows', None) <NEW_LINE> if slr is not None: <NEW_LINE> <INDENT> config['skipLeadingRows'] = str(slr) <NEW_LINE> <DEDENT> return config <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_api_repr(cls, resource): <NEW_LINE> <INDENT> slr = resource.get('skipLeadingRows') <NEW_LINE> config = cls() <NEW_LINE> config._properties = copy.deepcopy(resource) <NEW_LINE> config.skip_leading_rows = _int_or_none(slr) <NEW_LINE> return config
Options that describe how to treat Google Sheets as BigQuery tables.
6259907a67a9b606de547798
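A round-trip sketch for the options class above, using only the methods shown; no network access is involved.
opts = GoogleSheetsOptions()
opts.skip_leading_rows = 1
resource = opts.to_api_repr()                    # skipLeadingRows is serialized as a string
restored = GoogleSheetsOptions.from_api_repr(resource)
print(resource, restored.skip_leading_rows)      # {'skipLeadingRows': '1'} 1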
class ColormapDialog(QtWidgets.QDialog): <NEW_LINE> <INDENT> @docstrings.with_indent(8) <NEW_LINE> def __init__(self, names=[], N=10, editable=True, *args, **kwargs): <NEW_LINE> <INDENT> super(QtWidgets.QDialog, self).__init__(*args, **kwargs) <NEW_LINE> vbox = QtWidgets.QVBoxLayout() <NEW_LINE> self.table = ColormapTable(names=names, N=N, editable=editable) <NEW_LINE> if editable: <NEW_LINE> <INDENT> vbox.addWidget(QtWidgets.QLabel("Double-click a color to edit")) <NEW_LINE> <DEDENT> vbox.addWidget(self.table) <NEW_LINE> self.setLayout(vbox) <NEW_LINE> col_width = self.table.columnWidth(0) <NEW_LINE> header_width = self.table.verticalHeader().width() <NEW_LINE> row_height = self.table.rowHeight(0) <NEW_LINE> available = QtWidgets.QDesktopWidget().availableGeometry() <NEW_LINE> height = int(min(row_height * (self.table.rowCount() + 1), 2. * available.height() / 3.)) <NEW_LINE> width = int(min(header_width + col_width * N + 0.5 * col_width, 2. * available.width() / 3.)) <NEW_LINE> self.resize(QtCore.QSize(width, height)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> @docstrings.with_indent(8) <NEW_LINE> def get_colormap(cls, names=[], N=10, *args, **kwargs): <NEW_LINE> <INDENT> names = safe_list(names) <NEW_LINE> obj = cls(names, N, *args, **kwargs) <NEW_LINE> vbox = obj.layout() <NEW_LINE> buttons = QtWidgets.QDialogButtonBox( QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel, parent=obj) <NEW_LINE> buttons.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False) <NEW_LINE> vbox.addWidget(buttons) <NEW_LINE> buttons.accepted.connect(obj.accept) <NEW_LINE> buttons.rejected.connect(obj.reject) <NEW_LINE> obj.table.selectionModel().selectionChanged.connect( lambda indices: buttons.button(QtWidgets.QDialogButtonBox.Ok).setEnabled( bool(indices))) <NEW_LINE> accepted = obj.exec_() <NEW_LINE> if accepted: <NEW_LINE> <INDENT> return obj.table.chosen_colormap <NEW_LINE> <DEDENT> <DEDENT> docstrings.delete_params('show_colormaps.parameters', 'use_qt') <NEW_LINE> @classmethod <NEW_LINE> @docstrings.with_indent(8) <NEW_LINE> def show_colormap(cls, names=[], N=10, show=True, *args, **kwargs): <NEW_LINE> <INDENT> names = safe_list(names) <NEW_LINE> obj = cls(names, N, *args, **kwargs) <NEW_LINE> vbox = obj.layout() <NEW_LINE> buttons = QtWidgets.QDialogButtonBox(QtWidgets.QDialogButtonBox.Close, parent=obj) <NEW_LINE> buttons.rejected.connect(obj.close) <NEW_LINE> vbox.addWidget(buttons) <NEW_LINE> if show: <NEW_LINE> <INDENT> obj.show() <NEW_LINE> <DEDENT> return obj
A widget for selecting a colormap
6259907ad486a94d0ba2d99c
class FileImager(USBImager): <NEW_LINE> <INDENT> def Run(self): <NEW_LINE> <INDENT> if not os.path.exists(self.device): <NEW_LINE> <INDENT> cros_build_lib.Die('Path %s does not exist.' % self.device) <NEW_LINE> <DEDENT> image_path = self._GetImagePath() <NEW_LINE> if os.path.isdir(self.device): <NEW_LINE> <INDENT> logging.info('Copying to %s', os.path.join(self.device, os.path.basename(image_path))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.info('Copying to %s', self.device) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> shutil.copy(image_path, self.device) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> logging.error('Failed to copy image %s to %s', image_path, self.device)
Copy image to the target path.
6259907aadb09d7d5dc0bf4e
class ExtFileField(forms.FileField): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> ext_whitelist = kwargs.pop("ext_whitelist") <NEW_LINE> self.ext_whitelist = [i.lower() for i in ext_whitelist] <NEW_LINE> super(ExtFileField, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def clean(self, *args, **kwargs): <NEW_LINE> <INDENT> data = super(ExtFileField, self).clean(*args, **kwargs) <NEW_LINE> filename = data.name <NEW_LINE> ext = os.path.splitext(filename)[1] <NEW_LINE> ext = ext.lower() <NEW_LINE> if ext not in self.ext_whitelist: <NEW_LINE> <INDENT> raise forms.ValidationError("Not allowed filetype!")
Same as forms.FileField, but you can specify a file extension whitelist. >>> from django.core.files.uploadedfile import SimpleUploadedFile >>> >>> t = ExtFileField(ext_whitelist=(".pdf", ".txt")) >>> >>> t.clean(SimpleUploadedFile('filename.pdf', 'Some File Content')) >>> t.clean(SimpleUploadedFile('filename.txt', 'Some File Content')) >>> >>> t.clean(SimpleUploadedFile('filename.exe', 'Some File Content')) Traceback (most recent call last): ... ValidationError: [u'Not allowed filetype!']
6259907a1b99ca4002290228
class DashboardLoginView(APIView): <NEW_LINE> <INDENT> def post(self, request, format=None): <NEW_LINE> <INDENT> username = request.data['username'] <NEW_LINE> password = request.data['password'] <NEW_LINE> role = request.data['role'] <NEW_LINE> county=request.data["county"] <NEW_LINE> school=request.data["school"] <NEW_LINE> if User.objects.filter(username=username).exists() == False: <NEW_LINE> <INDENT> error = { 'error': 'Username is invalid' } <NEW_LINE> return Response(error, status=status.HTTP_401_UNAUTHORIZED) <NEW_LINE> <DEDENT> user = User.objects.get(username=username) <NEW_LINE> if role == Teacher: <NEW_LINE> <INDENT> if Teacher.objects.filter(user_id=user.id).exists() == False: <NEW_LINE> <INDENT> error = { 'error': 'You are not a teacher' } <NEW_LINE> return Response(error, status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> <DEDENT> elif role == HeadTeacher: <NEW_LINE> <INDENT> if HeadTeacher.objects.filter(user_id=user.id).exists() == False: <NEW_LINE> <INDENT> error = { 'error': 'You are not a headteacher' } <NEW_LINE> return Response(error, status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> <DEDENT> elif role == CountyOfficer: <NEW_LINE> <INDENT> if CountyOfficer.objects.filter(user_id=user.id).exists() == False: <NEW_LINE> <INDENT> error = { 'error': 'You are not a country officer' } <NEW_LINE> return Response(error, status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> return Response(error, status=status.HTTP_406_NOT_ACCEPTABLE) <NEW_LINE> <DEDENT> auth_user = authenticate(username=username, password=password) <NEW_LINE> if auth_user: <NEW_LINE> <INDENT> token = Token.objects.get(user_id=user.id) <NEW_LINE> data = { "user":user.id, "username":user.username, "role":role, "county":county, "school":school, "token": token.key } <NEW_LINE> return Response(data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> error = { 'error': 'Your password is wrong' } <NEW_LINE> return Response(error, status=status.HTTP_400_BAD_REQUEST)
{ "username": "marlinek", "school": 1, "county": 1, "token": "73a8ac6d849d4af2cf001d07d42f83c80d4c2ce9", "role": "teacher", "user": 8 } "error": "Username is invalid" HTTP 401 Unauthorized HTTP 200 OK
6259907a66673b3332c31de4
class CryptoAddressOperation(CryptoOperation): <NEW_LINE> <INDENT> address_id = Column(ForeignKey("crypto_address.id")) <NEW_LINE> address = relationship(CryptoAddress, single_parent=True, cascade="all, delete-orphan", primaryjoin=address_id == CryptoAddress.id, backref="user_owned_crypto_accounts") <NEW_LINE> __mapper_args__ = { 'polymorphic_identity': CryptoOperationType.address, "order_by": CryptoOperation.created_at } <NEW_LINE> def __init__(self, address: CryptoAddress): <NEW_LINE> <INDENT> assert address <NEW_LINE> assert address.id <NEW_LINE> assert address.network <NEW_LINE> assert address.network.id <NEW_LINE> super().__init__(network=address.network) <NEW_LINE> self.address = address
Operation which has one cryptonetwork address as source/destination.
6259907a1f5feb6acb1645dc
class LayerFactory(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._layer_dict = { "ConvBN": (nn_blocks.ConvBN, self.conv_bn_config_todict), "MaxPool": (tf.keras.layers.MaxPool2D, self.maxpool_config_todict) } <NEW_LINE> <DEDENT> def conv_bn_config_todict(self, config, kwargs): <NEW_LINE> <INDENT> dictvals = { "filters": config.filters, "kernel_size": config.kernel_size, "strides": config.strides, "padding": config.padding } <NEW_LINE> dictvals.update(kwargs) <NEW_LINE> return dictvals <NEW_LINE> <DEDENT> def darktiny_config_todict(self, config, kwargs): <NEW_LINE> <INDENT> dictvals = {"filters": config.filters, "strides": config.strides} <NEW_LINE> dictvals.update(kwargs) <NEW_LINE> return dictvals <NEW_LINE> <DEDENT> def maxpool_config_todict(self, config, kwargs): <NEW_LINE> <INDENT> return { "pool_size": config.pool_size, "strides": config.strides, "padding": config.padding, "name": kwargs["name"] } <NEW_LINE> <DEDENT> def __call__(self, config, kwargs): <NEW_LINE> <INDENT> layer, get_param_dict = self._layer_dict[config.layer] <NEW_LINE> param_dict = get_param_dict(config, kwargs) <NEW_LINE> return layer(**param_dict)
Class for quick look up of default layers. Used by darknet to connect, introduce or exit a level. Used in place of an if condition or switch to make adding new layers easier and to reduce redundant code.
6259907aec188e330fdfa28d
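The LayerFactory entry above replaces an if/switch chain with a dictionary lookup keyed by `config.layer`. A minimal usage sketch, assuming the module's own `tensorflow` and `nn_blocks` imports are available (the `MaxPool` entry maps to `tf.keras.layers.MaxPool2D`) and using `types.SimpleNamespace` to stand in for the real config object:

```python
from types import SimpleNamespace

# Hypothetical config object; the real one comes from the model's config system.
pool_cfg = SimpleNamespace(layer="MaxPool", pool_size=2, strides=2, padding="same")

factory = LayerFactory()
# kwargs supplies the layer name expected by maxpool_config_todict.
pool_layer = factory(pool_cfg, {"name": "pool_1"})
```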
class DATA_PT_ribmosaic_panels(RibmosaicPipelinePanels, bpy.types.Panel): <NEW_LINE> <INDENT> bl_space_type = 'PROPERTIES' <NEW_LINE> bl_region_type = 'WINDOW' <NEW_LINE> bl_context = "data" <NEW_LINE> filter_type = ('MESH', 'CURVE', 'SURFACE', 'META', 'LAMP', 'CAMERA') <NEW_LINE> shader_panels = True <NEW_LINE> utility_panels = True <NEW_LINE> pass
Pipeline shader and utility control panel for object data
6259907ae1aae11d1e7cf503
class MDP: <NEW_LINE> <INDENT> def __init__(self, nS, nA, discount): <NEW_LINE> <INDENT> assert 0 <= discount, 'discount must be non-negative' <NEW_LINE> assert discount <= 1, 'value too large' <NEW_LINE> self.S = np.arange(nS) <NEW_LINE> self.A = np.arange(nA) <NEW_LINE> self.discount = discount <NEW_LINE> self.policy = self.initial_policy(self.S) <NEW_LINE> self.values = self.initial_values(self.S) <NEW_LINE> self.state = None <NEW_LINE> self.i = 0 <NEW_LINE> <DEDENT> def initial_policy(self, S): <NEW_LINE> <INDENT> policy = dict() <NEW_LINE> for s in S: <NEW_LINE> <INDENT> actions = self.possible_actions(s) <NEW_LINE> if len(actions) == 0: <NEW_LINE> <INDENT> policy[s] = -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> policy[s] = np.random.choice(actions) <NEW_LINE> <DEDENT> <DEDENT> return policy <NEW_LINE> <DEDENT> def initial_values(self, S): <NEW_LINE> <INDENT> values = dict() <NEW_LINE> for s in S: <NEW_LINE> <INDENT> values[s] = 0.0 <NEW_LINE> <DEDENT> return values <NEW_LINE> <DEDENT> def possible_actions(self, s): <NEW_LINE> <INDENT> return self.A <NEW_LINE> <DEDENT> def step(self, env): <NEW_LINE> <INDENT> if self.state is None: <NEW_LINE> <INDENT> raise ValueError('agent state was not initialized. do agent.state = s0.') <NEW_LINE> <DEDENT> a = self.policy[self.state] <NEW_LINE> s, r, done, info = env.step(a) <NEW_LINE> self.update(a, s, r) <NEW_LINE> self.state = s <NEW_LINE> self.i += 1 <NEW_LINE> return s, r, done, info
S : set of states (implemented as vector of integers) A : set of actions (which I think should also just be a vector of integer labels for now) P : matrix of transition probabilities (A x S x S) -> [0,1] from Wiki: "...sometimes written as Pr(s,a,s'), Pr(s'|s,a)..." TODO which is correct? R : reward for taking action a in s, ending in s' (A x S x S) -> real discount : how important are past rewards? more -> closer to 1, less -> closer to zero.
6259907aad47b63b2c5a9235
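A minimal sketch of constructing the MDP entry above; with no environment attached yet it only shows the random initial policy and zeroed values that the docstring describes (numpy must be importable, as the class itself uses `np`):

```python
import numpy as np

# 3 states, 2 actions, moderately far-sighted discount.
mdp = MDP(nS=3, nA=2, discount=0.9)

print(mdp.S)       # [0 1 2]
print(mdp.policy)  # e.g. {0: 1, 1: 0, 2: 1} -- one random action per state
print(mdp.values)  # {0: 0.0, 1: 0.0, 2: 0.0}

# Before stepping against an environment, the agent needs a start state:
mdp.state = 0
```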
class Foo(RefinableObject): <NEW_LINE> <INDENT> name = Refinable() <NEW_LINE> @dispatch <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(Foo, self).__init__(**kwargs)
First description
6259907af9cc0f698b1c5fbf
class MarkTicketClosed(TicketAction): <NEW_LINE> <INDENT> action_name = 'mark_closed' <NEW_LINE> required_states = 'talk started opened ready' <NEW_LINE> veto_vote_states = 'assigned'
Mark this ticket as closed.
6259907ad268445f2663a851
class ImportRIP(bpy.types.Operator, ImportHelper): <NEW_LINE> <INDENT> bl_idname = "import_scene.rip" <NEW_LINE> bl_label = 'Import NinjaRipper (*.rip)' <NEW_LINE> bl_options = {'UNDO'} <NEW_LINE> filename_ext = ".rip" <NEW_LINE> filter_glob = StringProperty(default="*.rip", options={'HIDDEN'}) <NEW_LINE> semantic_setting = bpy.props.EnumProperty(items= (('AUTO', 'Auto', 'Automatically detect vertex layout'), ('MANUAL', 'Manual', 'Enter vertex layout details manually')), name = "Vertex Layout") <NEW_LINE> vxlayout = bpy.props.IntProperty(name="VX", default=0) <NEW_LINE> vylayout = bpy.props.IntProperty(name="VY", default=1) <NEW_LINE> vzlayout = bpy.props.IntProperty(name="VZ", default=2) <NEW_LINE> nxlayout = bpy.props.IntProperty(name="NX", default=3) <NEW_LINE> nylayout = bpy.props.IntProperty(name="NY", default=4) <NEW_LINE> nzlayout = bpy.props.IntProperty(name="NZ", default=5) <NEW_LINE> tulayout = bpy.props.IntProperty(name="TU", default=6) <NEW_LINE> tvlayout = bpy.props.IntProperty(name="TV", default=7) <NEW_LINE> scale = bpy.props.FloatProperty(name="Scale", default=1.0) <NEW_LINE> reusemats = BoolProperty(name="Re-use materials", description="Re-use existing materials from other RIP files (especially useful when loading an entire folder)", default=True) <NEW_LINE> importall = BoolProperty(name="Import entire folder", description="Import all meshes in this folder", default=False) <NEW_LINE> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> sub = layout.row() <NEW_LINE> sub.prop(self, "semantic_setting") <NEW_LINE> if self.semantic_setting == "MANUAL": <NEW_LINE> <INDENT> sub = layout.row() <NEW_LINE> sub.prop(self, "vxlayout") <NEW_LINE> sub.prop(self, "nxlayout") <NEW_LINE> sub.prop(self, "tulayout") <NEW_LINE> sub = layout.row() <NEW_LINE> sub.prop(self, "vylayout") <NEW_LINE> sub.prop(self, "nylayout") <NEW_LINE> sub.prop(self, "tvlayout") <NEW_LINE> sub = layout.row() <NEW_LINE> sub.prop(self, "vzlayout") <NEW_LINE> sub.prop(self, "nzlayout") <NEW_LINE> sub.label("") <NEW_LINE> <DEDENT> sub = layout.row() <NEW_LINE> sub.prop(self, "scale") <NEW_LINE> sub = layout.row() <NEW_LINE> sub.prop(self, "reusemats") <NEW_LINE> sub = layout.row() <NEW_LINE> sub.prop(self, "importall") <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> from . import import_rip <NEW_LINE> keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob", "check_existing", )) <NEW_LINE> return import_rip.load(self, context, **keywords)
Import from RIP file format (.rip)
6259907a7047854f46340da2
class KayakError(Exception): <NEW_LINE> <INDENT> pass
Generic exception for kayak module.
6259907a21bff66bcd72464f
class StudyGuideMetaCampaign(MetaCampaign): <NEW_LINE> <INDENT> event = models.ForeignKey(CalendarEvent, primary_key=True) <NEW_LINE> campaigns = models.ManyToManyField(StudyGuideCampaign, blank=True, null=True) <NEW_LINE> documents = models.ManyToManyField(Document, blank=True, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return str(self.event) <NEW_LINE> <DEDENT> def _update_subscribers(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> campaign = self.campaigns.latest('when') <NEW_LINE> <DEDENT> except StudyGuideCampaign.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for student in utils.students_for_event(self.event): <NEW_LINE> <INDENT> subscriber, created = StudyGuideCampaignSubscriber.objects.get_or_create( campaign=campaign, user=student.user) <NEW_LINE> if created: <NEW_LINE> <INDENT> campaign.subscribers.add(subscriber) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _update_documents(self): <NEW_LINE> <INDENT> if self.campaigns.active(): <NEW_LINE> <INDENT> top_ranked_documents = utils._rank_documents(self.event) <NEW_LINE> if set(top_ranked_documents) == set(self.documents.all()): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> print('[DEBUG] Docs changed! New campaign ahoy!') <NEW_LINE> self.documents = top_ranked_documents <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def _deactivate_campaigns(self): <NEW_LINE> <INDENT> now = timezone.now() <NEW_LINE> for campaign in self.campaigns.active(): <NEW_LINE> <INDENT> campaign.until = now <NEW_LINE> campaign.save(update_fields=['until']) <NEW_LINE> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self._update_documents(): <NEW_LINE> <INDENT> self._deactivate_campaigns() <NEW_LINE> campaign = StudyGuideCampaign.objects.create( sender_address=self.sender_address, sender_name=self.sender_name, event=self.event, when=timezone.now(), until=self.event.start) <NEW_LINE> campaign.documents = self.documents.all() <NEW_LINE> self.campaigns.add(campaign) <NEW_LINE> <DEDENT> self._update_subscribers()
Campaign builder for study guide mailings. Attributes ---------- event : django.db.models.ForeignKey An event for this campaign. campaigns : django.db.models.ManyToManyField Campaigns associated with this builder. documents : django.db.models.ManyToManyField Documents associated with this builder. Notes ----- [TODO] All related "regular" campaigns should be deleted when a study guide metacampaign is deleted.
6259907a3539df3088ecdc7e
class _Functional(Pipe): <NEW_LINE> <INDENT> class __knobs__: <NEW_LINE> <INDENT> fun = None
Base class for Transform, Monitor and Filter. Implements wrapping up a custom python function into a functional pipe.
6259907a7cff6e4e811b7426
class ParticleCS(AnalysisBase): <NEW_LINE> <INDENT> def __init__(self, mobile, reference=None, rotation=None, **kwargs): <NEW_LINE> <INDENT> if 'start' not in kwargs: <NEW_LINE> <INDENT> kwargs['start'] = 1 <NEW_LINE> <DEDENT> if 'step' in kwargs: <NEW_LINE> <INDENT> kwargs['start'] += kwargs['step'] <NEW_LINE> <DEDENT> super(ParticleCS, self).__init__(mobile.universe.trajectory, **kwargs) <NEW_LINE> self.n_frames += 1 <NEW_LINE> self._mobile, self._ref = parse_common_selection( mobile.universe, mobile, reference) <NEW_LINE> if rotation is None: <NEW_LINE> <INDENT> self._rotation = np.eye(3) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rotation = np.asarray(rotation) <NEW_LINE> <DEDENT> <DEDENT> def _prepare(self): <NEW_LINE> <INDENT> self._dx = [[0, 0, 0]] <NEW_LINE> self._trajectory[self.start - self.step] <NEW_LINE> self._pos_prev = self._mobile.positions.copy() <NEW_LINE> <DEDENT> def _single_frame(self): <NEW_LINE> <INDENT> pos = self._mobile.positions.copy() <NEW_LINE> com = pos.mean(0) <NEW_LINE> com_prev = self._pos_prev.mean(0) <NEW_LINE> diff = com - com_prev <NEW_LINE> R = align.rotation_matrix(self._pos_prev - com_prev, self._ref.positions)[0] <NEW_LINE> self._dx.append(np.dot(diff, np.asarray(R).T)) <NEW_LINE> self._pos_prev = pos <NEW_LINE> <DEDENT> def _conclude(self): <NEW_LINE> <INDENT> self.pcs = np.dot(np.asarray(self._dx), self._rotation.T).cumsum(0)
Transform a trajectory of a rigid body in the laboratory coordinate system into the particle coordinate system (PCS). This removes all rotations and recovers the pure translational movement in the PCS. Attributes ---------- pcs : ndarray trajectory in the PCS
6259907a99fddb7c1ca63aca
class Schema(GenericSchema): <NEW_LINE> <INDENT> LABEL = 'l' <NEW_LINE> def __init__(self, mapping, fallback=None): <NEW_LINE> <INDENT> self._label_key = self._get_unique_mapping(mapping, fallback, self.LABEL, 'LABEL') <NEW_LINE> super(Schema, self).__init__(mapping, fallback) <NEW_LINE> <DEDENT> def transform(self, row): <NEW_LINE> <INDENT> label = row.get(self._label_key, None) <NEW_LINE> if label is not None: <NEW_LINE> <INDENT> label = unicode_t(label) <NEW_LINE> <DEDENT> d = self._transform_as_datum(row, None, [self._label_key]) <NEW_LINE> return (label, d) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def predict(cls, row, typed): <NEW_LINE> <INDENT> raise RuntimeError('Classifier schema cannot be auto predicted')
Schema for Classifier service.
6259907a5fcc89381b266e4e
class ApplicationMessage: <NEW_LINE> <INDENT> def __init__(self, packet_id, topic, qos, data, retain): <NEW_LINE> <INDENT> self.packet_id = packet_id <NEW_LINE> self.topic = topic <NEW_LINE> self.qos = qos <NEW_LINE> self.data = data <NEW_LINE> self.retain = retain <NEW_LINE> self.publish_packet = None <NEW_LINE> self.puback_packet = None <NEW_LINE> self.pubrec_packet = None <NEW_LINE> self.pubrel_packet = None <NEW_LINE> self.pubcomp_packet = None <NEW_LINE> <DEDENT> def build_publish_packet(self, dup=False): <NEW_LINE> <INDENT> return PublishPacket.build(self.topic, self.data, self.packet_id, dup, self.qos, self.retain) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.packet_id == other.packet_id
ApplicationMessage and subclasses are used to store published message information flow. These objects can contain different information depending on the way they were created (incoming or outgoing) and the quality of service used between peers.
6259907a1b99ca4002290229
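A small sketch of the ApplicationMessage entry above. Note that equality is defined purely by `packet_id`, which is what matters when matching acknowledgement packets to in-flight messages; the topic and payload values here are made up:

```python
# Two messages with the same packet id compare equal even though payloads differ.
msg_a = ApplicationMessage(packet_id=1, topic="sensors/temp", qos=1,
                           data=b"21.5", retain=False)
msg_b = ApplicationMessage(packet_id=1, topic="sensors/temp", qos=1,
                           data=b"22.0", retain=False)

assert msg_a == msg_b                # matched by packet_id only
assert msg_a.puback_packet is None   # QoS 1 flow not yet acknowledged
```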
class ServiceBuilderException(BaseException): <NEW_LINE> <INDENT> pass
Class for exceptions in BuilderService
6259907a44b2445a339b7651
class ToTensor(object): <NEW_LINE> <INDENT> def __call__(self, sample): <NEW_LINE> <INDENT> image, mask , img_name = sample['image'], sample['mask'], sample['image_name'] <NEW_LINE> image = image.transpose((2, 0, 1)) <NEW_LINE> return {'image': torch.from_numpy(image), 'mask': torch.from_numpy(mask), 'image_name': img_name}
Convert ndarrays in sample to Tensors.
6259907a60cbc95b06365a61
class Encoder(nn.Module): <NEW_LINE> <INDENT> def __init__( self, d_word_vec, n_layers, n_head, d_k, d_v, d_model, d_inner, dropout=0.1, n_position=200): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.position_enc = PositionalEncoding(d_word_vec, n_position=n_position) <NEW_LINE> self.dropout = nn.Dropout(p=dropout) <NEW_LINE> self.layer_stack = nn.ModuleList([ EncoderLayer(d_model, d_inner, n_head, d_k, d_v, dropout=dropout) for _ in range(n_layers)]) <NEW_LINE> self.layer_norm = nn.LayerNorm(d_model, eps=1e-6) <NEW_LINE> <DEDENT> def forward(self, src_seq, src_mask, return_attns=False): <NEW_LINE> <INDENT> enc_slf_attn_list = [] <NEW_LINE> enc_output = self.dropout(self.position_enc(src_seq)) <NEW_LINE> enc_output = self.layer_norm(enc_output) <NEW_LINE> for enc_layer in self.layer_stack: <NEW_LINE> <INDENT> enc_output, enc_slf_attn = enc_layer(enc_output, slf_attn_mask=src_mask) <NEW_LINE> enc_slf_attn_list += [enc_slf_attn] if return_attns else [] <NEW_LINE> <DEDENT> if return_attns: <NEW_LINE> <INDENT> return enc_output, enc_slf_attn_list <NEW_LINE> <DEDENT> return enc_output,
An encoder model with a self-attention mechanism.
6259907a460517430c432d4d
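A shape-level sketch of driving the Encoder entry above. It assumes PyTorch plus the companion `PositionalEncoding` and `EncoderLayer` classes from the same codebase are importable, and that `src_seq` has already been embedded to `d_word_vec`:

```python
import torch

enc = Encoder(d_word_vec=512, n_layers=2, n_head=8, d_k=64, d_v=64,
              d_model=512, d_inner=2048)

batch, seq_len = 2, 36
src_seq = torch.rand(batch, seq_len, 512)        # pre-embedded tokens
src_mask = torch.ones(batch, 1, seq_len).bool()  # no padding positions masked

enc_output, = enc(src_seq, src_mask)             # forward returns a 1-tuple
print(enc_output.shape)                          # torch.Size([2, 36, 512])
```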
class CylinderToImageMap(PointMap): <NEW_LINE> <INDENT> def __init__(self, L): <NEW_LINE> <INDENT> self._L = L <NEW_LINE> <DEDENT> def map(self, z, theta): <NEW_LINE> <INDENT> return (self._L/2 - self._L*theta/PI, z) <NEW_LINE> <DEDENT> def invmap(self, x, y): <NEW_LINE> <INDENT> return (y, PI*(0.5-x/self._L))
Performs a mapping from a cylinder to a plane and back
6259907a4a966d76dd5f08cd
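A round-trip sketch for the CylinderToImageMap entry above, assuming the `PointMap` base class it inherits from is available and that the module-level `PI` constant is `math.pi`:

```python
import math

PI = math.pi  # assumption: the module defines PI like this

m = CylinderToImageMap(L=10.0)

x, y = m.map(z=1.5, theta=PI / 4)   # -> (2.5, 1.5)
z, theta = m.invmap(x, y)           # -> (1.5, 0.78539...)

assert abs(z - 1.5) < 1e-12 and abs(theta - PI / 4) < 1e-12
```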
class ConfigReader(object): <NEW_LINE> <INDENT> def __init__(self, config_file="./OECluster.cfg"): <NEW_LINE> <INDENT> self._config_file = config_file <NEW_LINE> self._config_parser = cp.ConfigParser() <NEW_LINE> self._config_parser.read(config_file) <NEW_LINE> <DEDENT> def get_config_section(self, section): <NEW_LINE> <INDENT> dict1 = {} <NEW_LINE> options = self._config_parser.options(section) <NEW_LINE> for option in options: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> dict1[option] = self._config_parser.get(section, option) <NEW_LINE> if dict1[option] == -1: <NEW_LINE> <INDENT> print("skip: %s" % option) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print("exception on %s!" % option) <NEW_LINE> dict1[option] = None <NEW_LINE> <DEDENT> <DEDENT> return dict1 <NEW_LINE> <DEDENT> if __name__ == "__main__": <NEW_LINE> <INDENT> print("This class should not be called directly.")
Reads an INI-style configuration file (default ./OECluster.cfg) and exposes each section as a dictionary of option/value pairs via get_config_section.
6259907a283ffb24f3cf5288
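A usage sketch for the ConfigReader entry above; the file path and section name are hypothetical:

```python
reader = ConfigReader(config_file="/etc/OECluster.cfg")   # hypothetical path
cluster_opts = reader.get_config_section("cluster")       # hypothetical section name

for option, value in cluster_opts.items():
    print(option, "=", value)
```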
class MultiplayerMenu(Menu): <NEW_LINE> <INDENT> def __init__(self, gui, surface): <NEW_LINE> <INDENT> super(MultiplayerMenu, self).__init__(gui, surface) <NEW_LINE> option_two_player = MenuOption("2 PLAYER", function=self.two_player) <NEW_LINE> width, height = option_two_player.get_size() <NEW_LINE> option_two_player.set_pos(((self.surface.get_width() // 2) - (width // 2), 185)) <NEW_LINE> self.options.append(option_two_player) <NEW_LINE> option_three_player = MenuOption("3 PLAYER", function=self.three_player) <NEW_LINE> width, height = option_three_player.get_size() <NEW_LINE> option_three_player.set_pos(((self.surface.get_width() // 2) - (width // 2), 230)) <NEW_LINE> self.options.append(option_three_player) <NEW_LINE> option_four_player = MenuOption("4 PLAYER", function=self.four_player) <NEW_LINE> width, height = option_four_player.get_size() <NEW_LINE> option_four_player.set_pos(((self.surface.get_width() // 2) - (width // 2), 275)) <NEW_LINE> self.options.append(option_four_player) <NEW_LINE> option_back = MenuOption("BACK", (10, 450), function=self.back) <NEW_LINE> self.options.append(option_back) <NEW_LINE> <DEDENT> def two_player(self): <NEW_LINE> <INDENT> self.start_customization_menu(2) <NEW_LINE> <DEDENT> def three_player(self): <NEW_LINE> <INDENT> self.start_customization_menu(3) <NEW_LINE> <DEDENT> def four_player(self): <NEW_LINE> <INDENT> self.start_customization_menu(4) <NEW_LINE> <DEDENT> def start_customization_menu(self, player_amt): <NEW_LINE> <INDENT> customization_menu = CustomizationMenu(self.gui, self.surface, player_amt) <NEW_LINE> customization_menu.handle() <NEW_LINE> self.surface.fill((0, 0, 0)) <NEW_LINE> self.__init__(self.gui, self.surface) <NEW_LINE> <DEDENT> def back(self): <NEW_LINE> <INDENT> self.running = False
The menu that allows selection of the amount of players in the game
6259907a5166f23b2e244dbf
class NovellBugzilla(_parent): <NEW_LINE> <INDENT> version = '0.3' <NEW_LINE> OBS_URL = 'https://api.opensuse.org' <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(NovellBugzilla, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def _login(self, user, password): <NEW_LINE> <INDENT> self._transport.auth_params = (self.user, self.password) <NEW_LINE> return '' <NEW_LINE> <DEDENT> def _logout(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def connect(self, url=None): <NEW_LINE> <INDENT> if url is None: <NEW_LINE> <INDENT> url = 'https://apibugzilla.novell.com/xmlrpc.cgi' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> spliturl = urlparse.urlsplit(url) <NEW_LINE> hostname = spliturl.hostname or url <NEW_LINE> path = spliturl.hostname and spliturl.path or 'xmlrpc.cgi' <NEW_LINE> if not hostname.startswith('api'): <NEW_LINE> <INDENT> hostname = 'api'+hostname <NEW_LINE> <DEDENT> url = urlparse.urlunsplit(('https', hostname, path, spliturl.query, spliturl.fragment)) <NEW_LINE> <DEDENT> return super(NovellBugzilla, self).connect(url) <NEW_LINE> <DEDENT> def readconfig(self, configpath=None): <NEW_LINE> <INDENT> super(NovellBugzilla, self).readconfig(configpath) <NEW_LINE> if not self.user or not self.password: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import osc.conf <NEW_LINE> osc.conf.get_config() <NEW_LINE> conf = osc.conf.config['api_host_options'][self.OBS_URL] <NEW_LINE> user = conf.get('user') <NEW_LINE> pasw = conf.get('pass') <NEW_LINE> if self.user and self.user != user: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.user = user <NEW_LINE> self.password = pasw <NEW_LINE> log.info("Read credentials from ~/.oscrc") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass
bugzilla.novell.com is a standard bugzilla 4.4 with some extensions. By default, it uses a proprietary AccessManager login system, but by using a special domain, you can force it to use HTTP Basic Auth instead. This class can also read credentials from ~/.oscrc if exists, so it does not have to be duplicated in /etc/bugzillarc or ~/.bugzillarc.
6259907aad47b63b2c5a9237
class PageHistoryView(PermissionRequiredMixin, PageTabsContentMixin, generic.DetailView): <NEW_LINE> <INDENT> model = Page <NEW_LINE> context_object_name = "page_instance" <NEW_LINE> template_name = "sveedocuments/board/page_history.html" <NEW_LINE> permission_required = "sveedocuments.change_page" <NEW_LINE> raise_exception = True <NEW_LINE> def get_object(self, *args, **kwargs): <NEW_LINE> <INDENT> cache_key = "_cache_get_object" <NEW_LINE> if not hasattr(self, cache_key): <NEW_LINE> <INDENT> setattr(self, cache_key, super(PageHistoryView, self).get_object(*args, **kwargs)) <NEW_LINE> <DEDENT> return getattr(self, cache_key) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(PageHistoryView, self).get_context_data(**kwargs) <NEW_LINE> context.update({ 'last_revisions': self.object.revision.all().order_by('-created'), }) <NEW_LINE> return context
*Page* history
6259907a627d3e7fe0e0886f
class HTTPConfiguration(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'method': {'key': 'method', 'type': 'str'}, 'headers': {'key': 'headers', 'type': '[HTTPHeader]'}, 'valid_status_codes': {'key': 'validStatusCodes', 'type': '[int]'}, } <NEW_LINE> def __init__( self, *, method: Optional[Union[str, "HTTPMethod"]] = None, headers: Optional[List["HTTPHeader"]] = None, valid_status_codes: Optional[List[int]] = None, **kwargs ): <NEW_LINE> <INDENT> super(HTTPConfiguration, self).__init__(**kwargs) <NEW_LINE> self.method = method <NEW_LINE> self.headers = headers <NEW_LINE> self.valid_status_codes = valid_status_codes
HTTP configuration of the connectivity check. :param method: HTTP method. Possible values include: "Get". :type method: str or ~azure.mgmt.network.v2018_02_01.models.HTTPMethod :param headers: List of HTTP headers. :type headers: list[~azure.mgmt.network.v2018_02_01.models.HTTPHeader] :param valid_status_codes: Valid status codes. :type valid_status_codes: list[int]
6259907a26068e7796d4e326
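A construction sketch for the HTTPConfiguration model above, matching its documented parameters; `HTTPHeader` is assumed to come from the same azure-mgmt-network models package that the docstring references:

```python
from azure.mgmt.network.v2018_02_01.models import HTTPConfiguration, HTTPHeader

http_config = HTTPConfiguration(
    method="Get",                                  # only documented value
    headers=[HTTPHeader(name="Accept", value="application/json")],
    valid_status_codes=[200, 202],
)
```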
class Pagination(object): <NEW_LINE> <INDENT> def __init__(self, page, per_page, total_count): <NEW_LINE> <INDENT> self.page = page <NEW_LINE> self.per_page = per_page <NEW_LINE> self.total_count = total_count <NEW_LINE> self.max_page = self.total_count / self.per_page <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_prev(self): <NEW_LINE> <INDENT> return self.page > 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_next(self): <NEW_LINE> <INDENT> return self.page < self.max_page <NEW_LINE> <DEDENT> def iter_pages(self, left_edge=2, left_current=7, right_current=8, right_edge=2): <NEW_LINE> <INDENT> last = -1 <NEW_LINE> for num in xrange(self.max_page + 1): <NEW_LINE> <INDENT> if num < left_edge or (num > self.page - left_current - 1 and num <= self.page + right_current) or num > self.max_page - right_edge: <NEW_LINE> <INDENT> if last + 1 != num: <NEW_LINE> <INDENT> yield None <NEW_LINE> <DEDENT> yield num <NEW_LINE> last = num <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def articles_on_page(self, all_articles): <NEW_LINE> <INDENT> return all_articles[self.page * self.per_page: (self.page + 1) * self.per_page]
http://flask.pocoo.org/snippets/44/
6259907aaad79263cf4301a1
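A usage sketch for the Pagination helper above (with the `has_next` attribute typo corrected). The class relies on `xrange` and integer division, so this assumes Python 2 semantics:

```python
articles = ['article-%d' % i for i in range(95)]

pager = Pagination(page=3, per_page=10, total_count=len(articles))

print(pager.has_prev)                    # True
print(pager.has_next)                    # True (page 3 of 0..9)
print(pager.articles_on_page(articles))  # articles 30..39

# Windowed page links; None marks a gap ("...") in the pagination bar.
print(list(pager.iter_pages()))
```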
class GVG_099: <NEW_LINE> <INDENT> play = Hit(RANDOM_ENEMY_MINION, 4)
Bomb Lobber
6259907ad268445f2663a852
class FreeLieAlgebraBases(Category_realization_of_parent): <NEW_LINE> <INDENT> def __init__(self, base): <NEW_LINE> <INDENT> Category_realization_of_parent.__init__(self, base) <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return "Category of bases of %s" % self.base() <NEW_LINE> <DEDENT> def super_categories(self): <NEW_LINE> <INDENT> return [LieAlgebras(self.base().base_ring()).WithBasis(), Realizations(self.base())]
The category of bases of a free Lie algebra.
6259907a91f36d47f2231b83
class Event(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._future = Future() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<%s %s>' % ( self.__class__.__name__, 'set' if self.is_set() else 'clear') <NEW_LINE> <DEDENT> def is_set(self): <NEW_LINE> <INDENT> return self._future.done() <NEW_LINE> <DEDENT> def set(self): <NEW_LINE> <INDENT> if not self._future.done(): <NEW_LINE> <INDENT> self._future.set_result(None) <NEW_LINE> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> if self._future.done(): <NEW_LINE> <INDENT> self._future = Future() <NEW_LINE> <DEDENT> <DEDENT> def wait(self, timeout=None): <NEW_LINE> <INDENT> if timeout is None: <NEW_LINE> <INDENT> return self._future <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return gen.with_timeout(timeout, self._future)
An event blocks coroutines until its internal flag is set to True. Similar to `threading.Event`.
6259907af548e778e596cf79
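A coroutine-level sketch of the Event entry above, which mirrors `tornado.locks.Event`; it assumes a Tornado IOLoop is available and that the class's `Future`/`gen` references come from Tornado:

```python
from tornado import gen, ioloop

event = Event()

@gen.coroutine
def waiter():
    print("waiting for event")
    yield event.wait()          # blocks this coroutine, not the IOLoop
    print("event was set")

@gen.coroutine
def setter():
    yield gen.sleep(0.1)
    event.set()

ioloop.IOLoop.current().run_sync(lambda: gen.multi([waiter(), setter()]))
```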
class DoneRead(object): <NEW_LINE> <INDENT> def doneRead(self, token, error, lost_size, data, eos): <NEW_LINE> <INDENT> pass
Call back interface for 'read' command.
6259907a3317a56b869bf23a
class DeferredJobContext(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._on_success = [] <NEW_LINE> self._on_failure = [] <NEW_LINE> <DEDENT> def add_deferred_job(self, def_job): <NEW_LINE> <INDENT> self.add_on_success_job(def_job) <NEW_LINE> self.add_on_failure_job(def_job) <NEW_LINE> <DEDENT> def add_deferred_jobs(self, def_jobs): <NEW_LINE> <INDENT> self.add_on_success_jobs(def_jobs) <NEW_LINE> self.add_on_failure_jobs(def_jobs) <NEW_LINE> <DEDENT> def add_on_success_job(self, def_job): <NEW_LINE> <INDENT> self._on_success.append(def_job) <NEW_LINE> <DEDENT> def add_on_success_jobs(self, def_jobs): <NEW_LINE> <INDENT> self._on_success += def_jobs <NEW_LINE> <DEDENT> def add_on_failure_job(self, def_job): <NEW_LINE> <INDENT> self._on_failure.append(def_job) <NEW_LINE> <DEDENT> def add_on_failure_jobs(self, def_jobs): <NEW_LINE> <INDENT> self._on_failure += def_jobs <NEW_LINE> <DEDENT> def _run_jobs(self, jobs): <NEW_LINE> <INDENT> for job in jobs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> job.run() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> log.exception("Failed to execute job (%s)", job) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def handle(self, success): <NEW_LINE> <INDENT> if success: <NEW_LINE> <INDENT> self._run_jobs(self._on_success) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._run_jobs(self._on_failure)
Generic deferred job context. It supports registering deferred actions to be called in case of * success * failure * always Each action should have a `run()` method, which will be called to execute the deferred action. Each action must be independent from other actions, and if one depends on other actions, this dependency must be wrapped into a parent action which will handle all the necessary logic. When running a deferred action, the context expects no output and discards any exceptions by simply logging it.
6259907a3346ee7daa338355
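A minimal sketch of registering deferred work with the DeferredJobContext above. Each job only needs a `run()` method, and `handle(success)` decides which list fires (the module-level `log` the class uses on failures is assumed to be a standard logger):

```python
class CleanupTempFiles(object):
    """Hypothetical deferred job: delete scratch files whatever the outcome."""
    def run(self):
        print("removing temporary files")

class NotifySuccess(object):
    """Hypothetical deferred job: only fires when the operation succeeded."""
    def run(self):
        print("sending success notification")

ctx = DeferredJobContext()
ctx.add_deferred_job(CleanupTempFiles())   # runs on success *and* failure
ctx.add_on_success_job(NotifySuccess())    # runs on success only

ctx.handle(success=True)   # -> cleanup + notification
```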
class JSONPSerializer(JSONSerializer): <NEW_LINE> <INDENT> _mime = 'application/javascript' <NEW_LINE> def __init__(self, jsonp='read', **kwargs): <NEW_LINE> <INDENT> super(JSONPSerializer, self).__init__(**kwargs) <NEW_LINE> self._prefix = jsonp <NEW_LINE> <DEDENT> def __call__(self, results): <NEW_LINE> <INDENT> return "// fetched from Indico\n%s(%s);" % (self._prefix, super(JSONPSerializer, self).__call__(results))
Just adds prefix
6259907a23849d37ff852aa1
class HttpBodyConsumer(Protocol): <NEW_LINE> <INDENT> def __init__(self, length, finished): <NEW_LINE> <INDENT> self.remaining = length <NEW_LINE> self.finished = finished <NEW_LINE> self.body = '' <NEW_LINE> <DEDENT> def dataReceived(self, data): <NEW_LINE> <INDENT> if self.remaining: <NEW_LINE> <INDENT> self.body += data[:self.remaining] <NEW_LINE> self.remaining -= len(data) <NEW_LINE> <DEDENT> <DEDENT> def connectionLost(self, reason): <NEW_LINE> <INDENT> self.finished.callback(self.body)
Asynchronous HTTP response consumer.
6259907a4f6381625f19a1a1
class _MouseEvent(wx.PyCommandEvent): <NEW_LINE> <INDENT> def __init__(self, EventType, NativeEvent, WinID, Coords = None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.SetEventType( EventType ) <NEW_LINE> self._NativeEvent = NativeEvent <NEW_LINE> self.Coords = Coords <NEW_LINE> <DEDENT> def GetCoords(self): <NEW_LINE> <INDENT> return self.Coords <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> d = self._getAttrDict() <NEW_LINE> if name in d: <NEW_LINE> <INDENT> return d[name] <NEW_LINE> <DEDENT> return getattr(self._NativeEvent, name)
This event class takes a regular wxWindows mouse event as a parameter, and wraps it so that there is access to all the original methods. This is similar to subclassing, but you can't subclass a wxWindows event. The goal is to be able to use it just like a regular mouse event. It adds the method: GetCoords(), which returns an (x,y) tuple in world coordinates. Another difference is that it is a CommandEvent, which propagates up the window hierarchy until it is handled.
6259907a4c3428357761bca2
class BufferedReader: <NEW_LINE> <INDENT> def __init__(self, path, max_in_mem=256 * 1024, chunk_size=32 * 1024, mode="r"): <NEW_LINE> <INDENT> if "a" in mode or "w" in mode: <NEW_LINE> <INDENT> raise InvalidFileMode("Cannot open file in write or append mode") <NEW_LINE> <DEDENT> self.__path = path <NEW_LINE> self.__file = salt.utils.files.fopen(self.__path, mode) <NEW_LINE> self.__max_in_mem = max_in_mem <NEW_LINE> self.__chunk_size = chunk_size <NEW_LINE> self.__buffered = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def buffered(self): <NEW_LINE> <INDENT> return self.__buffered <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self.__buffered is None: <NEW_LINE> <INDENT> multiplier = self.__max_in_mem // self.__chunk_size <NEW_LINE> self.__buffered = "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> multiplier = 1 <NEW_LINE> self.__buffered = self.__buffered[self.__chunk_size :] <NEW_LINE> <DEDENT> data = self.__file.read(self.__chunk_size * multiplier) <NEW_LINE> data = salt.utils.stringutils.to_str(data) <NEW_LINE> if not data: <NEW_LINE> <INDENT> self.__file.close() <NEW_LINE> raise StopIteration <NEW_LINE> <DEDENT> self.__buffered += data <NEW_LINE> return self.__buffered <NEW_LINE> <DEDENT> __next__ = next <NEW_LINE> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> if self.__file.closed is False: <NEW_LINE> <INDENT> self.__file.close()
This object allows iterating through the contents of a file keeping X configurable bytes in memory which can be used to, for example, do regex search/matching on more than a single line. So, **an imaginary, non accurate**, example could be: 1 - Initiate the BufferedReader filling it to max_in_mem: br = [1, 2, 3] 2 - next chunk(pop chunk_size from the left, append chunk_size to the right): br = [2, 3, 4] :type path: str :param path: The file path to be read :type max_in_mem: int :param max_in_mem: The maximum bytes kept in memory while iterating through the file. Default 256KB. :type chunk_size: int :param chunk_size: The size of each consequent read chunk. Default 32KB. :type mode: str :param mode: The mode the file should be opened. **Only read modes**.
6259907a56ac1b37e63039d7
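A usage sketch of the BufferedReader above, scanning for a pattern that may straddle chunk boundaries; the import path is an assumption (the class lives in Salt's utils), and the log path is hypothetical:

```python
import re
from salt.utils.filebuffer import BufferedReader  # assumed module path

pattern = re.compile(r"segfault at [0-9a-f]+")

with BufferedReader("/var/log/kern.log") as breader:   # hypothetical file
    for window in breader:
        # `window` always holds up to max_in_mem bytes spanning chunk edges,
        # so a match split across two reads is still found.
        if pattern.search(window):
            print("match found")
            break
```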
class IPaymentFailedEvent(IPaymentEvent): <NEW_LINE> <INDENT> pass
This event gets triggered when payment failed.
6259907abe8e80087fbc0a7f
class Seedable(Describable): <NEW_LINE> <INDENT> __undescribed__ = {'rnd'} <NEW_LINE> def __init__(self, seed=None): <NEW_LINE> <INDENT> self.rnd = RandomState(seed) <NEW_LINE> <DEDENT> def __init_from_description__(self, description): <NEW_LINE> <INDENT> Seedable.__init__(self)
Base class for all objects that use randomness. It offers a self.rnd which is a RandomState. Dev-note: It inherits from Describable in order to implement __init_from_description__ and to make rnd undescribed.
6259907a5166f23b2e244dc1
class UserAfterLoginSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> token = serializers.SerializerMethodField() <NEW_LINE> def get_token(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return f'Token {Token.objects.get(user=value).key}' <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = [ 'id', 'username', 'email', 'first_name', 'last_name', 'token', 'is_superuser', 'is_active', 'is_staff', ]
Must be read only
6259907aad47b63b2c5a9239
class Discovery(Pluggable): <NEW_LINE> <INDENT> config_subdirectory = "discovery" <NEW_LINE> entry_point = "lighthouse.discovery" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.shutdown = threading.Event() <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def start_watching(self, cluster, should_update): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def stop_watching(self, cluster): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def report_up(self, service, port): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def report_down(self, service, port): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.shutdown.set() <NEW_LINE> self.disconnect()
Base class for discovery method plugins. Unlike the `Balancer` base class for load balancer plugins, this discovery method plugin has several methods that subclasses are expected to define. Subclasses are used for both the writer process *and* the reporter process so each subclass needs to be able to report on individual nodes as well as monitor and collect the status of all defined clusters. It is important that the various instances of lighthouse running on various machines agree with each other on the status of clusters so a distributed system with strong CP characteristics is recommended.
6259907a2c8b7c6e89bd51d4
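A skeleton subclass sketch for the Discovery plugin base above, showing which hooks a concrete discovery method has to fill in; the backend calls are print stubs, not a real client:

```python
class StaticFileDiscovery(Discovery):
    """Hypothetical discovery method that reads cluster nodes from a local file."""

    name = "static_file"

    def connect(self):
        print("opening nodes file")        # stub for real backend setup

    def disconnect(self):
        print("closing nodes file")

    def start_watching(self, cluster, should_update):
        # A real plugin would poll/watch and call should_update() on changes.
        should_update()

    def stop_watching(self, cluster):
        print("stopped watching", cluster)

    def report_up(self, service, port):
        print("node up:", service, port)

    def report_down(self, service, port):
        print("node down:", service, port)
```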
class ResetAction(Action): <NEW_LINE> <INDENT> name = "reset" <NEW_LINE> def __init__(self, app): <NEW_LINE> <INDENT> super(ResetAction, self).__init__(app) <NEW_LINE> <DEDENT> def anonymize(self, value): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def discover(self, value): <NEW_LINE> <INDENT> pass
Action plugin that resets a field to None
6259907aaad79263cf4301a3
class PhysicalObject: <NEW_LINE> <INDENT> def __init__(self, available_copies, reserved_copies, location): <NEW_LINE> <INDENT> self.__available_copies = available_copies <NEW_LINE> self.__reserved_copies = reserved_copies <NEW_LINE> self.__location = location <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return print_object(self) <NEW_LINE> <DEDENT> def get_available_copies(self): <NEW_LINE> <INDENT> return self.__available_copies <NEW_LINE> <DEDENT> def set_available_copies(self, available_copies): <NEW_LINE> <INDENT> self.__available_copies = available_copies <NEW_LINE> <DEDENT> def get_reserved_copies(self): <NEW_LINE> <INDENT> return self.__reserved_copies <NEW_LINE> <DEDENT> def set_reserved_copies(self, reserved_copies): <NEW_LINE> <INDENT> self.__reserved_copies = reserved_copies <NEW_LINE> <DEDENT> def get_location(self): <NEW_LINE> <INDENT> return self.__location
Class used as superclass for items only physically available in a library. Currently for the class PhysicalBook
6259907a8a349b6b43687c45
class DeleteObjectMixin: <NEW_LINE> <INDENT> @coroutine <NEW_LINE> def register_viewset(self): <NEW_LINE> <INDENT> yield from self.app_session.register( self.delete_object, self.get_uri('delete')) <NEW_LINE> self.logger.debug( 'Remote procedure to delete {} registered.'.format(self.name)) <NEW_LINE> if hasattr(super(), 'register_viewset'): <NEW_LINE> <INDENT> yield from super().register_viewset() <NEW_LINE> <DEDENT> <DEDENT> @coroutine <NEW_LINE> def delete_object(self, *args, **kwargs): <NEW_LINE> <INDENT> self.logger.debug('Remote procedure delete_object called.') <NEW_LINE> id = kwargs.get('id') <NEW_LINE> yield from self.database[self.name].remove({'id': id}) <NEW_LINE> self.app_session.publish(self.get_uri('deleted'), [], id=id) <NEW_LINE> success = '{} object {} successfully deleted.'.format(self.name, id) <NEW_LINE> return { 'type': 'success', 'details': success}
Interactions to delete an existing object in the database.
6259907ad268445f2663a853
class Update(models.Model): <NEW_LINE> <INDENT> event = models.ForeignKey( Event, on_delete=models.CASCADE, limit_choices_to={'featured': True}, db_index=True ) <NEW_LINE> title = models.CharField("Update title", max_length=200) <NEW_LINE> author = models.ForeignKey( UserModel, on_delete=models.CASCADE, limit_choices_to={'is_staff': True}) <NEW_LINE> update = models.TextField() <NEW_LINE> update_formatted = models.TextField(blank=True, editable=False) <NEW_LINE> pub_time = models.DateTimeField(auto_now_add=True) <NEW_LINE> last_updated = models.DateTimeField(auto_now=True) <NEW_LINE> audio = models.FileField( upload_to='audio/events/special/', blank=True, null=True, help_text="Should be MP3 format" ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('event_update_detail', [str(self.event.slug), str(self.id)]) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.update_formatted = sanetize_text(self.update) <NEW_LINE> super(Update, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def comments_open(self): <NEW_LINE> <INDENT> return self.event.comments_open <NEW_LINE> <DEDENT> def has_image(self): <NEW_LINE> <INDENT> if self.updateimage_set.count(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def get_image(self): <NEW_LINE> <INDENT> return self.updateimage_set.latest('id') <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_gallery_url(self): <NEW_LINE> <INDENT> return ('update_slides', [self.event.slug, str(self.id)]) <NEW_LINE> <DEDENT> def get_top_assets(self): <NEW_LINE> <INDENT> return self.updateimage_set.all()
Allows updating the event in near real-time, with blog-style content updates.
6259907a7b180e01f3e49d5a
class DiscussionAPIUtilsTestCase(ModuleStoreTestCase): <NEW_LINE> <INDENT> CREATE_USER = False <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(DiscussionAPIUtilsTestCase, self).setUp() <NEW_LINE> self.course = CourseFactory.create() <NEW_LINE> self.course.discussion_blackouts = [datetime.now(UTC) - timedelta(days=3), datetime.now(UTC) + timedelta(days=3)] <NEW_LINE> self.student_role = RoleFactory(name='Student', course_id=self.course.id) <NEW_LINE> self.moderator_role = RoleFactory(name='Moderator', course_id=self.course.id) <NEW_LINE> self.community_ta_role = RoleFactory(name='Community TA', course_id=self.course.id) <NEW_LINE> self.student = UserFactory(username='student', email='[email protected]') <NEW_LINE> self.student_enrollment = CourseEnrollmentFactory(user=self.student) <NEW_LINE> self.student_role.users.add(self.student) <NEW_LINE> self.moderator = UserFactory(username='moderator', email='[email protected]', is_staff=True) <NEW_LINE> self.moderator_enrollment = CourseEnrollmentFactory(user=self.moderator) <NEW_LINE> self.moderator_role.users.add(self.moderator) <NEW_LINE> self.community_ta = UserFactory(username='community_ta1', email='[email protected]') <NEW_LINE> self.community_ta_role.users.add(self.community_ta) <NEW_LINE> <DEDENT> def test_discussion_open_for_user(self): <NEW_LINE> <INDENT> self.assertFalse(discussion_open_for_user(self.course, self.student)) <NEW_LINE> self.assertTrue(discussion_open_for_user(self.course, self.moderator)) <NEW_LINE> self.assertTrue(discussion_open_for_user(self.course, self.community_ta))
Base test-case class for the Discussion REST API utils.
6259907a91f36d47f2231b84
class AddNoExplosionOcclusionProperty(bpy.types.Operator): <NEW_LINE> <INDENT> bl_label = "No Explosion Occlusion" <NEW_LINE> bl_idname = "object.add_no_explosion_occlusion_property" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> message = "Adding No Explosion Occlusion Property" <NEW_LINE> self.report({'INFO'}, message) <NEW_LINE> cbPrint(message) <NEW_LINE> return add.add_no_explosion_occlusion_property(self, context)
Click to add a no explosion occlusion property.
6259907aa05bb46b3848be1e
class GridNAS_Model_CIFS(DDNModelPlugin): <NEW_LINE> <INDENT> relname = 'cifss' <NEW_LINE> modname = 'ZenPacks.DDN.GridScalerv2.CIFS' <NEW_LINE> def prepTask(self, device, log): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> log.debug("%s: preparing for CIFS info", device.id) <NEW_LINE> cmdinfo = [{ 'cmd': 'nasctl cifs_show', 'parser': gs.GridNasCIFSParse, 'filter': '', }] <NEW_LINE> myCmds = [] <NEW_LINE> for c in cmdinfo: <NEW_LINE> <INDENT> myCmds.append(gsc.Cmd(command=c['cmd'], template=c['filter'], config=self.config, parser=c['parser'])) <NEW_LINE> <DEDENT> self.cmd = myCmds <NEW_LINE> log.debug('XXX _prepCIFSLists(): self.cmd = %r', self.cmd) <NEW_LINE> <DEDENT> def parseResults(self, resultList): <NEW_LINE> <INDENT> errmsgs = {} <NEW_LINE> log.debug('XXXX within _parseResults with %r', resultList) <NEW_LINE> rm = self.relMap() <NEW_LINE> for success, result in resultList: <NEW_LINE> <INDENT> log.debug("XXXX _parseResults (success/ds %s) %s", success, result) <NEW_LINE> if success: <NEW_LINE> <INDENT> if isinstance(result.result, dict): <NEW_LINE> <INDENT> info = result.result <NEW_LINE> for k, v in info.items(): <NEW_LINE> <INDENT> v = gs.dictflatten(v) <NEW_LINE> log.debug('XXX CIFS %s, attribs %r', k, v) <NEW_LINE> v['id'] = 'CIFS_{0}'.format(k) <NEW_LINE> v['title'] = 'CIFS_{0}'.format(k) <NEW_LINE> om = self.objectMap() <NEW_LINE> om.updateFromDict(v) <NEW_LINE> rm.append(om) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.warn('XXXX success result type %s value %r', type(result.result), result.result) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> errmsgs.update(str(result)) <NEW_LINE> <DEDENT> <DEDENT> res = [rm] <NEW_LINE> d, self._task_defer = self._task_defer, None <NEW_LINE> if d is None or d.called: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if errmsgs: <NEW_LINE> <INDENT> log.warn('XXXX GridNas CIFS collection failed %s', str(errmsgs)) <NEW_LINE> d.callback([{}]) <NEW_LINE> return <NEW_LINE> <DEDENT> log.debug("collected GridNas CIFS property: %r" % res) <NEW_LINE> d.callback(res) <NEW_LINE> <DEDENT> def process(self, device, results, log): <NEW_LINE> <INDENT> log.debug('XXXX modeler process(dev=%r) got results %s', device, str(results)) <NEW_LINE> return results
Models GridNas CIFS
6259907a7cff6e4e811b742a
class Refresh(BaseCommand): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> options = self.options <NEW_LINE> ref = options["<ref>"] <NEW_LINE> with PaperDatabase(self.database_path) as paper_database: <NEW_LINE> <INDENT> processed_ref = process_and_validate_ref(ref, paper_database) <NEW_LINE> paper_database.refresh_paper(processed_ref)
Refreshes information about a paper from the arXiv. Useful if new versions were released with updated information.
6259907a92d797404e389851
class UpcommingMeetingWithTimeline: <NEW_LINE> <INDENT> def __init__(self, meeting: Meeting, height: int): <NEW_LINE> <INDENT> medium_font = pygame.font.Font("assets/FreeSansBold.ttf", 15) <NEW_LINE> self._time_line = TimeLine((0, 30), meeting.get_start_time_datetime(), medium_font, (20, 20, 20)) <NEW_LINE> self._next_meeting = UpcommingMeeting(meeting, height) <NEW_LINE> self.image = pygame.surface.Surface((self.get_content_width(), height)) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.image.set_colorkey(DEFAULT_BGCOLOR) <NEW_LINE> <DEDENT> def get_content_width(self): <NEW_LINE> <INDENT> return self._time_line.rect.width + self._next_meeting.rect.width + 4 <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.image.fill(DEFAULT_BGCOLOR) <NEW_LINE> self.image.set_colorkey(DEFAULT_BGCOLOR) <NEW_LINE> self._time_line.update() <NEW_LINE> self._time_line.render(self.image) <NEW_LINE> self._next_meeting.rect.left = self._time_line.rect.width + 4 <NEW_LINE> self._next_meeting.render(self.image) <NEW_LINE> <DEDENT> def render(self, surface): <NEW_LINE> <INDENT> self.update() <NEW_LINE> surface.blit(self.image, self.rect)
Composes the texts to be displayed
6259907a01c39578d7f1442a
class SplitComponentState(SplitComponentStateRefMut): <NEW_LINE> <INDENT> def drop(self): <NEW_LINE> <INDENT> if self.ptr != None: <NEW_LINE> <INDENT> self.ptr = None <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.drop() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.drop() <NEW_LINE> <DEDENT> def __init__(self, ptr): <NEW_LINE> <INDENT> self.ptr = ptr
The state object that describes a single segment's information to visualize.
6259907a4c3428357761bca4