Dataset columns: code (string, 4 to 4.48k characters), docstring (string, 1 to 6.45k characters), _id (string, 24 characters)
class Chain(object): <NEW_LINE> <INDENT> def __init__(self, index, topology, id): <NEW_LINE> <INDENT> self.index = index <NEW_LINE> self.topology = topology <NEW_LINE> self.id = id <NEW_LINE> self._residues = [] <NEW_LINE> <DEDENT> def residues(self): <NEW_LINE> <INDENT> return iter(self._residues) <NEW_LINE> <DEDENT> def atoms(self): <NEW_LINE> <INDENT> for residue in self._residues: <NEW_LINE> <INDENT> for atom in residue._atoms: <NEW_LINE> <INDENT> yield atom
A Chain object represents a chain within a Topology.
625990707d847024c075dc79
class WKST_CopyLink(bpy.types.Menu): <NEW_LINE> <INDENT> bl_label = "Instance" <NEW_LINE> bl_idname = "wkst.copy_link" <NEW_LINE> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> layout.operator("object.make_links_data", "Set Instance", icon="LINKED").type = 'OBDATA' <NEW_LINE> layout.operator("object.makesingle", "Clear Instance", icon="UNLINKED")
Instance Tools
625990701f5feb6acb164492
class Movie(): <NEW_LINE> <INDENT> valid_ratings = ["G", "PG", "PG-13", "R"] <NEW_LINE> def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube): <NEW_LINE> <INDENT> self.title = movie_title <NEW_LINE> self.storyline = movie_storyline <NEW_LINE> self.poster_image_url = poster_image <NEW_LINE> self.trailer_youtube_url = trailer_youtube <NEW_LINE> <DEDENT> def show_trailer(self): <NEW_LINE> <INDENT> webbrowser.open(self.trailer_youtube_url)
This class provides a way to store movie-related information.
62599070ec188e330fdfa143
class HighlightsMenu(ABCMenu): <NEW_LINE> <INDENT> def __init__(self, sender): <NEW_LINE> <INDENT> ABCMenu.__init__(self, title2=sender.itemTitle) <NEW_LINE> self.AddMenu(FeaturedHighlightsMenu, 'Featured Highlights') <NEW_LINE> self.AddMenu(TeamListMenu, 'Team Highlights', submenu="HighlightsSearchMenu") <NEW_LINE> self.AddMenu(HighlightsSearchMenu, 'MLB.com FastCast', query='FastCast') <NEW_LINE> self.AddMenu(HighlightsSearchMenu, 'MLB Network') <NEW_LINE> self.AddMenu(HighlightsSearchMenu, 'Plays of the Day') <NEW_LINE> self.AddSearch(HighlightsSearchMenu, label='Search Highlights')
The highlights/ Menu
6259907032920d7e50bc78e8
class ReceiveService(BaseService): <NEW_LINE> <INDENT> def __init__(self, connection, context, **kwargs): <NEW_LINE> <INDENT> super().__init__(connection, context, **kwargs) <NEW_LINE> <DEDENT> def pre_service(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def post_service(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def call_service(self, pattern, timeout=None, size=None, trim_buffer=True, target=None, *args, **kwargs): <NEW_LINE> <INDENT> spawn = self.get_spawn(target) <NEW_LINE> self.result = False <NEW_LINE> self.connection.receiveBuffer = '' <NEW_LINE> try: <NEW_LINE> <INDENT> if pattern == r'nopattern^': <NEW_LINE> <INDENT> sleep(timeout or 10) <NEW_LINE> self.connection.receiveBuffer = spawn.expect(r'.*', size, *args, **kwargs).match_output <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.connection.receiveBuffer = spawn.expect(pattern, timeout, size, *args, **kwargs).match_output <NEW_LINE> self.result = True <NEW_LINE> <DEDENT> <DEDENT> except TimeoutError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> raise SubCommandFailure("Receive service failed", err) from err <NEW_LINE> <DEDENT> <DEDENT> def get_service_result(self): <NEW_LINE> <INDENT> return self.result
Match a pattern from the spawn buffer. Arguments: pattern: regular expression pattern to match. If r'nopattern^' is used, no pattern is matched and all data received until the timeout period is returned. timeout: time to wait for the pattern to match. size: read size in bytes for reading the buffer. trim_buffer: whether to trim the buffer after a successful match. Default is True. target: ``standby`` to match a list of patterns against the buffer on the standby spawn channel. search_size: maximum size in bytes to search at the end of the buffer. Returns: Bool: True or False. True: if data is matched by the provided pattern. False: if nothing is matched by the pattern. Data matched by the pattern is set to the receiveBuffer attribute of the connection object. Raises: No exception is raised if the pattern does not match or a timeout occurs. SubCommandFailure is raised if any exception other than TimeoutError is raised. Example: .. code-block:: python rtr.sendline("a command") rtr.receive(r'some_pattern', timeout=10, target='standby')
6259907076e4537e8c3f0e25
class PhpBase(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._php_construct(*args, **kwargs) <NEW_LINE> <DEDENT> def _php_construct(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Object"
The base class for all "php" classes.
625990704e4d562566373ca7
class CPPIntegrationTestCase(interface.ProcessTestCase): <NEW_LINE> <INDENT> REGISTERED_NAME = "cpp_integration_test" <NEW_LINE> def __init__(self, logger, program_executable, program_options=None): <NEW_LINE> <INDENT> interface.ProcessTestCase.__init__(self, logger, "C++ integration test", program_executable) <NEW_LINE> self.program_executable = program_executable <NEW_LINE> self.program_options = utils.default_if_none(program_options, {}).copy() <NEW_LINE> <DEDENT> def configure(self, fixture, *args, **kwargs): <NEW_LINE> <INDENT> interface.ProcessTestCase.configure(self, fixture, *args, **kwargs) <NEW_LINE> self.program_options["connectionString"] = self.fixture.get_internal_connection_string() <NEW_LINE> <DEDENT> def _make_process(self): <NEW_LINE> <INDENT> return core.programs.generic_program(self.logger, [self.program_executable], **self.program_options)
A C++ integration test to execute.
625990701b99ca4002290186
class SoftwareOrdersViewSet(mixins.ListModelMixin, viewsets.GenericViewSet, generics.CreateAPIView): <NEW_LINE> <INDENT> queryset = SoftwareOrder.objects.all() <NEW_LINE> serializer_class = SoftwareOrdersSerializer <NEW_LINE> pagination_class = SoftwareOrderPagination
List all software orders, or create a new software order.
62599070bf627c535bcb2d6d
class ContactDetail(element.Element): <NEW_LINE> <INDENT> resource_type = "ContactDetail" <NEW_LINE> def __init__(self, jsondict=None, strict=True, **kwargs): <NEW_LINE> <INDENT> self.name = None <NEW_LINE> self.telecom = None <NEW_LINE> super(ContactDetail, self).__init__(jsondict=jsondict, strict=strict, **kwargs) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(ContactDetail, self).elementProperties() <NEW_LINE> js.extend([ ("name", "name", str, False, None, False), ("telecom", "telecom", contactpoint.ContactPoint, True, None, False), ]) <NEW_LINE> return js
Contact information. Specifies contact information for a person or organization.
625990701f037a2d8b9e54bb
class BinaryCELoss(Loss): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(name='binary_crossentropy') <NEW_LINE> <DEDENT> def __call__(self, gt, pr): <NEW_LINE> <INDENT> return F.binary_crossentropy(gt, pr, **self.submodules)
Creates a criterion that measures the Binary Cross Entropy between the ground truth (gt) and the prediction (pr). .. math:: L(gt, pr) = - gt \cdot \log(pr) - (1 - gt) \cdot \log(1 - pr) Returns: A callable ``binary_crossentropy`` instance. Can be used in ``model.compile(...)`` function or combined with other losses. Example: .. code:: python loss = BinaryCELoss() model.compile('SGD', loss=loss)
62599070be8e80087fbc0930
class PagesRenderer(object): <NEW_LINE> <INDENT> def __init__(self, rendered_items: List[T.html_tag], page_css: bytes=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.rendered_items = rendered_items <NEW_LINE> self.pages = {} <NEW_LINE> self.resources = { _NAVBAR_CSS: load_resource(Path(_NAVBAR_CSS)), _STYLE_CSS: load_resource(Path(_STYLE_CSS)), _PAGE_CSS: page_css if page_css is not None else b'', } <NEW_LINE> <DEDENT> def render(self, posts_per_page=25): <NEW_LINE> <INDENT> pages_count = (len(self.rendered_items) + posts_per_page - 1) // posts_per_page <NEW_LINE> page_names = ["%d.html" % i for i in range(pages_count)] <NEW_LINE> nav_section = T.nav( *[T.a(str(i), href=page_names[i], target=_PAGE_FRAME) for i in range(pages_count)] ) <NEW_LINE> index = T.html( T.head( T.meta(charset='utf-8'), stylesheet(_STYLE_CSS), stylesheet(_NAVBAR_CSS), T.script(src=_JQUERY_URL), inline_script_from(get_resource(Path(_SELECTED_JS))) ), T.body( nav_section, T.iframe(name=_PAGE_FRAME, src=page_names[0] if pages_count > 0 else 'none', width='100%', height='100%', style='border:none') ) ) <NEW_LINE> self.pages['index.html'] = index <NEW_LINE> for page_index in range(pages_count): <NEW_LINE> <INDENT> page_items = self.rendered_items[page_index * posts_per_page: (page_index + 1) * posts_per_page] <NEW_LINE> chunk_html = T.html( T.head(stylesheet('page.css')), T.body( T.div(*page_items, id=ID_CONTAINER) ) ) <NEW_LINE> self.pages[page_names[page_index]] = chunk_html <NEW_LINE> <DEDENT> <DEDENT> def save_to(self, html_path: Path): <NEW_LINE> <INDENT> for name, data in self.resources.items(): <NEW_LINE> <INDENT> with html_path.joinpath(name).open('wb') as fo: <NEW_LINE> <INDENT> fo.write(data) <NEW_LINE> <DEDENT> <DEDENT> for name, html in self.pages.items(): <NEW_LINE> <INDENT> with html_path.joinpath(name).open('w') as fo: <NEW_LINE> <INDENT> fo.write(html.render())
:type rendered_items: list[tags.html_tag] :type pages: dict[str, tags.html] :type resources: dict[str, bytes]
62599070a8370b77170f1c6a
class KNearestNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.X_train = X <NEW_LINE> self.y_train = y <NEW_LINE> <DEDENT> def predict(self, X, k=1, num_loops=0): <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> dists = self.compute_distances_no_loops(X) <NEW_LINE> <DEDENT> elif num_loops == 1: <NEW_LINE> <INDENT> dists = self.compute_distances_one_loop(X) <NEW_LINE> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> dists = self.compute_distances_two_loops(X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid value %d for num_loops' % num_loops) <NEW_LINE> <DEDENT> return self.predict_labels(dists, k=k) <NEW_LINE> <DEDENT> def compute_distances_two_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> for j in xrange(num_train): <NEW_LINE> <INDENT> dists[i,j] = math.sqrt(np.sum((X[i,:] - self.X_train[j,:]) ** 2)) <NEW_LINE> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_one_loop(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> dists[i] = np.sqrt(np.sum((X[i] - self.X_train) ** 2, axis=1)) <NEW_LINE> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_no_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> XSquare = np.tile(np.sum(X ** 2, axis=1), [num_train,1]).T <NEW_LINE> YSquare = np.tile(np.sum(self.X_train ** 2, axis=1), [num_test,1]) <NEW_LINE> XY2 = 2 * X.dot(self.X_train.T) <NEW_LINE> dists = np.sqrt(XSquare + YSquare - XY2) <NEW_LINE> return dists <NEW_LINE> <DEDENT> def predict_labels(self, dists, k=1): <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> closest_y = [] <NEW_LINE> sortedIndices = np.argsort(dists[i]) <NEW_LINE> closest_y = self.y_train[sortedIndices[:k]] <NEW_LINE> u, indices = np.unique(closest_y, return_inverse=True) <NEW_LINE> y_pred[i] = u[np.argmax(np.bincount(indices))] <NEW_LINE> <DEDENT> return y_pred
a kNN classifier with L2 distance
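A minimal standalone sketch (illustrative only, with randomly generated arrays) of the vectorized identity that compute_distances_no_loops above relies on: ||x - y||^2 = ||x||^2 + ||y||^2 - 2 x.y, evaluated for all test/train pairs at once.
import numpy as np

X_test = np.random.rand(3, 5)    # hypothetical test points
X_train = np.random.rand(4, 5)   # hypothetical training points

# Pairwise squared L2 distances without explicit loops.
d2 = (np.sum(X_test ** 2, axis=1)[:, None]
      + np.sum(X_train ** 2, axis=1)[None, :]
      - 2 * X_test @ X_train.T)
dists = np.sqrt(np.maximum(d2, 0))  # clip tiny negatives caused by round-off
print(dists.shape)                  # (3, 4): one distance per test/train pair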
625990705fdd1c0f98e5f828
class ShortTable(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.authors = [] <NEW_LINE> self.change_history = [] <NEW_LINE> self.contact = None <NEW_LINE> self.contributors = [] <NEW_LINE> self.label = None <NEW_LINE> self.name = None <NEW_LINE> self.properties = [] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.properties)
Wraps a short-table, i.e. a grouped subset of specializations.
625990708e7ae83300eea932
class room_long(base_room): <NEW_LINE> <INDENT> pass
Large narrow room of random orientation
625990700a50d4780f706a12
class RPC(WrapperPimpl): <NEW_LINE> <INDENT> class InnerWrapper(Wrapper): <NEW_LINE> <INDENT> def __init__(self, flux_handle, topic, payload=None, nodeid=flux.constants.FLUX_NODEID_ANY, flags=0, handle=None): <NEW_LINE> <INDENT> self._handle = flux_handle <NEW_LINE> dest = raw.flux_future_destroy <NEW_LINE> super(self.__class__, self).__init__(ffi, lib, handle=handle, match=ffi.typeof( lib.flux_rpc).result, prefixes=[ 'flux_rpc_', ], destructor=dest,) <NEW_LINE> if handle is None: <NEW_LINE> <INDENT> if isinstance(flux_handle, Wrapper): <NEW_LINE> <INDENT> flux_handle = flux_handle.handle <NEW_LINE> <DEDENT> if payload is None or payload == ffi.NULL: <NEW_LINE> <INDENT> payload = ffi.NULL <NEW_LINE> <DEDENT> elif not isinstance(payload, basestring): <NEW_LINE> <INDENT> payload = json.dumps(payload) <NEW_LINE> <DEDENT> if isinstance(nodeid, basestring): <NEW_LINE> <INDENT> self.handle = lib.flux_rpc_multi( flux_handle, topic, payload, nodeid, flags) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.handle = lib.flux_rpc( flux_handle, topic, payload, nodeid, flags) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def __init__(self, flux_handle, topic, payload=None, nodeid=flux.constants.FLUX_NODEID_ANY, flags=0, handle=None): <NEW_LINE> <INDENT> super(RPC, self).__init__() <NEW_LINE> self.pimpl = self.InnerWrapper(flux_handle, topic, payload, nodeid, flags, handle) <NEW_LINE> self.then_args = None <NEW_LINE> self.then_cb = None <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> return bool(self.pimpl.check()) <NEW_LINE> <DEDENT> def completed(self): <NEW_LINE> <INDENT> return bool(self.pimpl.completed()) <NEW_LINE> <DEDENT> def get_str(self): <NEW_LINE> <INDENT> j_str = ffi.new('char *[1]') <NEW_LINE> self.pimpl.get(j_str) <NEW_LINE> return ffi.string(j_str[0]) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> return json.loads(self.get_str()) <NEW_LINE> <DEDENT> def then(self, callback, args): <NEW_LINE> <INDENT> def cb_then_wrapper(trash, arg): <NEW_LINE> <INDENT> rpc_handle = ffi.from_handle(arg) <NEW_LINE> callback(rpc_handle, rpc_handle.then_args) <NEW_LINE> <DEDENT> self.then_cb = ffi.callback('flux_then_f', cb_then_wrapper) <NEW_LINE> self.then_args = args <NEW_LINE> return self.pimpl.then(self.then_cb, ffi.new_handle(self))
An RPC state object
6259907016aa5153ce401d7c
@keras_export('keras.layers.experimental.preprocessing.RandomContrast') <NEW_LINE> class RandomContrast(Layer): <NEW_LINE> <INDENT> def __init__(self, factor, seed=None, name=None, **kwargs): <NEW_LINE> <INDENT> self.factor = factor <NEW_LINE> if isinstance(factor, (tuple, list)): <NEW_LINE> <INDENT> self.lower = factor[0] <NEW_LINE> self.upper = factor[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.lower = self.upper = factor <NEW_LINE> <DEDENT> if self.lower < 0. or self.upper < 0. or self.lower > 1.: <NEW_LINE> <INDENT> raise ValueError('Factor cannot have negative values, ' 'got {}'.format(factor)) <NEW_LINE> <DEDENT> self.seed = seed <NEW_LINE> self.input_spec = InputSpec(ndim=4) <NEW_LINE> super(RandomContrast, self).__init__(name=name, **kwargs) <NEW_LINE> <DEDENT> def call(self, inputs, training=True): <NEW_LINE> <INDENT> if training is None: <NEW_LINE> <INDENT> training = K.learning_phase() <NEW_LINE> <DEDENT> def random_contrasted_inputs(): <NEW_LINE> <INDENT> return image_ops.random_contrast(inputs, 1. - self.lower, 1. + self.upper, self.seed) <NEW_LINE> <DEDENT> output = tf_utils.smart_cond(training, random_contrasted_inputs, lambda: inputs) <NEW_LINE> output.set_shape(inputs.shape) <NEW_LINE> return output <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return input_shape <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = { 'factor': self.factor, 'seed': self.seed, } <NEW_LINE> base_config = super(RandomContrast, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items()))
Adjust the contrast of an image or images by a random factor. Contrast is adjusted independently for each channel of each image during training. For each channel, this layer computes the mean of the image pixels in the channel and then adjusts each component `x` of each pixel to `(x - mean) * contrast_factor + mean`. Input shape: 4D tensor with shape: `(samples, height, width, channels)`, data_format='channels_last'. Output shape: 4D tensor with shape: `(samples, height, width, channels)`, data_format='channels_last'. Attributes: factor: a positive float represented as a fraction of value, or a tuple of size 2 representing the lower and upper bound. When represented as a single float, lower = upper. The contrast factor will be randomly picked between [1.0 - lower, 1.0 + upper]. seed: Integer. Used to create a random seed. name: A string, the name of the layer. Raises: ValueError: if the lower bound is not in the range [0, 1], or the upper bound is negative.
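A hedged usage sketch of the layer documented above, assuming a TensorFlow 2.x build that still exposes the experimental preprocessing namespace used by this record:
import tensorflow as tf

layer = tf.keras.layers.experimental.preprocessing.RandomContrast(factor=0.2, seed=1)
images = tf.random.uniform((8, 64, 64, 3))   # (samples, height, width, channels)
augmented = layer(images, training=True)     # contrast factor drawn from [0.8, 1.2]
passthrough = layer(images, training=False)  # no augmentation outside training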
625990703346ee7daa3382b0
class Player(object): <NEW_LINE> <INDENT> def __init__(self, index): <NEW_LINE> <INDENT> super(Player, self).__init__() <NEW_LINE> self.index = index <NEW_LINE> self.wealth = 0 <NEW_LINE> self.active_round = 0
docstring for Player
625990701f5feb6acb164494
class MediaAiAnalysisFrameTagSegmentItem(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.StartTimeOffset = None <NEW_LINE> self.EndTimeOffset = None <NEW_LINE> self.TagSet = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.StartTimeOffset = params.get("StartTimeOffset") <NEW_LINE> self.EndTimeOffset = params.get("EndTimeOffset") <NEW_LINE> if params.get("TagSet") is not None: <NEW_LINE> <INDENT> self.TagSet = [] <NEW_LINE> for item in params.get("TagSet"): <NEW_LINE> <INDENT> obj = MediaAiAnalysisFrameTagItem() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.TagSet.append(obj)
List of frame-by-frame tag segments.
62599070ec188e330fdfa145
class MakeVideo(QuickApp): <NEW_LINE> <INDENT> def define_options(self, params): <NEW_LINE> <INDENT> params.add_string('dir', help='Directory containing logs') <NEW_LINE> params.add_string('tmpdir', help='Working directory ', default='tmp') <NEW_LINE> <DEDENT> def define_jobs_context(self, context): <NEW_LINE> <INDENT> options = self.get_options() <NEW_LINE> dirname = options.dir <NEW_LINE> if not os.path.exists(dirname): <NEW_LINE> <INDENT> msg = "Path does not exist: %r." % dirname <NEW_LINE> raise UserError(msg) <NEW_LINE> <DEDENT> from conf_tools.utils import locate_files <NEW_LINE> bags = list(locate_files(dirname, pattern="*.bag", followlinks=True)) <NEW_LINE> self.info('I found %d bags in %s' % (len(bags), dirname)) <NEW_LINE> if len(bags) == 0: <NEW_LINE> <INDENT> msg = "Could not find any bag in %r." % dirname <NEW_LINE> raise UserError(msg) <NEW_LINE> <DEDENT> def short(f): <NEW_LINE> <INDENT> return os.path.splitext(os.path.basename(f))[0] <NEW_LINE> <DEDENT> for f in bags: <NEW_LINE> <INDENT> s = short(f) <NEW_LINE> context.comp_dynamic(process, bag_filename=f, tmpdir=options.tmpdir, models=['bag2mp4_fixfps'], job_id=s)
Simplest app example
62599070e76e3b2f99fda2a5
class AnonymousSurvey(): <NEW_LINE> <INDENT> def __init__(self,question): <NEW_LINE> <INDENT> self.question = question <NEW_LINE> self.responses = [] <NEW_LINE> <DEDENT> def show_question(self): <NEW_LINE> <INDENT> print(self.question) <NEW_LINE> <DEDENT> def store_response(self,new_response): <NEW_LINE> <INDENT> self.responses.append(new_response) <NEW_LINE> <DEDENT> def show_results(self): <NEW_LINE> <INDENT> print("Survey results:") <NEW_LINE> for response in self.responses: <NEW_LINE> <INDENT> print('- ' + response)
Collect answers to an anonymous survey questionnaire.
62599070097d151d1a2c2914
class GithubSpider(scrapy.Spider): <NEW_LINE> <INDENT> name = 'GithubRepos' <NEW_LINE> @property <NEW_LINE> def start_urls(self): <NEW_LINE> <INDENT> url_templ = 'https://github.com/shiyanlou?page={}&tab=repositories' <NEW_LINE> urls = (url_templ.format(i) for i in range(1, 5)) <NEW_LINE> return urls <NEW_LINE> <DEDENT> def parse(self, response): <NEW_LINE> <INDENT> for repo in response.css('li.col-12'): <NEW_LINE> <INDENT> yield { 'name': repo.css('div.d-inline-block a[itemprop="name codeRepository"]::text').extract_first(), 'update_time': repo.css('div.f6 relative-time::attr(datetime)').extract_first() }
Every Scrapy crawler needs a Spider class that inherits from the scrapy.Spider class. In this class you define the websites and links to request, how to extract data from the returned pages, and so on.
62599070796e427e5385001b
class ProdConfig(Config): <NEW_LINE> <INDENT> pass <NEW_LINE> SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
Production configuration child class. Args: Config: The parent configuration class with general configuration settings.
62599070d486a94d0ba2d862
class NumRecord(object): <NEW_LINE> <INDENT> def __init__( self, time_ns=0, value=0 ): <NEW_LINE> <INDENT> self.time_ns = time_ns <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%s, %s)" % ( self.__class__.__name__, repr( self.time_ns ), repr( self.value ) ) <NEW_LINE> <DEDENT> def marshal( self, marshaller ): <NEW_LINE> <INDENT> marshaller.writeUint64( "time_ns", long(self.time_ns) ) <NEW_LINE> marshaller.writeDouble( "value", self.value ) <NEW_LINE> <DEDENT> def unmarshal( self, unmarshaller ): <NEW_LINE> <INDENT> self.time_ns = unmarshaller.readUint64( "time_ns" ) <NEW_LINE> self.value = unmarshaller.readDouble( "value" )
Numeric record object definition. A record contains: - the measurement time in nanoseconds - the measured value (an integer, long or float)
6259907001c39578d7f14386
class Meta(Ice.Object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> if __builtin__.type(self) == _M_Murmur.Meta: <NEW_LINE> <INDENT> raise RuntimeError('Murmur.Meta is an abstract class') <NEW_LINE> <DEDENT> <DEDENT> def ice_ids(self, current=None): <NEW_LINE> <INDENT> return ('::Ice::Object', '::Murmur::Meta') <NEW_LINE> <DEDENT> def ice_id(self, current=None): <NEW_LINE> <INDENT> return '::Murmur::Meta' <NEW_LINE> <DEDENT> def ice_staticId(): <NEW_LINE> <INDENT> return '::Murmur::Meta' <NEW_LINE> <DEDENT> ice_staticId = staticmethod(ice_staticId) <NEW_LINE> def getServer_async(self, _cb, id, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def newServer_async(self, _cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getBootedServers_async(self, _cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getAllServers_async(self, _cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getDefaultConf_async(self, _cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getVersion_async(self, _cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def addCallback_async(self, _cb, cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def removeCallback_async(self, _cb, cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getUptime_async(self, _cb, current=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return IcePy.stringify(self, _M_Murmur._t_Meta) <NEW_LINE> <DEDENT> __repr__ = __str__
This is the meta interface. It is primarily used for retrieving the [Server] interfaces for each individual server.
62599070fff4ab517ebcf0bd
class App(PlaybookApp): <NEW_LINE> <INDENT> def __init__(self, _tcex: TcEx): <NEW_LINE> <INDENT> super().__init__(_tcex) <NEW_LINE> self.pretty_json = {} <NEW_LINE> <DEDENT> def run(self) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> indent: str = self.tcex.playbook.read(self.args.indent) <NEW_LINE> indent = int(indent) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.tcex.exit(1, f'Invalid value ("{indent}") passed for indent.') <NEW_LINE> <DEDENT> json_data = self.tcex.playbook.read(self.args.json_data) <NEW_LINE> json_data_type: str = self.tcex.playbook.variable_type(self.args.json_data) <NEW_LINE> try: <NEW_LINE> <INDENT> if json_data_type in ['String']: <NEW_LINE> <INDENT> json_data: dict = json.loads(json_data) <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.tcex.exit(1, 'Failed parsing JSON data.') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.pretty_json: str = json.dumps( json_data, indent=indent, sort_keys=self.args.sort_keys ) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.tcex.exit(1, 'Failed parsing JSON data.') <NEW_LINE> <DEDENT> self.exit_message = 'JSON prettified.' <NEW_LINE> <DEDENT> def write_output(self): <NEW_LINE> <INDENT> self.tcex.log.info('Writing Output') <NEW_LINE> self.tcex.playbook.create_output('json.pretty', self.pretty_json)
Playbook App
62599070a8370b77170f1c6c
class StorageObjectsGetIamPolicyRequest(_messages.Message): <NEW_LINE> <INDENT> bucket = _messages.StringField(1, required=True) <NEW_LINE> generation = _messages.IntegerField(2) <NEW_LINE> object = _messages.StringField(3, required=True) <NEW_LINE> userProject = _messages.StringField(4)
A StorageObjectsGetIamPolicyRequest object. Fields: bucket: Name of the bucket in which the object resides. generation: If present, selects a specific revision of this object (as opposed to the latest version, the default). object: Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts. userProject: The project to be billed for this request. Required for Requester Pays buckets.
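A hypothetical construction sketch for the request message above; the bucket, object, and project names are placeholders:
request = StorageObjectsGetIamPolicyRequest(
    bucket='my-bucket',                  # required
    object='path/to/blob.txt',           # required; see the note above about URL-encoding path segments
    userProject='my-billing-project',    # needed for Requester Pays buckets
)
print(request.bucket, request.object)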
6259907099cbb53fe683278c
class BaseBaker(object): <NEW_LINE> <INDENT> timestamp_format = "%Y%m%dT%H%M%S" <NEW_LINE> def __init__(self, **filter_kwargs): <NEW_LINE> <INDENT> self.filter_kwargs = filter_kwargs <NEW_LINE> <DEDENT> def default_outputdir(self): <NEW_LINE> <INDENT> return os.path.join(COUNTRY_DIR, 'bakery') <NEW_LINE> <DEDENT> def filename(self, fmt, timestamp=None, **filter_kwargs): <NEW_LINE> <INDENT> if timestamp is None: <NEW_LINE> <INDENT> timestamp = datetime.now() <NEW_LINE> <DEDENT> state = self.filter_kwargs.get('state') <NEW_LINE> return "%s_%s.%s" % (state.lower(), timestamp.strftime(self.timestamp_format), fmt) <NEW_LINE> <DEDENT> def manifest_filename(self, timestamp, **filter_kwargs): <NEW_LINE> <INDENT> state = self.filter_kwargs.get('state') <NEW_LINE> return "%s_%s_manifest.txt" % (state.lower(), timestamp.strftime(self.timestamp_format)) <NEW_LINE> <DEDENT> def collect_items(self): <NEW_LINE> <INDENT> self._items = [] <NEW_LINE> self._fields = [] <NEW_LINE> return self <NEW_LINE> <DEDENT> def get_items(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> def get_fields(self): <NEW_LINE> <INDENT> return self._fields <NEW_LINE> <DEDENT> def write(self, fmt='csv', outputdir=None, timestamp=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fmt_method = getattr(self, 'write_' + fmt) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise UnsupportedFormatError("Format %s is not supported" % (fmt)) <NEW_LINE> <DEDENT> if outputdir is None: <NEW_LINE> <INDENT> outputdir = self.default_outputdir() <NEW_LINE> <DEDENT> if not os.path.exists(outputdir): <NEW_LINE> <INDENT> os.makedirs(outputdir) <NEW_LINE> <DEDENT> if timestamp is None: <NEW_LINE> <INDENT> timestamp = datetime.now() <NEW_LINE> <DEDENT> return fmt_method(outputdir, timestamp) <NEW_LINE> <DEDENT> def write_csv(self, outputdir, timestamp): <NEW_LINE> <INDENT> path = os.path.join(outputdir, self.filename('csv', timestamp, **self.filter_kwargs)) <NEW_LINE> with open(path, 'w') as csvfile: <NEW_LINE> <INDENT> writer = DictWriter(csvfile, self.get_fields()) <NEW_LINE> writer.writeheader() <NEW_LINE> for row in self.get_items(): <NEW_LINE> <INDENT> writer.writerow(row) <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def write_json(self, outputdir, timestamp): <NEW_LINE> <INDENT> path = os.path.join(outputdir, self.filename('json', timestamp, **self.filter_kwargs)) <NEW_LINE> with open(path, 'w') as f: <NEW_LINE> <INDENT> f.write(json.dumps(self.get_items(), default=json_util.default)) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def write_manifest(self, outputdir=None, timestamp=None): <NEW_LINE> <INDENT> if outputdir is None: <NEW_LINE> <INDENT> outputdir = self.default_outputdir() <NEW_LINE> <DEDENT> if not os.path.exists(outputdir): <NEW_LINE> <INDENT> os.makedirs(outputdir) <NEW_LINE> <DEDENT> if timestamp is None: <NEW_LINE> <INDENT> timestamp = datetime.now() <NEW_LINE> <DEDENT> path = os.path.join(outputdir, self.manifest_filename(timestamp, **self.filter_kwargs)) <NEW_LINE> with open(path, 'w') as f: <NEW_LINE> <INDENT> f.write("Generated on %s\n" % timestamp.strftime(self.timestamp_format)) <NEW_LINE> f.write("\n") <NEW_LINE> f.write("Filters:\n\n") <NEW_LINE> for k, v in self.filter_kwargs.items(): <NEW_LINE> <INDENT> f.write("%s: %s\n" % (k, v)) <NEW_LINE> <DEDENT> <DEDENT> return self
Base class for classes that write election and candidate data to structured files
6259907099fddb7c1ca63a24
class Route(object): <NEW_LINE> <INDENT> def __init__(self, operator=None, route=None): <NEW_LINE> <INDENT> self.swagger_types = { 'operator': 'str', 'route': 'str' } <NEW_LINE> self.attribute_map = { 'operator': 'operator', 'route': 'route' } <NEW_LINE> self._operator = operator <NEW_LINE> self._route = route <NEW_LINE> <DEDENT> @property <NEW_LINE> def operator(self): <NEW_LINE> <INDENT> return self._operator <NEW_LINE> <DEDENT> @operator.setter <NEW_LINE> def operator(self, operator): <NEW_LINE> <INDENT> self._operator = operator <NEW_LINE> <DEDENT> @property <NEW_LINE> def route(self): <NEW_LINE> <INDENT> return self._route <NEW_LINE> <DEDENT> @route.setter <NEW_LINE> def route(self, route): <NEW_LINE> <INDENT> self._route = route <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990702ae34c7f260ac98d
class AbstractTxtLineParserFieldsStrategy(AbstractTxtLineParserTargetStrategy): <NEW_LINE> <INDENT> default_field_handler_prefix = 'parse_fields' <NEW_LINE> def field_targets(self): <NEW_LINE> <INDENT> for f in self.fields: <NEW_LINE> <INDENT> f_ = f.split(':') <NEW_LINE> if len(f_) > 2: yield f_[2] <NEW_LINE> else: yield f_[0] <NEW_LINE> <DEDENT> <DEDENT> def run_field_parse(self, text, onto, method, at, target): <NEW_LINE> <INDENT> if not target: target = method <NEW_LINE> if not at: at = self.default_access_type <NEW_LINE> while at in self.access_type_alias: <NEW_LINE> <INDENT> at = self.access_type_alias[at] <NEW_LINE> <DEDENT> descr = self.field_descriptor(onto, at, target) <NEW_LINE> return self.run_or_default(method, text, descr) <NEW_LINE> <DEDENT> def parse_fields(self, text, descr): <NEW_LINE> <INDENT> raise NotImplementedError()
~FieldsStrategy adds a concrete `run_field_parse` for AbstractTxtLineParser based on a three-part spec, and get/set descriptors provided for by ~TargetStrategy. This basic field strategy uses the 'parse_fields_*' signature for methods using this spec, to be distinguished from other res.txt list based parsers with different fieldspecs.
62599070a219f33f346c80ad
class RPCArgumentJSONDecodeError(Exception): <NEW_LINE> <INDENT> HTTP_status_code = 400 <NEW_LINE> pass
Tried to JSON decode an argument which was not JSON encoded
62599070009cb60464d02ddb
class Mongo(object): <NEW_LINE> <INDENT> _client = None <NEW_LINE> _config = None <NEW_LINE> def __init__(self, Config=None): <NEW_LINE> <INDENT> if Config and isinstance(Config, Configuration): <NEW_LINE> <INDENT> self._config = Config <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._config = Configuration() <NEW_LINE> <DEDENT> self._client = self._generate_client() <NEW_LINE> <DEDENT> def Client(self): <NEW_LINE> <INDENT> return self._client <NEW_LINE> <DEDENT> def Close(self): <NEW_LINE> <INDENT> self._client.close() <NEW_LINE> <DEDENT> def _generate_client(self): <NEW_LINE> <INDENT> mongoConf = self._config.get('Connectivity', 'MongoDB') <NEW_LINE> if mongoConf.get('username') and mongoConf.get('password'): <NEW_LINE> <INDENT> return pymongo.MongoClient( "mongodb://{0}:{1}@{2}:{3}/{4}".format( mongoConf.get('username', ''), mongoConf.get('password', ''), mongoConf.get('host', 'localhost'), mongoConf.get('port', 27017), mongoConf.get('db', 'grease') ), w=1 ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return pymongo.MongoClient( host=mongoConf.get('host', 'localhost'), port=mongoConf.get('port', 27017), w=1 )
MongoDB Connection Class Attributes: _client (pymongo.MongoClient): The actual PyMongo Connection _config (Configuration): Configuration Object
625990704a966d76dd5f078e
class SecretPage(models.Model): <NEW_LINE> <INDENT> name = models.CharField( unique=True, max_length=150, ) <NEW_LINE> slug = models.SlugField(unique=True) <NEW_LINE> template = models.CharField( max_length=300, help_text="File extension (.html) is not required." ) <NEW_LINE> active = models.BooleanField(default=False) <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> if self.template.endswith(TEMPLATE_EXTENSION): <NEW_LINE> <INDENT> self.template = self.template[:-len(TEMPLATE_EXTENSION)] <NEW_LINE> <DEDENT> template_path = settings.SECRET_PAGES_TEMPLATE_TEMPLATE.format(self.template) <NEW_LINE> try: <NEW_LINE> <INDENT> get_template(template_path) <NEW_LINE> <DEDENT> except TemplateDoesNotExist: <NEW_LINE> <INDENT> raise ValidationError('Template "{}" cannot be found.'.format(template_path)) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Model for a secret page.
62599070379a373c97d9a8c4
class LimitedMemoryBfgs(ScipyBaseSolver): <NEW_LINE> <INDENT> def __init__(self, objective: ObjectiveBaseClass, **kwargs): <NEW_LINE> <INDENT> super(LimitedMemoryBfgs, self).__init__(objective, method="L-BFGS-B", **kwargs)
Limited-memory variant of BFGS (L-BFGS). See the scipy documentation for the parameter list and description. Parameters ---------- jac: None Set automatically to the objective's gradient method. However, if no gradient function is available, e.g., for a custom distance function, then jac can be set to None. callback: callable Unlike the non-scipy solvers, the signature is callback(xk), with xk the current set of variables, which are the model parameters flattened to one 1D array.
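A hypothetical usage sketch of the callback behaviour described above; `my_objective` stands in for an ObjectiveBaseClass instance that is not shown in this record, and extra keyword arguments are assumed to be forwarded to scipy's L-BFGS-B solver as the docstring states:
def progress(xk):
    # xk is the current set of variables: the model parameters flattened to one 1D array.
    print("first few parameters this iteration:", xk[:3])

solver = LimitedMemoryBfgs(my_objective, callback=progress)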
62599070d268445f2663a7af
class Options(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> print("create Option object") <NEW_LINE> self.emb_dim = FLAGS.embedding_size <NEW_LINE> self.train_data = FLAGS.train_data <NEW_LINE> self.num_samples = FLAGS.num_neg_samples <NEW_LINE> self.learning_rate = FLAGS.learning_rate <NEW_LINE> self.epochs_to_train = FLAGS.epochs_to_train <NEW_LINE> self.concurrent_steps = FLAGS.concurrent_steps <NEW_LINE> self.batch_size = FLAGS.batch_size <NEW_LINE> self.window_size = FLAGS.window_size <NEW_LINE> self.min_count = FLAGS.min_count <NEW_LINE> self.subsample = FLAGS.subsample <NEW_LINE> self.statistics_interval = FLAGS.statistics_interval <NEW_LINE> self.summary_interval = FLAGS.summary_interval <NEW_LINE> self.checkpoint_interval = FLAGS.checkpoint_interval <NEW_LINE> self.save_path = FLAGS.save_path <NEW_LINE> if not os.path.exists(self.save_path): <NEW_LINE> <INDENT> os.makedirs(self.save_path) <NEW_LINE> <DEDENT> self.eval_data = FLAGS.eval_data
Options used by our word2vec model.
6259907021bff66bcd72450d
class DhtNode: <NEW_LINE> <INDENT> def __init__(self, myNode, pred, succ, nodeList): <NEW_LINE> <INDENT> self.myNode = myNode <NEW_LINE> self.pred = pred <NEW_LINE> self.succ = succ <NEW_LINE> self.data = {} <NEW_LINE> self.totalNodesNum = 2 ** 160 <NEW_LINE> self.nodeList = nodeList <NEW_LINE> self.ft = self.build_ft(myNode) <NEW_LINE> <DEDENT> def get_node_detail(self, target): <NEW_LINE> <INDENT> if target == 'pred': <NEW_LINE> <INDENT> return self.pred.get_info() <NEW_LINE> <DEDENT> elif target == 'succ': <NEW_LINE> <INDENT> return self.succ.get_info() <NEW_LINE> <DEDENT> elif target == 'me': <NEW_LINE> <INDENT> return self.myNode.get_info() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1, -1, -1 <NEW_LINE> <DEDENT> <DEDENT> def read_data(self, key): <NEW_LINE> <INDENT> if key in self.data: <NEW_LINE> <INDENT> return self.data[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def update_data(self, key, val): <NEW_LINE> <INDENT> self.data[key] = val <NEW_LINE> <DEDENT> def delete_data(self, key): <NEW_LINE> <INDENT> return self.data.pop(key) <NEW_LINE> <DEDENT> def create_data(self, key, val): <NEW_LINE> <INDENT> self.data[key] = val <NEW_LINE> <DEDENT> def build_ft(self, myNode): <NEW_LINE> <INDENT> entries = [] <NEW_LINE> print('------Populating entries---------') <NEW_LINE> for i in range(1, 161): <NEW_LINE> <INDENT> st = (myNode.hash_ID + 2 ** (i - 1)) % self.totalNodesNum <NEW_LINE> end = (myNode.hash_ID + 2 ** i) % self.totalNodesNum <NEW_LINE> m = [] <NEW_LINE> for node in self.nodeList: <NEW_LINE> <INDENT> if node.hash_ID >= st: <NEW_LINE> <INDENT> m.append(node) <NEW_LINE> <DEDENT> <DEDENT> if len(m) == 0: <NEW_LINE> <INDENT> m.append(self.nodeList[0]) <NEW_LINE> <DEDENT> m.sort(key=lambda x: x.hash_ID) <NEW_LINE> mySuccessor = m[0] <NEW_LINE> entry = Entry(st, end, mySuccessor) <NEW_LINE> entries.append(entry) <NEW_LINE> <DEDENT> succSet = set() <NEW_LINE> for j in range(0, len(entries) - 1): <NEW_LINE> <INDENT> succSet.add(entries[j].successor) <NEW_LINE> <DEDENT> print("set size: {}".format(len(succSet))) <NEW_LINE> for s in succSet: <NEW_LINE> <INDENT> print(s.get_info()) <NEW_LINE> <DEDENT> newEntries = [] <NEW_LINE> for t in succSet: <NEW_LINE> <INDENT> temp = [] <NEW_LINE> for q in entries: <NEW_LINE> <INDENT> if q.successor == t: <NEW_LINE> <INDENT> temp.append(q) <NEW_LINE> <DEDENT> <DEDENT> temp.sort(key=lambda x: x.start) <NEW_LINE> neoEntry = Entry(temp[0].start, temp[-1].end, t) <NEW_LINE> newEntries.append(neoEntry) <NEW_LINE> <DEDENT> print('---------merge done------------') <NEW_LINE> return newEntries <NEW_LINE> <DEDENT> def lookup_table(self, key): <NEW_LINE> <INDENT> key = int(hashlib.sha1(key.encode()).hexdigest(), 16) <NEW_LINE> for entry in self.ft: <NEW_LINE> <INDENT> end = entry.end <NEW_LINE> if entry.end < entry.start: <NEW_LINE> <INDENT> end = entry.end + self.totalNodesNum <NEW_LINE> <DEDENT> if end > key > entry.start: <NEW_LINE> <INDENT> print("Found in ft") <NEW_LINE> return entry.successor <NEW_LINE> <DEDENT> <DEDENT> print("not found in ft, return the last one in ft") <NEW_LINE> return self.pred
Higher level of abstraction that includes the BaseNode instance for itself, its predecessor, its successor, and the list of the nodes in the Chord ring.
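A minimal sketch of the finger-table interval arithmetic used by build_ft above, shown in a tiny 2**4 identifier space instead of the full 2**160 space for readability:
SPACE = 2 ** 4   # stand-in for the 2 ** 160 identifier space used above
node_id = 6      # hypothetical node hash

for i in range(1, 5):
    start = (node_id + 2 ** (i - 1)) % SPACE
    end = (node_id + 2 ** i) % SPACE
    print(f"finger {i}: responsible for keys in [{start}, {end})")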
62599070b7558d5895464b85
class KeysTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def testSpecTables(self): <NEW_LINE> <INDENT> for klass in _SPEC_CLASSES: <NEW_LINE> <INDENT> methods = frozenset(klass.MethodNames) <NEW_LINE> self.assertEqual(methods, frozenset(klass.INPUT_SIGS.keys())) <NEW_LINE> self.assertEqual(methods, frozenset(klass.OUTPUT_SIGS.keys())) <NEW_LINE> self.assertEqual(methods, frozenset(klass.XFORMERS.keys()))
Test that every map contains all the designated keys.
625990704a966d76dd5f078f
class SomePage(): <NEW_LINE> <INDENT> def __init__(self, id): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.url = __class__.__makePageUrl__(self) <NEW_LINE> self.contents = None <NEW_LINE> <DEDENT> def __makePageUrl__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getUrl(self): <NEW_LINE> <INDENT> return self.url <NEW_LINE> <DEDENT> def retrieve(self, sleepSec=0): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> time.sleep(sleepSec) <NEW_LINE> print >> sys.stderr, "Downloading %s after sleeping for %d sec ..." % (self.url, sleepSec), <NEW_LINE> response = urllib2.urlopen(self.url) <NEW_LINE> self.contents = response.read() <NEW_LINE> print >> sys.stderr, "done" <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.contents = '' <NEW_LINE> print >> sys.stderr, "ERROR" <NEW_LINE> raise('Failed HTTP attempt') <NEW_LINE> <DEDENT> <DEDENT> def getContents(self): <NEW_LINE> <INDENT> if self.contents is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.retrieve(sleepSec=random.randint(1,5)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print >> sys.stderr, "Unable to get the contents for %s; continuing anyway" % self.url <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> return self.contents <NEW_LINE> <DEDENT> def __makeFullPathName__(self, dirname=None, filename=None): <NEW_LINE> <INDENT> if filename is None: <NEW_LINE> <INDENT> filename = '%s.html' % self.id <NEW_LINE> <DEDENT> if dirname is None: <NEW_LINE> <INDENT> dirname = '.' <NEW_LINE> <DEDENT> fullpathtofile = "%s/%s" % (dirname, filename) <NEW_LINE> return fullpathtofile <NEW_LINE> <DEDENT> def saveContentsAsFile(self, dirname=None, filename=None, onlyIfNotExists=False): <NEW_LINE> <INDENT> fullpathtofile = self.__makeFullPathName__(dirname, filename) <NEW_LINE> if not os.path.exists(dirname): <NEW_LINE> <INDENT> os.makedirs(dirname) <NEW_LINE> <DEDENT> if onlyIfNotExists and os.path.exists(fullpathtofile) and os.stat(fullpathtofile).st_size > 0: <NEW_LINE> <INDENT> print >> sys.stderr, "%s already exists and is not empty, so no need to write" % fullpathtofile <NEW_LINE> return <NEW_LINE> <DEDENT> if self.contents is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.retrieve(sleepSec=random.randint(1,5)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print >> sys.stderr, "Unable to save the contents for %s; continuing anyway" % self.url <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> with open(fullpathtofile, 'w') as f: <NEW_LINE> <INDENT> f.write(self.contents) <NEW_LINE> <DEDENT> f.close() <NEW_LINE> <DEDENT> def loadContentsFromFile(self, dirname=None, filename=None, verbose=False): <NEW_LINE> <INDENT> fullpathtofile = self.__makeFullPathName__(dirname, filename) <NEW_LINE> try: <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> if self.contents is None: <NEW_LINE> <INDENT> print >> sys.stderr, "Loading contents from file %s ..." % fullpathtofile, <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print >> sys.stderr, "Reloading contents from file %s ..." % fullpathtofile, <NEW_LINE> <DEDENT> <DEDENT> with open(fullpathtofile, 'r') as f: <NEW_LINE> <INDENT> self.contents = f.read() <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> print >> sys.stderr, "done" <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print >> sys.stderr, "Could not (re)load contents from file %s" % fullpathtofile <NEW_LINE> <DEDENT> <DEDENT> def getCsvRow(self): <NEW_LINE> <INDENT> raise("getCvsRow is an abstract method: please implement it")
Represents a page from Box Office Mojo. id is the id of the page (a string).
62599070adb09d7d5dc0be10
class MethodMetaView(type): <NEW_LINE> <INDENT> def __init__(cls, name, bases, d): <NEW_LINE> <INDENT> super().__init__(name, bases, d) <NEW_LINE> if "methods" not in d: <NEW_LINE> <INDENT> methods = set() <NEW_LINE> for m in HTTP_METHODS: <NEW_LINE> <INDENT> if hasattr(cls, m): <NEW_LINE> <INDENT> if not iscoroutinefunction(getattr(cls, m)): <NEW_LINE> <INDENT> raise ValueError("View method {}.{} should be awaitable.".format(name, m)) <NEW_LINE> <DEDENT> methods.add(m) <NEW_LINE> <DEDENT> <DEDENT> cls.methods = methods
A metaclass used by class-based view classes to collect the implemented HTTP methods.
625990704e4d562566373cac
class ProviderUserState(object): <NEW_LINE> <INDENT> def __init__(self, enabled_provider, user, association): <NEW_LINE> <INDENT> self.has_account = association is not None <NEW_LINE> if self.has_account: <NEW_LINE> <INDENT> self.association_id = association.id <NEW_LINE> self.remote_id = enabled_provider.get_remote_id_from_social_auth(association) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.association_id = None <NEW_LINE> self.remote_id = None <NEW_LINE> <DEDENT> self.provider = enabled_provider <NEW_LINE> self.user = user <NEW_LINE> <DEDENT> def get_unlink_form_name(self): <NEW_LINE> <INDENT> return self.provider.provider_id + '_unlink_form'
Object representing the provider state (attached or not) for a user. This is intended only for use when rendering templates. See for example lms/templates/dashboard.html.
62599070bf627c535bcb2d71
class Aggregator: <NEW_LINE> <INDENT> def generate_score(self, entry): <NEW_LINE> <INDENT> _diff = (datetime.datetime.timestamp(datetime.datetime.utcnow()) - datetime.datetime.timestamp(entry["date"])) / 25 <NEW_LINE> _rank = { "BBC News - World": 0, "World News - Breaking international news and headlines | Sky News": 15, "Reuters: Top News": 80, "Reuters: World News": 80, "Al Jazeera English": 15 } <NEW_LINE> return _diff + _rank[entry["source"]] <NEW_LINE> <DEDENT> def feed_to_object(self, parsed_feed, count=10): <NEW_LINE> <INDENT> return { "name": parsed_feed.feed.title, "logo": self.get_feed_logo(parsed_feed.feed), "subtitle": parsed_feed.feed.subtitle, "entries": [ { "title": entry.title.replace("&apos;", "'").replace("&#8217;", "'"), "thumbnail": self.get_thumbnail(entry), "link": entry.link, "summary": self.format_summary(entry.summary), "date": self.datetime_from_string( entry.published if "published" in entry.keys() else entry.date if "date" in entry.keys() else "recent") } for entry in (parsed_feed.entries[0:count] if len(parsed_feed.entries) >= count else parsed_feed.entries) ] } <NEW_LINE> <DEDENT> def get_thumbnail(self, entry): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if "media_thumbnail" in entry.keys(): <NEW_LINE> <INDENT> return entry["media_thumbnail"][0]["url"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return entry["media_content"][0]["url"] <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_feed_logo(self, feed): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if "href" in feed["image"].keys(): <NEW_LINE> <INDENT> _logo = feed["image"]["href"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _logo = feed["image"]["link"] <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if _logo not in self.cached_urls.keys(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.cached_urls[_logo] = requests.get( _logo).status_code in [200, 301, 302] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.cached_urls[_logo] = None <NEW_LINE> <DEDENT> <DEDENT> return _logo if self.cached_urls[_logo] else None <NEW_LINE> <DEDENT> def parse(self, feed): <NEW_LINE> <INDENT> return feedparser.parse(feed) <NEW_LINE> <DEDENT> def format_summary(self, summary): <NEW_LINE> <INDENT> summary = summary.replace('\n', ' ').replace('\r', '') <NEW_LINE> if "<" in summary: <NEW_LINE> <INDENT> summary = summary[:summary.index("<")] <NEW_LINE> <DEDENT> if len(summary) > 180: <NEW_LINE> <INDENT> return ((summary[:180] if summary[:179][-1] != " " else summary[:180]) + '...') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return summary <NEW_LINE> <DEDENT> <DEDENT> def datetime_from_string(self, date): <NEW_LINE> <INDENT> _date = date.replace("EDT", "UTC-5").replace("EST", "UTC-5") <NEW_LINE> _date = parser.parse(_date).astimezone(pytz.utc) <NEW_LINE> return _date
Basic aggregation functionality, parent class for Aggregator classes.
625990701f037a2d8b9e54bd
class Knight(ChessPiece): <NEW_LINE> <INDENT> def __init__(self, row, col, color, board): <NEW_LINE> <INDENT> self.char = '♞' if color == Color.WHITE else '♘' <NEW_LINE> super().__init__(row, col, color, board) <NEW_LINE> <DEDENT> def generate_possible_moves(self): <NEW_LINE> <INDENT> possible_moves = [] <NEW_LINE> for row_step, col_step in [(1, 2), (-1, 2), (1, -2), (-1, -2), (2, 1), (2, -1), (-2, 1), (-2, -1)]: <NEW_LINE> <INDENT> row_move = self.row + row_step <NEW_LINE> col_move = self.col + col_step <NEW_LINE> if 0 <= row_move < 8 and 0 <= col_move < 8: <NEW_LINE> <INDENT> if self.board.grid[row_move][col_move] is None: <NEW_LINE> <INDENT> possible_moves.append((row_move, col_move)) <NEW_LINE> <DEDENT> elif self.board.grid[row_move][col_move].color != self.color: <NEW_LINE> <INDENT> possible_moves.append(([row_move, col_move])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return possible_moves
This class will represent a knight piece in chess game The knight (♘ ♞) is a piece in the game of chess, representing a knight (armored cavalry). It is normally represented by a horse's head and neck. Each player starts with two knights, which begin on the row closest to the player, between the rooks and bishops. The knight moves to any of the closest squares that are not on the same rank, file, or diagonal, thus the move forms an "L"-shape: two squares vertically and one square horizontally, or two squares horizontally and one square vertically. The knight is the only piece that can leap over other pieces. Attributes: char (str): a character representing the knight shape
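A minimal standalone sketch of the "L"-shaped move set described above, using the same eight offsets as generate_possible_moves but ignoring other pieces:
KNIGHT_OFFSETS = [(1, 2), (-1, 2), (1, -2), (-1, -2), (2, 1), (2, -1), (-2, 1), (-2, -1)]

def knight_targets(row, col):
    # Squares reachable from (row, col) on an empty 8x8 board.
    return [(row + dr, col + dc) for dr, dc in KNIGHT_OFFSETS
            if 0 <= row + dr < 8 and 0 <= col + dc < 8]

print(len(knight_targets(4, 4)))  # 8 moves from the centre of the board
print(knight_targets(0, 0))       # only 2 moves from a corner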
6259907066673b3332c31ca3
class URL(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.routemap = () <NEW_LINE> <DEDENT> def add(self, route, handler): <NEW_LINE> <INDENT> self.routemap = self.routemap + (route + '(/\w*)?(/\w*/?)?', handler) <NEW_LINE> <DEDENT> def get_map(self): <NEW_LINE> <INDENT> return self.routemap
Builds the URLs of the API nodes.
62599070f9cc0f698b1c5f1d
class Network(NetworkBase): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> extract_values = True <NEW_LINE> debug = False <NEW_LINE> if "extract_values" in kwargs: <NEW_LINE> <INDENT> extract_values = kwargs.pop("extract_values") <NEW_LINE> <DEDENT> if "debug" in kwargs: <NEW_LINE> <INDENT> debug = kwargs.pop("debug") <NEW_LINE> <DEDENT> status_neopixel = kwargs.pop("status_neopixel", None) <NEW_LINE> if status_neopixel: <NEW_LINE> <INDENT> status_led = neopixel.NeoPixel(status_neopixel, 1, brightness=0.2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> status_led = None <NEW_LINE> <DEDENT> kwargs["status_led"] = status_led <NEW_LINE> wifi = WiFi(**kwargs) <NEW_LINE> super().__init__( wifi, extract_values=extract_values, debug=debug, ) <NEW_LINE> gc.collect() <NEW_LINE> <DEDENT> @property <NEW_LINE> def ip_address(self): <NEW_LINE> <INDENT> return self._wifi.esp.pretty_ip(self._wifi.esp.ip_address)
Class representing the Adafruit RGB Matrix Portal. :param status_neopixel: The pin for the status NeoPixel. Use ``board.NEOPIXEL`` for the on-board NeoPixel. Defaults to ``None``, meaning no status LED. :param esp: A passed ESP32 object. Can be used in cases where the ESP32 chip needs to be used before calling the PyPortal class. Defaults to ``None``. :param busio.SPI external_spi: A previously declared SPI object. Defaults to ``None``. :param bool extract_values: If True, single-length fetched values are automatically extracted from lists and tuples. Defaults to ``True``. :param debug: Turn on debug print-outs. Defaults to False.
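A hedged usage sketch on CircuitPython hardware, assuming the adafruit_matrixportal library and a configured secrets.py; the on-board NeoPixel pin is passed as described above:
import board
from adafruit_matrixportal.network import Network

network = Network(status_neopixel=board.NEOPIXEL, debug=True)
print("IP address:", network.ip_address)  # meaningful once the WiFi connection is up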
6259907097e22403b383c7a8
class DataVariable(VariableInstance): <NEW_LINE> <INDENT> def __init__(self,source,name,units,data): <NEW_LINE> <INDENT> VariableInstance.__init__(self,source,name,units) <NEW_LINE> self._data = data <NEW_LINE> <DEDENT> def get_data(self,period,extent): <NEW_LINE> <INDENT> new_extent = extent.translate_to_origin(global_georef()) <NEW_LINE> return np.ma.MaskedArray(data=self._data[new_extent.indices],mask=new_extent.mask)
Wrapper for creation of AWRA variables around existing data
625990705fc7496912d48ebb
class SpawnLocationGrader(CompoundGrader): <NEW_LINE> <INDENT> def __init__(self, timeout_seconds=8.0): <NEW_LINE> <INDENT> super().__init__([ PassOnSpawn(), FailOnTimeout(timeout_seconds), ])
Checks that the car gets to the ball in a reasonable amount of time.
625990700a50d4780f706a14
class SOURce(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "SOURce" <NEW_LINE> args = [] <NEW_LINE> class ALC(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "ALC" <NEW_LINE> args = [] <NEW_LINE> class BANDwidth(SCPINode, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "BANDwidth" <NEW_LINE> args = [] <NEW_LINE> <DEDENT> BANDwidth = BANDwidth() <NEW_LINE> <DEDENT> ALC = ALC() <NEW_LINE> class ATTenuation(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "ATTenuation" <NEW_LINE> args = ["1"] <NEW_LINE> class STEP(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "STEP" <NEW_LINE> args = [] <NEW_LINE> class INCRement(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "INCRement" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> INCRement = INCRement() <NEW_LINE> <DEDENT> STEP = STEP() <NEW_LINE> <DEDENT> ATTenuation = ATTenuation() <NEW_LINE> class LEVel(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "LEVel" <NEW_LINE> args = ["1"] <NEW_LINE> class STEP(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "STEP" <NEW_LINE> args = [] <NEW_LINE> class INCRement(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "INCRement" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> INCRement = INCRement() <NEW_LINE> <DEDENT> STEP = STEP() <NEW_LINE> <DEDENT> LEVel = LEVel() <NEW_LINE> class MUTing(SCPINode, SCPIBool): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "MUTing" <NEW_LINE> args = ["1", "ON", "OFF"] <NEW_LINE> <DEDENT> MUTing = MUTing() <NEW_LINE> class STATe(SCPINode, SCPIBool): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "STATe" <NEW_LINE> args = ["1", "ON", "OFF"] <NEW_LINE> <DEDENT> STATe = STATe() <NEW_LINE> class ULIMit(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "ULIMit" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> ULIMit = ULIMit()
AMPLitude:SOURce Arguments:
62599070dd821e528d6da5d4
class VATINValidator: <NEW_LINE> <INDENT> messages = { 'country_code': _('%(country_code)s is not a valid country code.'), 'vatin': _('%(vatin)s is not a valid VAT identification number.'), } <NEW_LINE> def __call__(self, value): <NEW_LINE> <INDENT> country_code, number = self.clean(value) <NEW_LINE> try: <NEW_LINE> <INDENT> match = re.match(VATIN_PATTERN_MAP[country_code], value) <NEW_LINE> if not match: <NEW_LINE> <INDENT> raise ValidationError( self.messages['vatin'], code='vatin', params={'vatin': value} ) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise ValidationError( self.messages['country_code'], code='country_code', params={'country_code': country_code} ) <NEW_LINE> <DEDENT> <DEDENT> def clean(self, value): <NEW_LINE> <INDENT> return value[:VATIN_COUNTRY_CODE_LENGTH], value[VATIN_COUNTRY_CODE_LENGTH:]
A validator for VAT identification numbers. Currently only supports European VIES VAT identification numbers. See https://en.wikipedia.org/wiki/VAT_identification_number
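A hypothetical usage sketch of the validator above; whether a given value passes depends on VATIN_PATTERN_MAP, which is defined outside this record:
from django.core.exceptions import ValidationError

validator = VATINValidator()
try:
    validator('DE123456789')   # two-letter country code followed by the national number
except ValidationError as exc:
    print(exc.messages)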
6259907067a9b606de5476f6
class TestContentSchema(unittest.TestCase): <NEW_LINE> <INDENT> layer = YOUNGLIVES_RESEARCH_TYPES_INTEGRATION_TESTING <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.portal = self.layer['portal'] <NEW_LINE> setRoles(self.portal, TEST_USER_ID, ['Manager']) <NEW_LINE> self.portal.invokeFactory('ResearchDatabase', 'rd1') <NEW_LINE> self.rd1 = getattr(self.portal, 'rd1') <NEW_LINE> self.rd1.invokeFactory('Research', 'r1') <NEW_LINE> self.r1 = getattr(self.rd1, 'r1') <NEW_LINE> <DEDENT> def testSchema(self): <NEW_LINE> <INDENT> r1 = self.r1 <NEW_LINE> schema = r1.schema <NEW_LINE> field_ids = schema.keys() <NEW_LINE> assert 'referenceNumber' in field_ids <NEW_LINE> reference_field = schema['referenceNumber']
Test content type schema
625990704a966d76dd5f0790
class StubProductDirectory(StubCertificateDirectory, ProductDirectory): <NEW_LINE> <INDENT> path = "this/is/a/stub/product/cert/dir" <NEW_LINE> def __init__(self, certificates=None, pids=None): <NEW_LINE> <INDENT> if pids is not None: <NEW_LINE> <INDENT> certificates = [] <NEW_LINE> for pid in pids: <NEW_LINE> <INDENT> certificates.append(StubProductCertificate(StubProduct(pid))) <NEW_LINE> <DEDENT> <DEDENT> super(StubProductDirectory, self).__init__(certificates) <NEW_LINE> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> pass
Stub for mimicking the behavior of an on-disk certificate directory. Can be used for both entitlement and product directories as needed.
625990704f88993c371f1173
class AggregateServiceMixin(object): <NEW_LINE> <INDENT> def aggregate(self, output, group_by=None, filter=None, rules=None, sorts=None): <NEW_LINE> <INDENT> data = { 'output': output } <NEW_LINE> if group_by is not None: <NEW_LINE> <INDENT> data['groupBy'] = group_by <NEW_LINE> <DEDENT> if filter is not None: <NEW_LINE> <INDENT> data['filter'] = str(filter) <NEW_LINE> <DEDENT> if rules is not None: <NEW_LINE> <INDENT> data['rules'] = dict([(k, str(v)) for k, v in six.iteritems(rules)]) <NEW_LINE> <DEDENT> if sorts is not None: <NEW_LINE> <INDENT> data['sorts'] = sorts <NEW_LINE> <DEDENT> return self.request('POST', '/aggregate', data, None)
A mixin to support aggregation.
625990707d847024c075dc7f
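An illustrative sketch of mixing AggregateServiceMixin (above) into a client; the transport base class and all argument values are hypothetical stand-ins, since the mixin only requires a request() hook.

class StubTransport:
    # hypothetical transport supplying the request() hook the mixin expects
    def request(self, method, path, data, headers):
        print(method, path, data)
        return {"ok": True}

class AnalyticsClient(StubTransport, AggregateServiceMixin):
    pass

client = AnalyticsClient()
result = client.aggregate(
    output="count",
    group_by=["country"],
    filter="status == 'active'",      # anything with a meaningful str() works
    rules={"recent": "age < 30"},     # rule values are stringified by the mixin
    sorts=[{"field": "count", "order": "desc"}],  # passed through unchanged
)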
class TwitterUserKind(db.Model): <NEW_LINE> <INDENT> integerId = db.IntegerProperty() <NEW_LINE> screenName = db.StringProperty() <NEW_LINE> accessToken = db.StringProperty()
Twitter user is identified by integerId.
62599070ec188e330fdfa149
class HostCorrection(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Delta_M = 0.08 <NEW_LINE> <DEDENT> def H_low(self, SNe): <NEW_LINE> <INDENT> H = np.zeros((3*len(SNe), 1)) <NEW_LINE> selection = np.where((SNe['3rdvar'] >= 9) & (SNe['3rdvar'] < 10))[0] <NEW_LINE> H[3*selection] = 1. <NEW_LINE> return H <NEW_LINE> <DEDENT> def H_high(self, SNe): <NEW_LINE> <INDENT> H = np.zeros((3*len(SNe), 1)) <NEW_LINE> selection = np.where((SNe['3rdvar'] > 10) & (SNe['3rdvar'] <= 11))[0] <NEW_LINE> H[3*selection] = 1. <NEW_LINE> return H <NEW_LINE> <DEDENT> def covmat_host(self, SNe): <NEW_LINE> <INDENT> nSNe = len(SNe) <NEW_LINE> chost = np.zeros((3*nSNe, 3*nSNe)) <NEW_LINE> for i in range(nSNe): <NEW_LINE> <INDENT> if np.sign(SNe['3rdvar'][i]-SNe['d3rdvar'][i]-10) != np.sign(SNe['3rdvar'][i] +SNe['d3rdvar'][i]-10): <NEW_LINE> <INDENT> chost[3*i][3*i] += self.Delta_M**2 <NEW_LINE> <DEDENT> <DEDENT> chost += self.Delta_M**2 * np.dot(self.H_low(SNe), self.H_low(SNe).T) <NEW_LINE> chost += self.Delta_M**2 * np.dot(self.H_high(SNe), self.H_high(SNe).T) <NEW_LINE> return chost
Functions to construct the covariance matrix for the uncertainty in the host mass correction.
62599070796e427e5385001f
class PublisherClient(object): <NEW_LINE> <INDENT> def __init__( self, project_id: Text, topic_name: Text, credentials: Optional[google.auth.credentials.Credentials] = None ) -> None: <NEW_LINE> <INDENT> self._publisher = pubsub_v1.PublisherClient(credentials=credentials) <NEW_LINE> self._topic = self._publisher.topic_path(project_id, topic_name) <NEW_LINE> <DEDENT> def Publish( self, data: bytes, attributes: Optional[Dict[Text, Text]] = None ) -> google.api_core.future.Future: <NEW_LINE> <INDENT> attributes = attributes if attributes else {} <NEW_LINE> return self._publisher.publish(self._topic, data=data, **attributes)
Pub/Sub publisher client using google.cloud.pubsub_v1.
625990704e4d562566373cad
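A short usage sketch for the PublisherClient wrapper above; the project and topic names are placeholders and default application credentials are assumed.

client = PublisherClient(project_id="my-project", topic_name="my-topic")
future = client.Publish(b"payload bytes", attributes={"origin": "example"})
message_id = future.result()  # blocks until Pub/Sub acknowledges the publish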
class AlbumInfoForm(forms.Form): <NEW_LINE> <INDENT> COUNTRY_CHOICES = [(i, i) for i in COUNTRIES] <NEW_LINE> YEAR_CHOICES = [(i, i) for i in range(1955, datetime.now().year)] <NEW_LINE> artist = forms.CharField(label=_("Artist"), max_length=128, required=True) <NEW_LINE> album = forms.CharField(label=_("Album"), max_length=128, required=True) <NEW_LINE> country = forms.ChoiceField(label=_("Country"), choices=COUNTRY_CHOICES) <NEW_LINE> from_year = forms.ChoiceField(label=_("From Year"), choices=YEAR_CHOICES, initial=1960)
Album info search form.
625990704a966d76dd5f0791
class CorsBypassPageSet(page_set_module.PageSet): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(CorsBypassPageSet, self).__init__( archive_data_file='../data/chrome_proxy_bypass.json') <NEW_LINE> urls_list = [ 'http://aws1.mdw.la/test/cors/', ] <NEW_LINE> for url in urls_list: <NEW_LINE> <INDENT> self.AddPage(CorsBypassPage(url, self))
Chrome proxy test sites
62599070d486a94d0ba2d865
class SimpleStringState(object): <NEW_LINE> <INDENT> insert_sre_methods(locals(), 'simple') <NEW_LINE> def __init__(self, string, start=0, end=-1): <NEW_LINE> <INDENT> self.string = string <NEW_LINE> if end < 0: <NEW_LINE> <INDENT> end = len(string) <NEW_LINE> <DEDENT> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def get_char_ord(self, p): <NEW_LINE> <INDENT> return ord(self.string[p]) <NEW_LINE> <DEDENT> def lower(self, char_ord): <NEW_LINE> <INDENT> return getlower(char_ord, 0)
Prebuilt state for matching strings, for testing and for stand-alone RPython applications that don't worry about unicode.
6259907076e4537e8c3f0e2b
class CheckuserCmd(CmdRouter): <NEW_LINE> <INDENT> optional_args = [] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(CheckuserCmd, self).__init__() <NEW_LINE> self.parser = ThrowingArgumentParser(prog='checkuser', usage='%(prog)s') <NEW_LINE> <DEDENT> def registe_parser(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def parse_args(self, argument): <NEW_LINE> <INDENT> return self.parser.parse_args(argument.split()) <NEW_LINE> <DEDENT> def print_help(self): <NEW_LINE> <INDENT> self.parser.print_help()
The 'checkuser' command: check the current users. Command usage: :> 'checkuser'
62599070442bda511e95d9ab
class handleexcel: <NEW_LINE> <INDENT> def __init__(self,filename,sheetname): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.sheetname = sheetname <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> self.wb = openpyxl.load_workbook(self.filename) <NEW_LINE> self.sh = self.wb[self.sheetname] <NEW_LINE> <DEDENT> def read_data(self): <NEW_LINE> <INDENT> self.open() <NEW_LINE> row = list(self.sh.rows) <NEW_LINE> cases = [] <NEW_LINE> title = [] <NEW_LINE> for a in row[0]: <NEW_LINE> <INDENT> title.append(a.value) <NEW_LINE> <DEDENT> for a in row[1:]: <NEW_LINE> <INDENT> data = [] <NEW_LINE> for b in a: <NEW_LINE> <INDENT> data.append(b.value) <NEW_LINE> <DEDENT> cases.append(dict(zip(title,data))) <NEW_LINE> <DEDENT> self.wb.close() <NEW_LINE> return cases <NEW_LINE> <DEDENT> def read_data_obj(self): <NEW_LINE> <INDENT> self.open() <NEW_LINE> row = list(self.sh.rows) <NEW_LINE> cases = [] <NEW_LINE> title = [] <NEW_LINE> for a in row[0]: <NEW_LINE> <INDENT> title.append(a.value) <NEW_LINE> <DEDENT> for a in row[1:]: <NEW_LINE> <INDENT> data = [] <NEW_LINE> for b in a: <NEW_LINE> <INDENT> data.append(b.value) <NEW_LINE> <DEDENT> case = case_data() <NEW_LINE> for c in zip(title,data): <NEW_LINE> <INDENT> print(case) <NEW_LINE> setattr(case,c[0],c[1]) <NEW_LINE> <DEDENT> cases.append(case) <NEW_LINE> <DEDENT> self.wb.close() <NEW_LINE> return cases <NEW_LINE> <DEDENT> def write_data(self,row,column,value): <NEW_LINE> <INDENT> self.open() <NEW_LINE> self.sh.cell(row = row,column = column,value = value) <NEW_LINE> self.wb.save(self.filename) <NEW_LINE> self.wb.close()
Initialization function: the instance methods in this class all use the instance attributes, whose values differ from instance to instance.
62599070bf627c535bcb2d73
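A hedged usage sketch for the handleexcel helper above; the workbook filename, sheet name, and cell coordinates are placeholders.

handler = handleexcel("testcases.xlsx", "Sheet1")
cases = handler.read_data()                 # list of dicts keyed by the header row
handler.write_data(row=2, column=6, value="passed")   # write a result back into the sheet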
class Question(UpstreamAdapter): <NEW_LINE> <INDENT> _adapter_name = "question" <NEW_LINE> _output_format = "text+code" <NEW_LINE> _cache_needed = True <NEW_LINE> def _get_page(self, topic, request_options=None): <NEW_LINE> <INDENT> if not os.path.exists(CONFIG["path.internal.bin.upstream"]): <NEW_LINE> <INDENT> self._output_format = "ansi" <NEW_LINE> return UpstreamAdapter._get_page(self, topic, request_options=request_options) <NEW_LINE> <DEDENT> topic = topic.replace('+', ' ') <NEW_LINE> if '/' in topic: <NEW_LINE> <INDENT> section_name, topic = topic.split('/', 1) <NEW_LINE> if ':' in section_name: <NEW_LINE> <INDENT> _, section_name = section_name.split(':', 1) <NEW_LINE> <DEDENT> section_name = SO_NAME.get(section_name, section_name) <NEW_LINE> topic = "%s/%s" % (section_name, topic) <NEW_LINE> <DEDENT> topic = re.sub(r"(?<!-)-", ' ', topic) <NEW_LINE> topic_words = topic.split() <NEW_LINE> topic = " ".join(topic_words) <NEW_LINE> lang = 'en' <NEW_LINE> try: <NEW_LINE> <INDENT> query_text = topic <NEW_LINE> query_text = re.sub('^[^/]*/+', '', query_text.rstrip('/')) <NEW_LINE> query_text = re.sub('/[0-9]+$', '', query_text) <NEW_LINE> query_text = re.sub('/[0-9]+$', '', query_text) <NEW_LINE> detector = Detector(query_text) <NEW_LINE> supposed_lang = detector.languages[0].code <NEW_LINE> if len(topic_words) > 2 or supposed_lang in ['az', 'ru', 'uk', 'de', 'fr', 'es', 'it', 'nl']: <NEW_LINE> <INDENT> lang = supposed_lang <NEW_LINE> <DEDENT> if supposed_lang.startswith('zh_') or supposed_lang == 'zh': <NEW_LINE> <INDENT> lang = 'zh' <NEW_LINE> <DEDENT> elif supposed_lang.startswith('pt_'): <NEW_LINE> <INDENT> lang = 'pt' <NEW_LINE> <DEDENT> if supposed_lang in ['ja', 'ko']: <NEW_LINE> <INDENT> lang = supposed_lang <NEW_LINE> <DEDENT> <DEDENT> except UnknownLanguage: <NEW_LINE> <INDENT> print("Unknown language (%s)" % query_text) <NEW_LINE> <DEDENT> if lang != 'en': <NEW_LINE> <INDENT> topic = ['--human-language', lang, topic] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> topic = [topic] <NEW_LINE> <DEDENT> cmd = [CONFIG["path.internal.bin.upstream"]] + topic <NEW_LINE> proc = Popen(cmd, stdin=open(os.devnull, "r"), stdout=PIPE, stderr=PIPE) <NEW_LINE> answer = proc.communicate()[0].decode('utf-8') <NEW_LINE> return answer <NEW_LINE> <DEDENT> def get_list(self, prefix=None): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def is_found(self, topic): <NEW_LINE> <INDENT> return True
Answer to a programming language question, using Stackoverflow as the main data source. Heavy lifting is done by an external program `CONFIG["path.internal.bin.upstream"]`. If the program is not found, fall back to the superclass `UpstreamAdapter`, which queries the upstream server (by default https://cheat.sh/) for the answer.
625990701b99ca4002290189
class Resume(AbstractCommand): <NEW_LINE> <INDENT> def setup(self, subparsers): <NEW_LINE> <INDENT> parser = subparsers.add_parser( "resume", help="Pause a cluster by shutting down existing " "VMs, retaining disks and configuration.") <NEW_LINE> parser.set_defaults(func=self) <NEW_LINE> parser.add_argument('cluster', help='name of the cluster') <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> cluster_name = self.params.cluster <NEW_LINE> creator = make_creator(self.params.config, storage_path=self.params.storage) <NEW_LINE> try: <NEW_LINE> <INDENT> cluster = creator.load_cluster(cluster_name) <NEW_LINE> <DEDENT> except (ClusterNotFound, ConfigurationError) as e: <NEW_LINE> <INDENT> log.error("Cannot load cluster `%s`: %s", cluster_name, e) <NEW_LINE> return os.EX_NOINPUT <NEW_LINE> <DEDENT> print("Resuming cluster `%s` ..." % cluster_name) <NEW_LINE> cluster.resume()
Resume the previously paused nodes in the cluster, reusing the retained disks and configuration.
6259907055399d3f05627dc0
class Address(BaseModel): <NEW_LINE> <INDENT> user = models.ForeignKey('User', on_delete=models.CASCADE, verbose_name='所属账户') <NEW_LINE> receiver = models.CharField(max_length=20, verbose_name='收件人') <NEW_LINE> addr = models.CharField(max_length=256, verbose_name='收件地址') <NEW_LINE> zip_code = models.CharField(max_length=6, null=True, verbose_name='邮政编码') <NEW_LINE> phone = models.CharField(max_length=11, verbose_name='联系电话') <NEW_LINE> is_default = models.BooleanField(default=False, verbose_name='是否默认') <NEW_LINE> objects = AddressManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'df_address' <NEW_LINE> verbose_name = '地址' <NEW_LINE> verbose_name_plural = verbose_name
Address model.
62599070fff4ab517ebcf0c1
class TimedQueue: <NEW_LINE> <INDENT> def __init__(self, queue, time_limit): <NEW_LINE> <INDENT> self.__queue = queue <NEW_LINE> self.__time_limit = time_limit / 1000 <NEW_LINE> self.__stop_time = None <NEW_LINE> self.agent = None <NEW_LINE> <DEDENT> def start_timer(self): <NEW_LINE> <INDENT> self.__stop_time = self.__time_limit + time.perf_counter() <NEW_LINE> <DEDENT> def put(self, item, block=True, timeout=None): <NEW_LINE> <INDENT> if self.__stop_time and time.perf_counter() > self.__stop_time: <NEW_LINE> <INDENT> raise StopSearch <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.__queue.get_nowait() <NEW_LINE> <DEDENT> except Empty: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.__queue.put_nowait((getattr(self.agent, "context", None), item)) <NEW_LINE> <DEDENT> def put_nowait(self, item): <NEW_LINE> <INDENT> self.put(item, block=False) <NEW_LINE> <DEDENT> def get(self, block=True, timeout=None): <NEW_LINE> <INDENT> return self.__queue.get(block=block, timeout=timeout) <NEW_LINE> <DEDENT> def get_nowait(self): <NEW_LINE> <INDENT> return self.get(block=False) <NEW_LINE> <DEDENT> def qsize(self): return self.__queue.qsize() <NEW_LINE> def empty(self): return self.__queue.empty() <NEW_LINE> def full(self): return self.__queue.full()
Modified Queue class to block .put() after a time limit expires, and to include both a context object & action choice in the queue.
62599070f548e778e596ce34
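A sketch of driving the TimedQueue above from a search worker; the wrapped queue, the agent object, and the item value are all stand-ins, and StopSearch is assumed to come from the same module as TimedQueue.

from queue import Queue

class _Agent:
    context = None          # the queue records this context alongside each item

q = TimedQueue(Queue(maxsize=1), time_limit=150)   # time limit is given in milliseconds
q.agent = _Agent()
q.start_timer()
q.put(("move", 3, 4))       # placeholder item; raises StopSearch once the limit expires
context, item = q.get()     # retrieves the (context, item) pair that was stored last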
class Alternation(ParseItem): <NEW_LINE> <INDENT> def __init__(self, optional=False): <NEW_LINE> <INDENT> self.optional = optional <NEW_LINE> self.options = [] <NEW_LINE> self.add_alternate() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> seps = "[]" if self.optional else "()" <NEW_LINE> return seps[0] + "|".join(str(i) for i in self.options) + seps[1] <NEW_LINE> <DEDENT> def finalise(self): <NEW_LINE> <INDENT> if not self.options: <NEW_LINE> <INDENT> raise ParseError("empty alternation") <NEW_LINE> <DEDENT> for option in self.options: <NEW_LINE> <INDENT> option.finalise() <NEW_LINE> <DEDENT> <DEDENT> def add(self, child): <NEW_LINE> <INDENT> assert isinstance(child, ParseItem) <NEW_LINE> self.options[-1].add(child) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> return self.options[-1].pop() <NEW_LINE> <DEDENT> def add_alternate(self): <NEW_LINE> <INDENT> self.options.append(Sequence()) <NEW_LINE> <DEDENT> def match(self, compare_items, fields=None, completions=None, trace=None, context=None): <NEW_LINE> <INDENT> tracer = CallTracer(trace, self, compare_items) <NEW_LINE> errors = set() <NEW_LINE> for option in self.options: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return option.match(compare_items, fields=fields, completions=completions, trace=trace, context=context) <NEW_LINE> <DEDENT> except MatchError as e: <NEW_LINE> <INDENT> errors.add(str(e)) <NEW_LINE> <DEDENT> <DEDENT> if self.optional: <NEW_LINE> <INDENT> return compare_items <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tracer.fail(compare_items) <NEW_LINE> raise MatchError(" and ".join(errors)) <NEW_LINE> <DEDENT> <DEDENT> def match_json(self, json_items, args=None, fields=None, trace=None, context=None): <NEW_LINE> <INDENT> tracer = CallTracer(trace, self, json_items) <NEW_LINE> errors = set() <NEW_LINE> for option in self.options: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return option.match_json(json_items, args=args, fields=fields, trace=trace, context=context) <NEW_LINE> <DEDENT> except MatchError as e: <NEW_LINE> <INDENT> errors.add(str(e)) <NEW_LINE> <DEDENT> <DEDENT> if self.optional: <NEW_LINE> <INDENT> return json_items <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tracer.fail(json_items) <NEW_LINE> raise MatchError(" and ".join(errors))
Matches any of a list of alternative Sequence items. Alternation instances can also be marked optional by setting the ``optional`` parameter to ``True`` in the constructor - this means that if none of the options match, they'll return success without consuming any items instead of raising :class:`MatchError`. Note that matching is greedy with no back-tracking, so if an optional item matches, the command line argument(s) will always be consumed even if this leads to a MatchError later in the string which wouldn't have occurred had the optional item chosen to match nothing instead.
62599070e1aae11d1e7cf460
class ControllerGraph(object): <NEW_LINE> <INDENT> def __init__(self, params, starts, goal, controller, state_dim=3): <NEW_LINE> <INDENT> self._params = params <NEW_LINE> self._controller = controller <NEW_LINE> self._starts = starts <NEW_LINE> self._goal = goal <NEW_LINE> self._g = StateGraph(state_dim=state_dim) <NEW_LINE> self._best_trajs = [] <NEW_LINE> self._node_id = 0 <NEW_LINE> self._max_conc = 1.0 <NEW_LINE> self._max_es = 1.0 <NEW_LINE> self._min_es = 0.0 <NEW_LINE> self._gp = gaussian_process.GaussianProcess(corr='squared_exponential', theta0=1e-2, thetaL=1e-4, thetaU=1e-1) <NEW_LINE> <DEDENT> def initialize_state_graph(self, R, terminal, samples=None): <NEW_LINE> <INDENT> if not isinstance(R, Callable): <NEW_LINE> <INDENT> raise TypeError('*R* must be a callable') <NEW_LINE> <DEDENT> if not isinstance(terminal, Callable): <NEW_LINE> <INDENT> raise TypeError('*terminal* must be a callable') <NEW_LINE> <DEDENT> self._g.clear() <NEW_LINE> self._fixed_init(R, terminal, samples) <NEW_LINE> <DEDENT> def build_graph(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def states(self): <NEW_LINE> <INDENT> return self._g.nodes <NEW_LINE> <DEDENT> def all_actions(self): <NEW_LINE> <INDENT> return self._g.all_edges <NEW_LINE> <DEDENT> def state_actions(self, state): <NEW_LINE> <INDENT> return self._g.out_edges(state) <NEW_LINE> <DEDENT> def _fixed_init(self, R, terminal, samples): <NEW_LINE> <INDENT> CMAX = 100 <NEW_LINE> RMAX = 1 <NEW_LINE> for start in self._starts: <NEW_LINE> <INDENT> self._g.add_node(nid=self._node_id, data=start, cost=0, priority=1, V=RMAX, pi=0, Q=[], ntype='start') <NEW_LINE> self._node_id += 1 <NEW_LINE> <DEDENT> self._g.add_node(nid=self._node_id, data=self._goal, cost=-CMAX, priority=1, V=RMAX, pi=0, Q=[], ntype='goal') <NEW_LINE> self._node_id += 1 <NEW_LINE> init_samples = list(samples) <NEW_LINE> for sample in init_samples: <NEW_LINE> <INDENT> self._g.add_node(nid=self._node_id, data=sample, cost=-CMAX, priority=1, V=RMAX, pi=0, Q=[], ntype='simple') <NEW_LINE> self._node_id += 1 <NEW_LINE> <DEDENT> for n in self._g.nodes: <NEW_LINE> <INDENT> for m in self._g.nodes: <NEW_LINE> <INDENT> if n == m or terminal(n): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ndata, mdata = self._g.gna(n, 'data'), self._g.gna(m, 'data') <NEW_LINE> traj = self._controller.trajectory(ndata, mdata) <NEW_LINE> d = trajectory_length(traj) <NEW_LINE> r, phi = 1, [0, 1, 1] <NEW_LINE> self._g.add_edge(source=n, target=m, reward=r, duration=d, phi=phi, traj=traj) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _traj_init(self, R, terminal, trajectories): <NEW_LINE> <INDENT> pass
A Controller Graph. A graph-based representation of continuous-state MDPs built by sampling a few states and connecting them using local controllers (which can be seen as Markov options). Generates a very sparse representation which is efficient and also allows for task constraints to be modeled directly into the MDP action space via these local controllers. Requires a partially specified MDP with a reward function, and a way to check the set of terminal states. The transition function role is handled by the local controller.
62599070627d3e7fe0e0872f
class Block(Sprite): <NEW_LINE> <INDENT> def __init__(self, screen, settings, color, x, y): <NEW_LINE> <INDENT> super(Block, self).__init__() <NEW_LINE> self.screen = screen <NEW_LINE> self.settings = settings <NEW_LINE> self.image = pygame.Surface((20, 20)) <NEW_LINE> self.image.fill(color) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.rect.x = x <NEW_LINE> self.rect.y = y <NEW_LINE> <DEDENT> def blitme(self): <NEW_LINE> <INDENT> self.screen.blit(self.image, self.rect)
A class representing one block (segment) of the snake.
625990703539df3088ecdb44
class BaseQuery(DotAccessDict): <NEW_LINE> <INDENT> pass
Object to represent any RFQ.
62599070a8370b77170f1c71
class LibvirtAOEVolumeDriver(libvirt_volume.LibvirtBaseVolumeDriver): <NEW_LINE> <INDENT> def __init__(self, host): <NEW_LINE> <INDENT> super(LibvirtAOEVolumeDriver, self).__init__(host, is_block_dev=True) <NEW_LINE> self.connector = connector.InitiatorConnector.factory( 'AOE', utils.get_root_helper(), device_scan_attempts=CONF.libvirt.num_aoe_discover_tries) <NEW_LINE> <DEDENT> def get_config(self, connection_info, disk_info): <NEW_LINE> <INDENT> conf = super(LibvirtAOEVolumeDriver, self).get_config(connection_info, disk_info) <NEW_LINE> conf.source_type = "block" <NEW_LINE> conf.source_path = connection_info['data']['device_path'] <NEW_LINE> return conf <NEW_LINE> <DEDENT> def connect_volume(self, connection_info, disk_info, instance): <NEW_LINE> <INDENT> LOG.debug("Calling os-brick to attach AoE Volume") <NEW_LINE> device_info = self.connector.connect_volume(connection_info['data']) <NEW_LINE> LOG.debug("Attached AoE volume %s", device_info) <NEW_LINE> connection_info['data']['device_path'] = device_info['path'] <NEW_LINE> <DEDENT> def disconnect_volume(self, connection_info, disk_dev, instance): <NEW_LINE> <INDENT> LOG.debug("calling os-brick to detach AoE Volume %s", connection_info) <NEW_LINE> self.connector.disconnect_volume(connection_info['data'], None) <NEW_LINE> LOG.debug("Disconnected AoE Volume %s", disk_dev) <NEW_LINE> super(LibvirtAOEVolumeDriver, self).disconnect_volume(connection_info, disk_dev, instance)
Driver to attach AoE volumes to libvirt.
6259907097e22403b383c7ab
class UserDeleteEquipTest(APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> td_john = TestData.user['john'] <NEW_LINE> self.superuser = User.objects.create_superuser(**td_john) <NEW_LINE> self.client.login(**td_john) <NEW_LINE> td_Haulotte = TestData.manufacture['Haulotte'] <NEW_LINE> self.client.post(reverse('manufacture-list'), td_Haulotte) <NEW_LINE> td_star_10 = copy.deepcopy(TestData.model['star-10']) <NEW_LINE> self.client.post(reverse('model-list'), td_star_10) <NEW_LINE> td_equip = TestData.equip['ME 112104'] <NEW_LINE> self.client.post(reverse('equip-list'), td_equip) <NEW_LINE> self.client.logout() <NEW_LINE> <DEDENT> def test_delete_equip(self): <NEW_LINE> <INDENT> response = self.client.get(reverse('equip-list')) <NEW_LINE> equip = response.data[0] <NEW_LINE> response = self.client.delete( reverse('equip-detail', args=[equip['id']])) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
A non-admin user cannot delete equip.
625990704f6381625f19a0fc
class TensorFlowRNNRegressor(TensorFlowEstimator, _sklearn.RegressorMixin): <NEW_LINE> <INDENT> def __init__(self, rnn_size, cell_type='gru', num_layers=1, input_op_fn=null_input_op_fn, initial_state=None, bidirectional=False, sequence_length=None, n_classes=0, batch_size=32, steps=50, optimizer='Adagrad', learning_rate=0.1, clip_gradients=5.0, continue_training=False, config=None, verbose=1): <NEW_LINE> <INDENT> self.rnn_size = rnn_size <NEW_LINE> self.cell_type = cell_type <NEW_LINE> self.input_op_fn = input_op_fn <NEW_LINE> self.bidirectional = bidirectional <NEW_LINE> self.num_layers = num_layers <NEW_LINE> self.sequence_length = sequence_length <NEW_LINE> self.initial_state = initial_state <NEW_LINE> super(TensorFlowRNNRegressor, self).__init__( model_fn=self._model_fn, n_classes=n_classes, batch_size=batch_size, steps=steps, optimizer=optimizer, learning_rate=learning_rate, clip_gradients=clip_gradients, continue_training=continue_training, config=config, verbose=verbose) <NEW_LINE> <DEDENT> def _model_fn(self, X, y): <NEW_LINE> <INDENT> return models.get_rnn_model(self.rnn_size, self.cell_type, self.num_layers, self.input_op_fn, self.bidirectional, models.linear_regression, self.sequence_length, self.initial_state)(X, y) <NEW_LINE> <DEDENT> @property <NEW_LINE> def bias_(self): <NEW_LINE> <INDENT> return self.get_tensor_value('linear_regression/bias:0') <NEW_LINE> <DEDENT> @property <NEW_LINE> def weights_(self): <NEW_LINE> <INDENT> return self.get_tensor_value('linear_regression/weights:0')
TensorFlow RNN Regressor model. Parameters: rnn_size: The size for rnn cell, e.g. size of your word embeddings. cell_type: The type of rnn cell, including rnn, gru, and lstm. num_layers: The number of layers of the rnn model. input_op_fn: Function that will transform the input tensor, such as creating word embeddings, byte list, etc. This takes an argument X for input and returns transformed X. bidirectional: boolean, whether this is a bidirectional rnn. sequence_length: If sequence_length is provided, dynamic calculation is performed. This saves computational time when unrolling past max sequence length. initial_state: An initial state for the RNN. This must be a tensor of appropriate type and shape [batch_size x cell.state_size]. batch_size: Mini batch size. steps: Number of steps to run over data. optimizer: Optimizer name (or class), for example "SGD", "Adam", "Adagrad". learning_rate: If this is a constant float value, no decay function is used. Instead, a customized decay function can be passed that accepts global_step as a parameter and returns a Tensor. e.g. exponential decay function: def exp_decay(global_step): return tf.train.exponential_decay( learning_rate=0.1, global_step, decay_steps=2, decay_rate=0.001) continue_training: when continue_training is True, once initialized, the model will be continually trained on every call of fit. config: RunConfig object that controls the configurations of the session, e.g. num_cores, gpu_memory_fraction, etc. verbose: Controls the verbosity, possible values: 0: the algorithm and debug information are muted. 1: trainer prints the progress. 2: log device placement is printed.
6259907099cbb53fe6832791
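A hedged construction sketch for the TensorFlowRNNRegressor above (a legacy TF Learn-style estimator); the input_op_fn shown is a placeholder identity transform and the hyperparameters are arbitrary.

def identity_input_op(x):
    # placeholder: a real input_op_fn would build embeddings or otherwise reshape X
    return x

regressor = TensorFlowRNNRegressor(
    rnn_size=32, cell_type="gru", num_layers=1,
    input_op_fn=identity_input_op,
    batch_size=16, steps=200, optimizer="Adagrad", learning_rate=0.1)
# Training and prediction are assumed to follow the scikit-learn-style interface
# inherited from TensorFlowEstimator (e.g. fit/predict), per the RegressorMixin base.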
class LBaseHessianModel(object): <NEW_LINE> <INDENT> def __init__(self, maxsize, diag0=1.0): <NEW_LINE> <INDENT> self.maxsize = maxsize <NEW_LINE> self.diag0 = diag0 <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return len(self.history) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.history = [] <NEW_LINE> self.backup = None <NEW_LINE> <DEDENT> def feed(self, delta_x, delta_g): <NEW_LINE> <INDENT> if len(delta_x.shape) != 1: <NEW_LINE> <INDENT> raise TypeError('The argument delta_x must be a vector.') <NEW_LINE> <DEDENT> if len(delta_x) < self.maxsize: <NEW_LINE> <INDENT> raise TypeError('The history size is too large for the dimension of delta_x.') <NEW_LINE> <DEDENT> if delta_x.shape != delta_g.shape: <NEW_LINE> <INDENT> raise TypeError('The arguments delta_x and delta_g must have the same shape.') <NEW_LINE> <DEDENT> self.history.append(HessianRecord(delta_x, delta_g)) <NEW_LINE> if len(self.history) > self.maxsize: <NEW_LINE> <INDENT> self.backup = self.history.pop(0) <NEW_LINE> <DEDENT> <DEDENT> def _restore_backup(self): <NEW_LINE> <INDENT> if self.backup is not None: <NEW_LINE> <INDENT> self.history.insert(0, self.backup) <NEW_LINE> self.backup = None <NEW_LINE> <DEDENT> del self.history[-1] <NEW_LINE> <DEDENT> def dot_hessian(self, v): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for all low-memory Hessian models. The base functionality is the storage and management of previous steps and corresponding gradient changes.
62599070009cb60464d02ddf
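A sketch of a hypothetical concrete subclass of the LBaseHessianModel above; the trivial dot_hessian is a stand-in for a real limited-memory update, and HessianRecord is assumed to be defined in the same module as the base class.

import numpy as np

class DiagonalHessianModel(LBaseHessianModel):
    def dot_hessian(self, v):
        # placeholder: scaled identity instead of an L-BFGS-style Hessian-vector product
        return self.diag0 * v

model = DiagonalHessianModel(maxsize=5)
model.feed(np.ones(10), 0.1 * np.ones(10))   # delta_x and delta_g must share their shape
print(model.size)                            # number of stored (delta_x, delta_g) records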
class FFmpegAudio(AudioSource): <NEW_LINE> <INDENT> def __init__(self, source, *, executable='ffmpeg', args, **subprocess_kwargs): <NEW_LINE> <INDENT> self._process = self._stdout = None <NEW_LINE> args = [executable, *args] <NEW_LINE> kwargs = {'stdout': subprocess.PIPE} <NEW_LINE> kwargs.update(subprocess_kwargs) <NEW_LINE> self._process = self._spawn_process(args, **kwargs) <NEW_LINE> self._stdout = self._process.stdout <NEW_LINE> <DEDENT> def _spawn_process(self, args, **subprocess_kwargs): <NEW_LINE> <INDENT> process = None <NEW_LINE> try: <NEW_LINE> <INDENT> process = subprocess.Popen(args, **subprocess_kwargs) <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> executable = args.partition(' ')[0] if isinstance(args, str) else args[0] <NEW_LINE> raise ClientException(executable + ' was not found.') from None <NEW_LINE> <DEDENT> except subprocess.SubprocessError as exc: <NEW_LINE> <INDENT> raise ClientException('Popen failed: {0.__class__.__name__}: {0}'.format(exc)) from exc <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return process <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> proc = self._process <NEW_LINE> if proc is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> log.info('Preparing to terminate ffmpeg process %s.', proc.pid) <NEW_LINE> try: <NEW_LINE> <INDENT> proc.kill() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> log.exception("Ignoring error attempting to kill ffmpeg process %s", proc.pid) <NEW_LINE> <DEDENT> if proc.poll() is None: <NEW_LINE> <INDENT> log.info('ffmpeg process %s has not terminated. Waiting to terminate...', proc.pid) <NEW_LINE> proc.communicate() <NEW_LINE> log.info('ffmpeg process %s should have terminated with a return code of %s.', proc.pid, proc.returncode) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.info('ffmpeg process %s successfully terminated with return code of %s.', proc.pid, proc.returncode) <NEW_LINE> <DEDENT> self._process = self._stdout = None
Represents an FFmpeg (or AVConv) based AudioSource. User created AudioSources using FFmpeg differently from how :class:`FFmpegPCMAudio` and :class:`FFmpegOpusAudio` work should subclass this. .. versionadded:: 1.3
6259907067a9b606de5476f7
class LoginForm(Form): <NEW_LINE> <INDENT> username = TextField('Username', [validators.Required('사용자명을 입력하세요.'), validators.Length( min=4, max=50, message='4자리 이상 50자리 이하로 입력하세요.')]) <NEW_LINE> password = PasswordField('New Password', [validators.Required('비밀번호를 입력하세요.'), validators.Length( min=4, max=50, message='4자리 이상 50자리 이하로 입력하세요.')]) <NEW_LINE> next_url = HiddenField('Next URL')
Validates the username and password entered on the login screen.
625990708a43f66fc4bf3a3d
class Bond(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, Bond, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, Bond, name) <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _pyUni10.new_Bond(*args) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> def assign(self, *args): <NEW_LINE> <INDENT> return _pyUni10.Bond_assign(self, *args) <NEW_LINE> <DEDENT> def type(self): <NEW_LINE> <INDENT> return _pyUni10.Bond_type(self) <NEW_LINE> <DEDENT> def dim(self): <NEW_LINE> <INDENT> return _pyUni10.Bond_dim(self) <NEW_LINE> <DEDENT> def __eq__(self, *args): <NEW_LINE> <INDENT> return _pyUni10.Bond___eq__(self, *args) <NEW_LINE> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> return _pyUni10.Bond___copy__(self) <NEW_LINE> <DEDENT> def cp(self): <NEW_LINE> <INDENT> return _pyUni10.Bond_cp(self) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return _pyUni10.Bond___repr__(self) <NEW_LINE> <DEDENT> def change(self, *args): <NEW_LINE> <INDENT> return _pyUni10.Bond_change(self, *args) <NEW_LINE> <DEDENT> def combine(self, *args): <NEW_LINE> <INDENT> return _pyUni10.Bond_combine(self, *args) <NEW_LINE> <DEDENT> def degeneracy(self): <NEW_LINE> <INDENT> return _pyUni10.Bond_degeneracy(self) <NEW_LINE> <DEDENT> def Qlist(self): <NEW_LINE> <INDENT> return _pyUni10.Bond_Qlist(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _pyUni10.delete_Bond <NEW_LINE> __del__ = lambda self : None;
Proxy of C++ uni10::Bond class
62599070adb09d7d5dc0be14
class Paginator(object): <NEW_LINE> <INDENT> def __init__(self, *args, **params): <NEW_LINE> <INDENT> from disqusapi import InterfaceNotDefined <NEW_LINE> if len(args) == 2: <NEW_LINE> <INDENT> self.method = args[0] <NEW_LINE> self.endpoint = args[1] <NEW_LINE> <DEDENT> elif len(args) == 1: <NEW_LINE> <INDENT> self.method = None <NEW_LINE> self.endpoint = args[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InterfaceNotDefined <NEW_LINE> <DEDENT> self.params = params <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for result in self(): <NEW_LINE> <INDENT> yield result <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, limit=None): <NEW_LINE> <INDENT> params = self.params.copy() <NEW_LINE> num = 0 <NEW_LINE> more = True <NEW_LINE> while more and (not limit or num < limit): <NEW_LINE> <INDENT> if self.method: <NEW_LINE> <INDENT> results = self.method(self.endpoint, **params) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results = self.endpoint(**params) <NEW_LINE> <DEDENT> for result in results: <NEW_LINE> <INDENT> if limit and num >= limit: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> num += 1 <NEW_LINE> yield result <NEW_LINE> <DEDENT> if results.cursor: <NEW_LINE> <INDENT> more = results.cursor['hasNext'] <NEW_LINE> params['cursor'] = results.cursor['next'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> more = False
Paginate through all entries: >>> paginator = Paginator(api, 'trends.listThreads', forum='disqus') >>> for result in paginator: >>> print result Paginate only up to a number of entries: >>> for result in paginator(limit=500): >>> print result
62599070aad79263cf43005f
class LazyDict(collections.abc.Mapping): <NEW_LINE> <INDENT> class Entry(SlotStruct): <NEW_LINE> <INDENT> __slots__ = ( 'lam', 'val', 'isset') <NEW_LINE> <DEDENT> class LazyView(collections.abc.MutableMapping): <NEW_LINE> <INDENT> def __init__(self, dict): <NEW_LINE> <INDENT> self._entries = dict <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> entry = self._entries[key] <NEW_LINE> if entry.lam is None: <NEW_LINE> <INDENT> return lambda: entry.val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return entry.lam <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, lam): <NEW_LINE> <INDENT> if not callable(lam): <NEW_LINE> <INDENT> raise ValueError("Items inserted into LazyDict.LazyView must be callable.") <NEW_LINE> <DEDENT> self._entries[key] = LazyDict.Entry(lam, None, False) <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> del self._entries[key] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._entries) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._entries) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self._entries = dict() <NEW_LINE> self.lazy = LazyDict.LazyView(self._entries) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> entry = self._entries[key] <NEW_LINE> if not entry.isset: <NEW_LINE> <INDENT> ret = entry.lam() <NEW_LINE> if ret is not None: <NEW_LINE> <INDENT> entry.val = ret <NEW_LINE> <DEDENT> entry.isset = True <NEW_LINE> <DEDENT> return entry.val <NEW_LINE> <DEDENT> def __setitem__(self, key, val): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> entry = self._entries[key] <NEW_LINE> entry.isset = True <NEW_LINE> entry.val = val <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self._entries[key] = LazyDict.Entry(None, val, True) <NEW_LINE> <DEDENT> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> del self._entries[key] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._entries) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._entries)
A dictionary that stores values that will be later lazily evaluated.
625990701b99ca400229018a
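A small usage sketch for the LazyDict above: callables registered through the `lazy` view are evaluated once, on first access; the loader function is hypothetical.

d = LazyDict()
d.lazy["config"] = lambda: load_config()   # load_config is a stand-in for an expensive loader
d["answer"] = 42                           # eagerly stored values work as usual
value = d["config"]                        # runs the lambda once; later reads reuse the result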
class User(db.Model, CRUDMixin): <NEW_LINE> <INDENT> __tablename__ = 'user' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> username = db.Column(db.String(32), index=True) <NEW_LINE> email = db.Column(db.String(255), unique=True) <NEW_LINE> password = db.Column(db.String(255)) <NEW_LINE> created = db.Column(db.DateTime) <NEW_LINE> confirmed = db.Column(db.Boolean, default=False) <NEW_LINE> confirmed_at = db.Column(db.DateTime()) <NEW_LINE> active = db.Column(db.Boolean()) <NEW_LINE> roles = db.relationship('Role', enable_typechecks=False, secondary=roles_users, ) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<User(username="{}")>'.format(self.username) <NEW_LINE> <DEDENT> def verify_password(self, password): <NEW_LINE> <INDENT> return pwd_context.verify(password, self.password) <NEW_LINE> <DEDENT> def generate_auth_token(self, expiration=600): <NEW_LINE> <INDENT> s = Serializer(flask.current_app.config['SECRET_KEY'], expires_in=expiration) <NEW_LINE> return s.dumps({'id': self.id}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def verify_auth_token(token): <NEW_LINE> <INDENT> s = Serializer(flask.current_app.config['SECRET_KEY']) <NEW_LINE> try: <NEW_LINE> <INDENT> data = s.loads(token) <NEW_LINE> <DEDENT> except SignatureExpired: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> except BadSignature: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> user = User.query.get(data['id']) <NEW_LINE> return user <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def register(cls, username, password, email=None, confirmed=False, roles=None): <NEW_LINE> <INDENT> new_user = cls( username=username, email=email, password=pwd_context.encrypt(password), confirmed=confirmed, ) <NEW_LINE> db.session.add(new_user) <NEW_LINE> db.session.commit() <NEW_LINE> if confirmed: <NEW_LINE> <INDENT> new_user.confirm() <NEW_LINE> <DEDENT> if roles: <NEW_LINE> <INDENT> for role_name in roles: <NEW_LINE> <INDENT> new_user.add_role(role_name) <NEW_LINE> <DEDENT> <DEDENT> flask.current_app.logger.debug("Created user {0}".format(username)) <NEW_LINE> return new_user <NEW_LINE> <DEDENT> def confirm(self): <NEW_LINE> <INDENT> self.confirmed_at = datetime.datetime.now() <NEW_LINE> self.active = True <NEW_LINE> <DEDENT> def add_role(self, role_name): <NEW_LINE> <INDENT> from .role import Role <NEW_LINE> self.roles.append(Role.query.filter_by(name=role_name).first()) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_system_users(cls): <NEW_LINE> <INDENT> from .role import Role <NEW_LINE> roles = [ "admin", "guest", ] <NEW_LINE> for i in roles: <NEW_LINE> <INDENT> if not Role.query.filter_by(name=i).first(): <NEW_LINE> <INDENT> db.session.add(Role(name=i)) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> <DEDENT> cls.register( username='admin', password='qqq', confirmed=True, roles=["admin"], ) <NEW_LINE> cls.register( username='guest', password='guest', confirmed=True, roles=["guest"], )
Used for both users and applications. http://blog.miguelgrinberg.com/post/restful-authentication-with-flask
6259907056b00c62f0fb4178
class JSONEncodedList(MutableType, JSONEncodedDict): <NEW_LINE> <INDENT> def copy_value(self, value): <NEW_LINE> <INDENT> if value : return value[:] <NEW_LINE> else : return []
Adds mutability to the list-typed JSONEncodedDict. Use sparingly.
625990704527f215b58eb5f4
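An illustrative column declaration using the JSONEncodedList type above; it assumes an older SQLAlchemy setup in which the JSONEncodedDict/MutableType bases are available, and the length argument is assumed to be forwarded to the underlying string type.

from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Job(Base):
    __tablename__ = "jobs"
    id = Column(Integer, primary_key=True)
    tags = Column(JSONEncodedList(255))    # persisted as a JSON-encoded string column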
class Grouper(object): <NEW_LINE> <INDENT> def __init__(self, stream, max_size=1000): <NEW_LINE> <INDENT> self.max_size = max_size <NEW_LINE> self.stream = stream <NEW_LINE> self.dict = collections.OrderedDict() <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return len(self.dict) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for event in self.stream: <NEW_LINE> <INDENT> while self.size > self.max_size: <NEW_LINE> <INDENT> self.dict.popitem(last=False) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.dict[event['id']].append(event) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.dict[event['id']] = [event] <NEW_LINE> <DEDENT> if '.' not in event['type']: <NEW_LINE> <INDENT> yield self.dict.pop(event['id']) <NEW_LINE> <DEDENT> <DEDENT> raise StopIteration
Utility for grouping events and sub-events together. Events fed into a Grouper are joined by their common 'id'. Encountering the parent event type will trigger emitting a list of all events and sub events for that single id. This assumes that the parent event will be the last encountered. So for example, you might do something like: stream = blueox.client.decode_stream(stdin) for event_group in client.Grouper(stream): ... do some processing of the event group ...
62599070a8370b77170f1c72
class XEP_0280(BasePlugin): <NEW_LINE> <INDENT> name = 'xep_0280' <NEW_LINE> description = 'XEP-0280: Message Carbons' <NEW_LINE> dependencies = set(['xep_0030', 'xep_0297']) <NEW_LINE> stanza = stanza <NEW_LINE> def plugin_init(self): <NEW_LINE> <INDENT> self.xmpp.register_handler( Callback('Carbon Received', StanzaPath('message/carbon_received'), self._handle_carbon_received)) <NEW_LINE> self.xmpp.register_handler( Callback('Carbon Sent', StanzaPath('message/carbon_sent'), self._handle_carbon_sent)) <NEW_LINE> register_stanza_plugin(Message, stanza.ReceivedCarbon) <NEW_LINE> register_stanza_plugin(Message, stanza.SentCarbon) <NEW_LINE> register_stanza_plugin(Message, stanza.PrivateCarbon) <NEW_LINE> register_stanza_plugin(Iq, stanza.CarbonEnable) <NEW_LINE> register_stanza_plugin(Iq, stanza.CarbonDisable) <NEW_LINE> <DEDENT> def plugin_end(self): <NEW_LINE> <INDENT> self.xmpp.remove_handler('Carbon Received') <NEW_LINE> self.xmpp.remove_handler('Carbon Sent') <NEW_LINE> self.xmpp.plugin['xep_0030'].del_feature(feature='urn:xmpp:carbons:1') <NEW_LINE> <DEDENT> def session_bind(self, jid): <NEW_LINE> <INDENT> self.xmpp.plugin['xep_0030'].add_feature('urn:xmpp:carbons:1') <NEW_LINE> <DEDENT> def _handle_carbon_received(self, msg): <NEW_LINE> <INDENT> self.xmpp.event('carbon_received', msg) <NEW_LINE> <DEDENT> def _handle_carbon_sent(self, msg): <NEW_LINE> <INDENT> self.xmpp.event('carbon_sent', msg) <NEW_LINE> <DEDENT> def enable(self, ifrom=None, block=True, timeout=None, callback=None): <NEW_LINE> <INDENT> iq = self.xmpp.Iq() <NEW_LINE> iq['type'] = 'set' <NEW_LINE> iq['from'] = ifrom <NEW_LINE> iq.enable('carbon_enable') <NEW_LINE> return iq.send(block=block, timeout=timeout, callback=callback) <NEW_LINE> <DEDENT> def disable(self, ifrom=None, block=True, timeout=None, callback=None): <NEW_LINE> <INDENT> iq = self.xmpp.Iq() <NEW_LINE> iq['type'] = 'set' <NEW_LINE> iq['from'] = ifrom <NEW_LINE> iq.enable('carbon_disable') <NEW_LINE> return iq.send(block=block, timeout=timeout, callback=callback)
XEP-0280 Message Carbons
625990703539df3088ecdb46
class MultipleSecretStoreLookupFailed(BarbicanException): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> msg = u._("Plugin lookup property 'stores_lookup_suffix' is not " "defined in service configuration") <NEW_LINE> super(MultipleSecretStoreLookupFailed, self).__init__(msg)
Raised when a plugin lookup suffix is missing during config read.
625990702ae34c7f260ac993
class DeleteLayerVersionResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId")
DeleteLayerVersion response structure.
62599070435de62698e9d6af
class Troubleshoot(object): <NEW_LINE> <INDENT> title = None <NEW_LINE> message = None <NEW_LINE> template = 'geosafe/analysis/troubleshoot/generic.html' <NEW_LINE> suggested_actions = []
Base class for troubleshooting. This class serves as a bare-bones base for generating suggestions.
62599070091ae356687064e0
class PrototypeForm(FlaskForm): <NEW_LINE> <INDENT> supported_functions = [('abc_def', 'Visible Command as Seen By User'), ('abc_xxx', 'Delete an Existing User'), ('abc_xxx', 'Add a New User'), ('abc_xxx', 'Modify an Existing User')] <NEW_LINE> work_function = SelectField(label='Select Function', choices=supported_functions, render_kw={"id": "js1", "class": "abc_def abc_xxx abc_xxx abc_xxx", "docs": docs['all']['work_function']}) <NEW_LINE> some_field = StringField(label='User Name', validators=[Optional()], render_kw={"class": "usr_sr usr_del usr_add usr_mod", "docs": docs['abc_def']['user_name']}) <NEW_LINE> def validate_on_submit(self, db_exec): <NEW_LINE> <INDENT> res = super().validate_on_submit() <NEW_LINE> if not res: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.work_function.data in 'abc_def abc_xxx': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif self.work_function.data == 'abc_xxx': <NEW_LINE> <INDENT> if self.filename.data == '': <NEW_LINE> <INDENT> self.errors['field_name'] = [f'Relevant Error message: ....'] <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return False
Use this as a base for creating a new Form.
62599070a17c0f6771d5d7ff
class BingoSquare(BingoLabel): <NEW_LINE> <INDENT> style_name = 'square-label' <NEW_LINE> highlight_names = [None, 'bingo-highlight', 'bingo-bad-highlight'] <NEW_LINE> def __init__(self, name, card, offset, number): <NEW_LINE> <INDENT> super(BingoSquare, self).__init__(name, card, offset, number) <NEW_LINE> self.is_called = False <NEW_LINE> self.marker = common.NamedSprite( 'bingo-marker', (self.x, self.y), scale=self.get_scale()) <NEW_LINE> self.is_focused = False <NEW_LINE> self.focus_marker = common.NamedSprite( 'bingo-close-highlight', (self.x, self.y), scale=self.get_scale() ) <NEW_LINE> <DEDENT> def draw(self, surface): <NEW_LINE> <INDENT> if self.is_called: <NEW_LINE> <INDENT> self.marker.draw(surface) <NEW_LINE> <DEDENT> super(BingoSquare, self).draw(surface) <NEW_LINE> if self.is_active and self.is_focused: <NEW_LINE> <INDENT> self.focus_marker.draw(surface) <NEW_LINE> <DEDENT> <DEDENT> def handle_click(self): <NEW_LINE> <INDENT> if not self.is_active: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.is_called = not self.is_called <NEW_LINE> if self.is_called: <NEW_LINE> <INDENT> self.card.call_square(self.text) <NEW_LINE> B.processEvent((events.E_PLAYER_PICKED, self)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.card.reset_square(self.text) <NEW_LINE> B.processEvent((events.E_PLAYER_UNPICKED, self)) <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> super(BingoSquare, self).reset() <NEW_LINE> self.is_called = False <NEW_LINE> self.is_focused = False <NEW_LINE> <DEDENT> def set_number(self, number): <NEW_LINE> <INDENT> self.text = number <NEW_LINE> self.label.set_text(str(number))
A square on a bingo card
625990705166f23b2e244c7e
class MovingDotEnv(gym.Env): <NEW_LINE> <INDENT> metadata = {'render.modes': ['human']} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(gym.Env, self).__init__() <NEW_LINE> self.dot_size = [2, 2] <NEW_LINE> self.random_start = True <NEW_LINE> self.max_steps = 1000 <NEW_LINE> self.observation_space = spaces.Box(low=0, high=255, shape=(210, 160, 3)) <NEW_LINE> self.centre = np.array([80, 105]) <NEW_LINE> self.viewer = None <NEW_LINE> self.seed() <NEW_LINE> self.ale = ALE() <NEW_LINE> seed = None <NEW_LINE> self.np_random, _ = seeding.np_random(seed) <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> if self.random_start: <NEW_LINE> <INDENT> x = self.np_random.randint(low=0, high=160) <NEW_LINE> y = self.np_random.randint(low=0, high=210) <NEW_LINE> self.pos = [x, y] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pos = [0, 0] <NEW_LINE> <DEDENT> self.steps = 0 <NEW_LINE> ob = self._get_ob() <NEW_LINE> return ob <NEW_LINE> <DEDENT> def seed(self, seed=None): <NEW_LINE> <INDENT> self.np_random, seed = seeding.np_random(seed) <NEW_LINE> return [seed] <NEW_LINE> <DEDENT> def _get_ob(self): <NEW_LINE> <INDENT> ob = np.zeros((210, 160, 3), dtype=np.uint8) <NEW_LINE> x = self.pos[0] <NEW_LINE> y = self.pos[1] <NEW_LINE> w = self.dot_size[0] <NEW_LINE> h = self.dot_size[1] <NEW_LINE> ob[y - h:y + h, x - w:x + w, :] = 255 <NEW_LINE> return ob <NEW_LINE> <DEDENT> def get_action_meanings(self): <NEW_LINE> <INDENT> return ['NOOP', 'DOWN', 'RIGHT', 'UP', 'LEFT'] <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> prev_pos = self.pos[:] <NEW_LINE> self._update_pos(action) <NEW_LINE> ob = self._get_ob() <NEW_LINE> self.steps += 1 <NEW_LINE> if self.steps < self.max_steps: <NEW_LINE> <INDENT> episode_over = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> episode_over = True <NEW_LINE> <DEDENT> dist1 = np.linalg.norm(prev_pos - self.centre) <NEW_LINE> dist2 = np.linalg.norm(self.pos - self.centre) <NEW_LINE> if dist2 < dist1: <NEW_LINE> <INDENT> reward = 1 <NEW_LINE> <DEDENT> elif dist2 == dist1: <NEW_LINE> <INDENT> reward = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reward = -1 <NEW_LINE> <DEDENT> return ob, reward, episode_over, {} <NEW_LINE> <DEDENT> def _update_pos(self, action): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def render(self, mode='human', close=False): <NEW_LINE> <INDENT> if close: <NEW_LINE> <INDENT> if self.viewer is not None: <NEW_LINE> <INDENT> self.viewer.close() <NEW_LINE> self.viewer = None <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> from gym.envs.classic_control import rendering <NEW_LINE> assert mode == 'human', "MovingDot only supports human render mode" <NEW_LINE> img = self._get_ob() <NEW_LINE> if self.viewer is None: <NEW_LINE> <INDENT> self.viewer = rendering.SimpleImageViewer() <NEW_LINE> <DEDENT> self.viewer.imshow(img)
Base class for MovingDot game
6259907021bff66bcd724513
class SellarImplicitDis2(ImplicitComponent): <NEW_LINE> <INDENT> def __init__(self, units=None, scaling=None): <NEW_LINE> <INDENT> super(SellarImplicitDis2, self).__init__() <NEW_LINE> self.execution_count = 0 <NEW_LINE> self._units = units <NEW_LINE> self._scaling = scaling <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> if self._units: <NEW_LINE> <INDENT> units = 'inch' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> units = None <NEW_LINE> <DEDENT> if self._scaling is None: <NEW_LINE> <INDENT> ref = 1.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ref = .18 <NEW_LINE> <DEDENT> self.add_input('z', val=np.zeros(2), units=units) <NEW_LINE> self.add_input('y1', val=1.0, units=units) <NEW_LINE> self.add_output('y2', val=1.0, units=units, ref=ref) <NEW_LINE> <DEDENT> def apply_nonlinear(self, inputs, outputs, resids): <NEW_LINE> <INDENT> z1 = inputs['z'][0] <NEW_LINE> z2 = inputs['z'][1] <NEW_LINE> y1 = inputs['y1'] <NEW_LINE> y2 = outputs['y2'] <NEW_LINE> if y1.real < 0.0: <NEW_LINE> <INDENT> y1 *= -1 <NEW_LINE> <DEDENT> resids['y2'] = -(y1**.5 + z1 + z2 - y2) <NEW_LINE> <DEDENT> def linearize(self, inputs, outputs, J): <NEW_LINE> <INDENT> y1 = inputs['y1'] <NEW_LINE> if y1.real < 0.0: <NEW_LINE> <INDENT> y1 *= -1 <NEW_LINE> <DEDENT> J['y2', 'y1'] = -.5*y1**-.5 <NEW_LINE> J['y2', 'z'] = -np.array([[1.0, 1.0]]) <NEW_LINE> J['y2', 'y2'] = 1.0
Component containing Discipline 2 -- implicit version.
625990705fcc89381b266dac
class precipitation(object): <NEW_LINE> <INDENT> legal_units = [ "IN", "CM" ] <NEW_LINE> legal_gtlt = [ ">", "<" ] <NEW_LINE> def __init__( self, value, units=None, gtlt=None ): <NEW_LINE> <INDENT> if not units: <NEW_LINE> <INDENT> self._units = "IN" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not units.upper() in precipitation.legal_units: <NEW_LINE> <INDENT> raise UnitsError("unrecognized precipitation unit: '"+units+"'") <NEW_LINE> <DEDENT> self._units = units.upper() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if value.startswith('M'): <NEW_LINE> <INDENT> value = value[1:] <NEW_LINE> gtlt = "<" <NEW_LINE> <DEDENT> elif value.startswith('P'): <NEW_LINE> <INDENT> value = value[1:] <NEW_LINE> gtlt = ">" <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if gtlt and not gtlt in precipitation.legal_gtlt: <NEW_LINE> <INDENT> raise ValueError("unrecognized greater-than/less-than symbol: '"+gtlt+"'") <NEW_LINE> <DEDENT> self._gtlt = gtlt <NEW_LINE> self._value = float(value) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.string() <NEW_LINE> <DEDENT> def value( self, units=None ): <NEW_LINE> <INDENT> if not units: <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not units.upper() in precipitation.legal_units: <NEW_LINE> <INDENT> raise UnitsError("unrecognized precipitation unit: '"+units+"'") <NEW_LINE> <DEDENT> units = units.upper() <NEW_LINE> <DEDENT> if units == self._units: <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> if self._units == "CM": <NEW_LINE> <INDENT> i_value = self._value*2.54 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i_value = self._value <NEW_LINE> <DEDENT> if units == "CM": <NEW_LINE> <INDENT> return i_value*2.54 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return i_value <NEW_LINE> <DEDENT> <DEDENT> def string( self, units=None ): <NEW_LINE> <INDENT> if not units: <NEW_LINE> <INDENT> units = self._units <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not units.upper() in precipitation.legal_units: <NEW_LINE> <INDENT> raise UnitsError("unrecognized precipitation unit: '"+units+"'") <NEW_LINE> <DEDENT> units = units.upper() <NEW_LINE> <DEDENT> text = "%.2f" % self.value(units) <NEW_LINE> if units == "CM": <NEW_LINE> <INDENT> text += "cm" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text += "in" <NEW_LINE> <DEDENT> if self._gtlt == ">": <NEW_LINE> <INDENT> text = "greater than "+text <NEW_LINE> <DEDENT> elif self._gtlt == "<": <NEW_LINE> <INDENT> text = "less than "+text <NEW_LINE> <DEDENT> return text
A class representing a precipitation value.
625990703346ee7daa3382b4
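A small usage sketch for the precipitation class above; the value is arbitrary.

p = precipitation("M0.25", units="IN")   # a leading 'M' is parsed as "less than"
print(p.value())       # 0.25
print(p.value("CM"))   # the same amount converted to centimetres
print(p.string())      # "less than 0.25in"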
class Generic(exceptions.Exception): <NEW_LINE> <INDENT> def __init__(self, err_msg=None): <NEW_LINE> <INDENT> self.err_msg = err_msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.err_msg <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__class__.__name__ + '(' + self.err_msg + ')'
Base class for error handlers.
625990707047854f46340c63
class Organisation(object): <NEW_LINE> <INDENT> def __init__(self, etree, dataset=None, schema=None): <NEW_LINE> <INDENT> self.etree = etree <NEW_LINE> self.dataset = dataset <NEW_LINE> self._schema = schema <NEW_LINE> self.version = self.schema.version <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> id_ = self.org_identifier <NEW_LINE> id_ = id_ if id_ else '[No identifier]' <NEW_LINE> return '<{} ({})>'.format(self.__class__.__name__, id_) <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> params = {'publisher': self.org_identifier} <NEW_LINE> url = 'http://d-portal.org/ctrack.html?{}#view=main'.format( urlencode(params)) <NEW_LINE> webbrowser.open_new_tab(url) <NEW_LINE> <DEDENT> @property <NEW_LINE> def schema(self): <NEW_LINE> <INDENT> return self._schema <NEW_LINE> <DEDENT> @property <NEW_LINE> def xml(self): <NEW_LINE> <INDENT> return bytes(ET.tostring(self.etree, pretty_print=True)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def org_identifier(self): <NEW_LINE> <INDENT> id_ = self.schema.org_identifier().run(self.etree) <NEW_LINE> if id_: <NEW_LINE> <INDENT> return id_[0].strip() <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def validate_iati(self): <NEW_LINE> <INDENT> etree = ET.Element('iati-organisations') <NEW_LINE> etree.set('version', self.version) <NEW_LINE> etree.append(self.etree) <NEW_LINE> xsd_schema = XSDSchema('organisation', self.version) <NEW_LINE> return xsd_schema.validate(etree) <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self.org_identifier
Class representing an IATI organisation.
625990704e4d562566373cb1
class EffectGiveNode(EffectCmdNode): <NEW_LINE> <INDENT> def __init__(self, selector, effect_id, duration=None, level=None, hide_particles=True): <NEW_LINE> <INDENT> assert_type(selector, SelectorNode) <NEW_LINE> assert_type(effect_id, Token) <NEW_LINE> assert_type(duration, Token, optional=True) <NEW_LINE> assert_type(level, Token, optional=True) <NEW_LINE> assert_type(hide_particles, bool) <NEW_LINE> self.selector = selector <NEW_LINE> self.effect_id = effect_id <NEW_LINE> self.duration = duration <NEW_LINE> self.level = level <NEW_LINE> self.hide_particles = hide_particles
Attributes: selector (SelectorNode) effect_id (Token) duration (Token or None) level (Token or None) hide_particles (bool)
625990704c3428357761bb5f
class TPUDistributionStrategy(object): <NEW_LINE> <INDENT> def __init__(self, tpu_cluster_resolver=None, using_single_core=False): <NEW_LINE> <INDENT> if tpu_cluster_resolver is None: <NEW_LINE> <INDENT> tpu_cluster_resolver = tpu_cluster_resolver_lib.TPUClusterResolver('') <NEW_LINE> <DEDENT> metadata = get_tpu_system_metadata(tpu_cluster_resolver) <NEW_LINE> self._tpu_metadata = metadata <NEW_LINE> self._tpu_cluster_resolver = tpu_cluster_resolver <NEW_LINE> self._num_cores = 1 if using_single_core else metadata.num_cores <NEW_LINE> worker_re = re.compile('/job:([^/]+)') <NEW_LINE> for device in metadata.devices: <NEW_LINE> <INDENT> if 'TPU:0' in device.name: <NEW_LINE> <INDENT> self._worker_name = worker_re.search(device.name).group(1) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> raise Exception('No TPU found on given worker.') <NEW_LINE> <DEDENT> def _make_assignment_for_model(self, cpu_model): <NEW_LINE> <INDENT> num_cores = self._num_cores <NEW_LINE> if num_cores > 1 and cpu_model.stateful: <NEW_LINE> <INDENT> logging.warning( 'Model replication does not currently support stateful models. ' 'Degrading to a single core.') <NEW_LINE> num_cores = 1 <NEW_LINE> <DEDENT> return TPUAssignment(worker_name=self._worker_name, num_cores=num_cores)
The strategy to run a Keras model on TPU.
62599070283ffb24f3cf5154
class XOR: <NEW_LINE> <INDENT> def __init__(self, literals): <NEW_LINE> <INDENT> assert(len(literals) >= 2) <NEW_LINE> self.__literals = literals <NEW_LINE> self.__index = 2 <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.__literals) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> return self.__literals[idx] <NEW_LINE> <DEDENT> def __setitem__(self, idx, val): <NEW_LINE> <INDENT> self.__literals[idx] = val <NEW_LINE> return val <NEW_LINE> <DEDENT> def propagate(self, assignment, i): <NEW_LINE> <INDENT> assert(i < 2) <NEW_LINE> for j in chain(range(self.__index, len(self)), range(2, self.__index)): <NEW_LINE> <INDENT> if assignment.value(self[j]) is None: <NEW_LINE> <INDENT> self.__index = j + 1 if j + 1 < len(self) else 2 <NEW_LINE> self[i], self[j] = self[j], self[i] <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def reason_gje(self, columns, assignment, cutoff): <NEW_LINE> <INDENT> state = {} <NEW_LINE> parities = columns["parity"] <NEW_LINE> partial = [] <NEW_LINE> for key, value in columns.items(): <NEW_LINE> <INDENT> if key != "parity": <NEW_LINE> <INDENT> assign = assignment.value(key) <NEW_LINE> if assign == None: <NEW_LINE> <INDENT> state[key] = value <NEW_LINE> <DEDENT> elif assign == True: <NEW_LINE> <INDENT> parities = gje.xor_(value._List__list, parities) <NEW_LINE> partial.append( key) <NEW_LINE> <DEDENT> elif assign == False: <NEW_LINE> <INDENT> partial.append(-key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> xor_lits = [ lit for lit in state ] <NEW_LINE> m = [ col._List__list for col in state.values()] <NEW_LINE> m.append(parities) <NEW_LINE> matrix = np.array(m).T <NEW_LINE> conflict, clause = gje.check_sat_(matrix, xor_lits) <NEW_LINE> if conflict: <NEW_LINE> <INDENT> return conflict, partial, clause <NEW_LINE> <DEDENT> if len(matrix[0]) > 2 and len(parities)>1: <NEW_LINE> <INDENT> matrix = gje.remove_rows_zeros_(matrix) <NEW_LINE> matrix = gje.perform_gauss_jordan_elimination_(matrix, False) <NEW_LINE> conflict, clause = gje.check_sat_(matrix, xor_lits) <NEW_LINE> <DEDENT> return conflict, partial, clause
An XOR constraint maintains the following invariants:
1. there are at least two literals, and
2. the first two literals are unassigned, or all literals are assigned and
   the first two literals have been assigned last on the same decision level.
Furthermore, an index pointing to the literal after the literal assigned last
is maintained. We start the search for the next unassigned literal from this
point. This is important to get the amortized linear propagation time.
6259907001c39578d7f1438a
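A minimal standalone sketch of the parity propagation the XOR class above revolves around: once all but one literal of an XOR constraint is assigned, the remaining literal is forced, and a conflict is detected when everything is assigned with the wrong parity. The dict-based assignment and the function name are illustrative assumptions, not part of the original clingo-based API.

.. code-block:: python

    def propagate_xor(assignment, literals, parity=True):
        """Return (literal, forced_value), 'conflict', or None if nothing is deduced."""
        unassigned = [lit for lit in literals if assignment.get(lit) is None]
        acc = False
        for lit in literals:
            if assignment.get(lit):
                acc = not acc                      # fold assigned True literals into the parity
        if not unassigned:
            return "conflict" if acc != parity else None
        if len(unassigned) == 1:
            return (unassigned[0], acc != parity)  # last literal must restore the parity
        return None                                # two or more unassigned: nothing forced yet

    # x1 xor x2 xor x3 = True with x1=True, x2=False forces x3=False
    print(propagate_xor({"x1": True, "x2": False, "x3": None}, ["x1", "x2", "x3"]))
    # -> ('x3', False)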
class ImportVenuesRawForm(forms.Form): <NEW_LINE> <INDENT> venues_raw = forms.CharField(widget=forms.Textarea(attrs={'rows': 20})) <NEW_LINE> def clean_venues_raw(self): <NEW_LINE> <INDENT> lines = self.cleaned_data['venues_raw'].split('\n') <NEW_LINE> venues = [] <NEW_LINE> for i, line in enumerate(csv.reader(lines), start=1): <NEW_LINE> <INDENT> if len(line) < 1: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> params = {} <NEW_LINE> params['name'] = line[0] <NEW_LINE> params['priority'] = line[1] if len(line) > 1 else '100' <NEW_LINE> params = {k: v.strip() for k, v in params.items()} <NEW_LINE> venues.append(params) <NEW_LINE> <DEDENT> if len(venues) == 0: <NEW_LINE> <INDENT> raise ValidationError(_("There were no rooms to import.")) <NEW_LINE> <DEDENT> max_allowed = MAX_FORM_DATA_FIELDS // (len(VenueDetailsForm.base_fields) + 1) <NEW_LINE> if len(venues) > max_allowed: <NEW_LINE> <INDENT> raise ValidationError(ngettext( "Sorry, you can only import up to %(max_allowed)d room at a " "time. (You currently have %(given)d.) " "Try splitting your import into smaller chunks.", "Sorry, you can only import up to %(max_allowed)d rooms at a " "time. (You currently have %(given)d.) " "Try splitting your import into smaller chunks.", max_allowed) % {'max_allowed': max_allowed, 'given': len(venues)}) <NEW_LINE> <DEDENT> return venues
Form that takes in a CSV-style list of venues, splits it and stores the split data.
6259907099cbb53fe6832795
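A standalone sketch of the CSV-splitting step that clean_venues_raw() performs, outside Django, so the ValidationError and form-field-limit handling is left out; the helper name is illustrative.

.. code-block:: python

    import csv

    def parse_venues(raw):
        venues = []
        for line in csv.reader(raw.split('\n')):
            if not line:                           # skip blank rows
                continue
            params = {'name': line[0],
                      'priority': line[1] if len(line) > 1 else '100'}
            venues.append({k: v.strip() for k, v in params.items()})
        return venues

    print(parse_venues("Room 101, 90\nRoom 102"))
    # -> [{'name': 'Room 101', 'priority': '90'}, {'name': 'Room 102', 'priority': '100'}]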
class ProcessInfo: <NEW_LINE> <INDENT> def __init__(self, process_id=os.getpid(), program_name='python{}.{} {}'.format( sys.version_info.major, sys.version_info.minor, program_name()), arguments=copy.copy(sys.argv), start_time=process_start_time(), executing_user=None, rsb_version=rsb.version.get_version()): <NEW_LINE> <INDENT> self._id = process_id <NEW_LINE> self._program_name = program_name <NEW_LINE> self._arguments = arguments <NEW_LINE> self._start_time = start_time <NEW_LINE> self._executing_user = executing_user <NEW_LINE> if not self._executing_user: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._executing_user = getpass.getuser() <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self._rsb_version = rsb_version <NEW_LINE> <DEDENT> @property <NEW_LINE> def process_id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @property <NEW_LINE> def program_name(self): <NEW_LINE> <INDENT> return self._program_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def arguments(self): <NEW_LINE> <INDENT> return self._arguments <NEW_LINE> <DEDENT> @property <NEW_LINE> def start_time(self): <NEW_LINE> <INDENT> return self._start_time <NEW_LINE> <DEDENT> @property <NEW_LINE> def executing_user(self): <NEW_LINE> <INDENT> return self._executing_user <NEW_LINE> <DEDENT> @property <NEW_LINE> def rsb_version(self): <NEW_LINE> <INDENT> return self._rsb_version <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '<{} {} [{}] at 0x{:x}>'.format( type(self).__name__, self.program_name, self.process_id, id(self)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self)
Stores information about operating system processes.

The stored information can describe the current process, a different process
on the local machine or a remote process.

.. codeauthor:: jmoringe
625990703317a56b869bf19a
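A standard-library-only sketch that collects the same kind of metadata about the current process; the start-time stand-in and the dict layout are assumptions, and none of the rsb helpers are used here.

.. code-block:: python

    import getpass
    import os
    import sys
    import time

    try:
        user = getpass.getuser()
    except OSError:
        user = None                                # mirror the class's tolerant lookup

    info = {
        "process_id": os.getpid(),
        "program_name": "python{}.{} {}".format(sys.version_info.major,
                                                sys.version_info.minor,
                                                os.path.basename(sys.argv[0]) or "<interactive>"),
        "arguments": list(sys.argv),
        "start_time": time.time(),                 # crude stand-in for the real process start time
        "executing_user": user,
    }
    print(info)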
class VelocityEstimator(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> from collections import deque <NEW_LINE> self.positions=deque([],2) <NEW_LINE> <DEDENT> def append(self, time, new_position): <NEW_LINE> <INDENT> from copy import deepcopy <NEW_LINE> self.positions.append((time, deepcopy(new_position))) <NEW_LINE> <DEDENT> def get_velocity_estimation(self): <NEW_LINE> <INDENT> last_time, last_position=self.positions[-1] <NEW_LINE> try: <NEW_LINE> <INDENT> first_time, first_position=self.positions[-2] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return last_position*0 <NEW_LINE> <DEDENT> if last_time-first_time>0: <NEW_LINE> <INDENT> return (last_position-first_position)*1.0/(last_time-first_time) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return last_position*0
Estimates velocity from the historic set of (time, position) samples, using a
finite difference over the two most recent entries.
625990704428ac0f6e659ddf
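A standalone sketch of the finite-difference estimate the class implements, v ~ (x_last - x_prev) / (t_last - t_prev) over the two most recent samples kept in a two-element deque; numpy arrays are an assumed stand-in for whatever position type the caller uses.

.. code-block:: python

    from collections import deque

    import numpy as np                             # assumed position type; any arithmetic type works

    samples = deque(maxlen=2)                      # keep only the last two (time, position) pairs
    samples.append((0.0, np.array([0.0, 0.0])))
    samples.append((0.5, np.array([1.0, -2.0])))

    (t0, x0), (t1, x1) = samples
    velocity = (x1 - x0) / (t1 - t0) if t1 > t0 else x1 * 0
    print(velocity)                                # -> [ 2. -4.]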
class LastCountryOfUserMapperTestCase(MapperTestMixin, TestCase): <NEW_LINE> <INDENT> user_id = 1 <NEW_LINE> timestamp = '2013-12-17T15:38:32.805444' <NEW_LINE> ip_address = FakeGeoLocation.ip_address_1 <NEW_LINE> course_id = 'DummyX/Course/ID' <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.task_class = LastCountryOfUser <NEW_LINE> super(LastCountryOfUserMapperTestCase, self).setUp() <NEW_LINE> <DEDENT> def test_mapper(self): <NEW_LINE> <INDENT> line = '\t'.join([self.timestamp, self.ip_address, str(self.user_id), self.course_id]) <NEW_LINE> self.assert_single_map_output(line, self.user_id, (self.timestamp, self.ip_address)) <NEW_LINE> <DEDENT> def test_requires_local(self): <NEW_LINE> <INDENT> tasks = self.task.requires_local() <NEW_LINE> self.assertEquals(len(tasks), 2) <NEW_LINE> self.assertEquals(tasks['geolocation_data'].url, 'test://data/data.file') <NEW_LINE> self.assertTrue(isinstance(tasks['user_addresses_task'], LastDailyIpAddressOfUserTask)) <NEW_LINE> <DEDENT> def test_requires_hadoop(self): <NEW_LINE> <INDENT> tasks = self.task.requires_hadoop() <NEW_LINE> self.assertEquals(len(tasks), 2) <NEW_LINE> self.assertTrue(isinstance(tasks['path_selection_task'], PathSelectionByDateIntervalTask)) <NEW_LINE> self.assertEquals(len(tasks['downstream_input_tasks']), 14)
Tests of LastCountryOfUser.mapper()
62599070dd821e528d6da5d7
class Result(Enum): <NEW_LINE> <INDENT> UNKNOWN = 0 <NEW_LINE> SUCCESS = 1 <NEW_LINE> IN_PROGRESS = 2 <NEW_LINE> BUSY = 3 <NEW_LINE> DENIED = 4 <NEW_LINE> ERROR = 5 <NEW_LINE> TIMEOUT = 6 <NEW_LINE> WRONG_ARGUMENT = 7 <NEW_LINE> NO_SYSTEM = 8 <NEW_LINE> def translate_to_rpc(self): <NEW_LINE> <INDENT> if self == CameraResult.Result.UNKNOWN: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_UNKNOWN <NEW_LINE> <DEDENT> if self == CameraResult.Result.SUCCESS: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_SUCCESS <NEW_LINE> <DEDENT> if self == CameraResult.Result.IN_PROGRESS: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_IN_PROGRESS <NEW_LINE> <DEDENT> if self == CameraResult.Result.BUSY: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_BUSY <NEW_LINE> <DEDENT> if self == CameraResult.Result.DENIED: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_DENIED <NEW_LINE> <DEDENT> if self == CameraResult.Result.ERROR: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_ERROR <NEW_LINE> <DEDENT> if self == CameraResult.Result.TIMEOUT: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_TIMEOUT <NEW_LINE> <DEDENT> if self == CameraResult.Result.WRONG_ARGUMENT: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_WRONG_ARGUMENT <NEW_LINE> <DEDENT> if self == CameraResult.Result.NO_SYSTEM: <NEW_LINE> <INDENT> return camera_pb2.CameraResult.RESULT_NO_SYSTEM <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def translate_from_rpc(rpc_enum_value): <NEW_LINE> <INDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_UNKNOWN: <NEW_LINE> <INDENT> return CameraResult.Result.UNKNOWN <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_SUCCESS: <NEW_LINE> <INDENT> return CameraResult.Result.SUCCESS <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_IN_PROGRESS: <NEW_LINE> <INDENT> return CameraResult.Result.IN_PROGRESS <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_BUSY: <NEW_LINE> <INDENT> return CameraResult.Result.BUSY <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_DENIED: <NEW_LINE> <INDENT> return CameraResult.Result.DENIED <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_ERROR: <NEW_LINE> <INDENT> return CameraResult.Result.ERROR <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_TIMEOUT: <NEW_LINE> <INDENT> return CameraResult.Result.TIMEOUT <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_WRONG_ARGUMENT: <NEW_LINE> <INDENT> return CameraResult.Result.WRONG_ARGUMENT <NEW_LINE> <DEDENT> if rpc_enum_value == camera_pb2.CameraResult.RESULT_NO_SYSTEM: <NEW_LINE> <INDENT> return CameraResult.Result.NO_SYSTEM <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Possible results returned for camera commands

Values
------
UNKNOWN
     Unknown result

SUCCESS
     Command executed successfully

IN_PROGRESS
     Command in progress

BUSY
     Camera is busy and rejected command

DENIED
     Camera denied the command

ERROR
     An error has occurred while executing the command

TIMEOUT
     Command timed out

WRONG_ARGUMENT
     Command has wrong argument(s)

NO_SYSTEM
     No system connected
62599070435de62698e9d6b1
class HTTPHeaders(dict): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> dict.__init__(self) <NEW_LINE> for k in kwargs.keys(): <NEW_LINE> <INDENT> self.__setitem__(k, kwargs[k]) <NEW_LINE> <DEDENT> <DEDENT> def add(self, key, value): <NEW_LINE> <INDENT> if not isinstance(key, str): <NEW_LINE> <INDENT> raise KeyError("key must be a str") <NEW_LINE> <DEDENT> key = key.strip().lower() <NEW_LINE> if isinstance(value, tuple): <NEW_LINE> <INDENT> value = list(value) <NEW_LINE> <DEDENT> if self.has_key(key): <NEW_LINE> <INDENT> current_value = self.__getitem__(key) <NEW_LINE> if isinstance(current_value, tuple): <NEW_LINE> <INDENT> current_value = list(current_value) <NEW_LINE> <DEDENT> elif not isinstance(current_value, list): <NEW_LINE> <INDENT> current_value = [current_value] <NEW_LINE> <DEDENT> if not isinstance(value, list): <NEW_LINE> <INDENT> value = [value] <NEW_LINE> <DEDENT> current_value += value <NEW_LINE> dict.__setitem__(self, key, current_value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__setitem__(key, value) <NEW_LINE> <DEDENT> <DEDENT> def fload(self, fp): <NEW_LINE> <INDENT> line = [] <NEW_LINE> while not "".join(line) in ("\n", "\r\n"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> line.append(fp.read(1)) <NEW_LINE> <DEDENT> except socket.timeout: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if line and line[-1] == '\n': <NEW_LINE> <INDENT> if ':' in line: <NEW_LINE> <INDENT> k, v = "".join(line).split(':', 1) <NEW_LINE> v = v.strip() <NEW_LINE> for _type in (int, float): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> v = _type(v) <NEW_LINE> break <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.add(k.strip(), v) <NEW_LINE> line = [] <NEW_LINE> <DEDENT> elif "".join(line).rstrip("\r\n"): <NEW_LINE> <INDENT> line = list("".join(line).strip() + ' ') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get(self, key, default = None): <NEW_LINE> <INDENT> if not isinstance(key, str): <NEW_LINE> <INDENT> raise KeyError("key must be a str") <NEW_LINE> <DEDENT> key = key.strip() <NEW_LINE> if self.has_key(key): <NEW_LINE> <INDENT> return self.__getitem__(key) <NEW_LINE> <DEDENT> return default <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if not isinstance(key, str): <NEW_LINE> <INDENT> raise KeyError("key must be a str") <NEW_LINE> <DEDENT> return dict.__getitem__(self, key.strip().lower()) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if not isinstance(key, str): <NEW_LINE> <INDENT> raise KeyError("key must be a str") <NEW_LINE> <DEDENT> dict.__setitem__(self, key.strip().lower(), value) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> pairs = [] <NEW_LINE> for k, v in sorted(self.iteritems(), key = lambda e: e[0]): <NEW_LINE> <INDENT> k = k.capitalize() <NEW_LINE> if isinstance(v, list): <NEW_LINE> <INDENT> for _v in v: <NEW_LINE> <INDENT> pairs.append((k, _v)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pairs.append((k, v)) <NEW_LINE> <DEDENT> <DEDENT> return "\r\n".join([": ".join((k, str(v))) for k, v in pairs] + ["", ""])
A dictionary mapping header-name strings (case-insensitively) to values;
useful for loading MIME headers from a file-like object.
62599070460517430c432cad
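A Python 3 standalone sketch of the same header-loading idea: read "Name: value" lines from a file-like object until a blank line, lower-case the names, and merge repeated headers into lists. The continuation-line handling and int/float coercion the class performs are left out, and the function name is illustrative.

.. code-block:: python

    import io

    def load_headers(fp):
        headers = {}
        for raw in fp:
            line = raw.rstrip("\r\n")
            if not line:                           # blank line ends the header block
                break
            name, _, value = line.partition(":")
            key, value = name.strip().lower(), value.strip()
            if key in headers:                     # repeated header -> list of values
                existing = headers[key]
                headers[key] = (existing if isinstance(existing, list) else [existing]) + [value]
            else:
                headers[key] = value
        return headers

    fp = io.StringIO("Content-Type: text/html\r\nSet-Cookie: a=1\r\nSet-Cookie: b=2\r\n\r\nbody")
    print(load_headers(fp))
    # -> {'content-type': 'text/html', 'set-cookie': ['a=1', 'b=2']}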
class NotAuthenticatedError(Error): <NEW_LINE> <INDENT> pass
Authentication failed.
625990705166f23b2e244c80
class EEADM_File_State(EEADM): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> tic = time.perf_counter() <NEW_LINE> args = ["eeadm", "file", "state", "-s", path] <NEW_LINE> proc = subprocess.run( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=10, check=True, universal_newlines=True, ) <NEW_LINE> toc = time.perf_counter() <NEW_LINE> logger.info(f"Duration: {toc - tic:0.4f} Called {args}") <NEW_LINE> results = list() <NEW_LINE> logger.debug(proc.stdout) <NEW_LINE> results.append(proc.stdout) <NEW_LINE> self.files = [] <NEW_LINE> for entry in results: <NEW_LINE> <INDENT> match = re.match( r"([PMR])\s+(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+\-\-\s+(\/.+)", entry ) <NEW_LINE> tapes = [] <NEW_LINE> for tape in [match[3], match[4], match[5]]: <NEW_LINE> <INDENT> if not tape == "-": <NEW_LINE> <INDENT> tapes.append(tape) <NEW_LINE> <DEDENT> <DEDENT> logger.debug(f"File Entry: {entry}") <NEW_LINE> self.files.append( LtfseeFile( state=match[1], replicas=match[2], tapes=tapes, path=match[6] ) ) <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.files)
Wrapper around "eeadm file state -s <path>".

path - Absolute path to files on the filesystem.
       Wildcards are accepted (TODO).

The resulting LtfseeFile objects are stored in self.files.
625990705fcc89381b266dad
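A sketch of just the parsing step, applied to a hypothetical line shaped the way the regular expression in the class expects; real "eeadm file state -s" output may differ, and the sample path and tape names are invented for illustration.

.. code-block:: python

    import re

    sample = "M 2 TAPE01L8 TAPE02L8 - -- /gpfs/fs1/project/data.bin"   # hypothetical line

    match = re.match(r"([PMR])\s+(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+\-\-\s+(\/.+)", sample)
    state, replicas = match[1], match[2]
    tapes = [t for t in (match[3], match[4], match[5]) if t != "-"]     # drop empty tape slots
    path = match[6]
    print(state, replicas, tapes, path)
    # -> M 2 ['TAPE01L8', 'TAPE02L8'] /gpfs/fs1/project/data.bin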