code (string, length 4-4.48k) | docstring (string, length 1-6.45k) | _id (string, length 24)
---|---|---|
class InvalidTargetResource(Exception): <NEW_LINE> <INDENT> pass | This exception can be raised when a known resource target is invalid.
This exception will prevent a retry of a failed action. | 62599073be8e80087fbc0996 |
class EventLoop(object): <NEW_LINE> <INDENT> def alarm(self, seconds, callback): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def enter_idle(self, callback): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def remove_alarm(self, handle): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def remove_enter_idle(self, handle): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def remove_watch_file(self, handle): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def watch_file(self, fd, callback): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def set_signal_handler(self, signum, handler): <NEW_LINE> <INDENT> return signal.signal(signum, handler) | Abstract class representing an event loop to be used by :class:`MainLoop`. | 625990734f6381625f19a12c |
@register('salles') <NEW_LINE> class SalleLookup(LookupChannel): <NEW_LINE> <INDENT> model = Salle <NEW_LINE> def get_query(self, q, request): <NEW_LINE> <INDENT> return self.model.objects.filter(nom__icontains=q).order_by('nom')[:50] <NEW_LINE> <DEDENT> def format_item_display(self, item): <NEW_LINE> <INDENT> return u"<span class='tag'>%s</span>" % item.nom | It customizes classroom search in a form field | 6259907397e22403b383c809 |
class COPY_QUEUE: <NEW_LINE> <INDENT> def __init__(self, src_dir, dst_dir, num_copy_thread=3): <NEW_LINE> <INDENT> self.src_dir = src_dir <NEW_LINE> self.dst_dir = dst_dir <NEW_LINE> self.file_Q = Queue.Queue() <NEW_LINE> self.copied_file_list = list() <NEW_LINE> self.num_copy_thread = num_copy_thread <NEW_LINE> self._copy_thread_list = list() <NEW_LINE> self._polling_thread = threading.Thread(target=self._copy_file_listener) <NEW_LINE> self._polling_thread.daemon = True <NEW_LINE> self._polling_thread.start() <NEW_LINE> <DEDENT> def add_file(self, file_name): <NEW_LINE> <INDENT> self.file_Q.put(file_name) <NEW_LINE> <DEDENT> def _copy_file_worker(self, file_name): <NEW_LINE> <INDENT> print("[copyQ] Copying <%s>." % file_name) <NEW_LINE> shutil.copy2( (self.src_dir + file_name), self.dst_dir) <NEW_LINE> print("[copyQ] Finishing copying <%s>." % file_name) <NEW_LINE> <DEDENT> def _remove_idle_threads(self): <NEW_LINE> <INDENT> _idx = 0 <NEW_LINE> while _idx < len(self._copy_thread_list): <NEW_LINE> <INDENT> if not self._copy_thread_list[_idx].isAlive(): <NEW_LINE> <INDENT> del self._copy_thread_list[_idx] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _idx += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _copy_file_listener(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> while not self.file_Q.empty(): <NEW_LINE> <INDENT> self._remove_idle_threads() <NEW_LINE> if len(self._copy_thread_list) >= self.num_copy_thread: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> a_file = self.file_Q.get() <NEW_LINE> print("[copyQ] Get <%s> from list." % a_file) <NEW_LINE> if not a_file in self.copied_file_list: <NEW_LINE> <INDENT> self.copied_file_list.append(a_file) <NEW_LINE> _t = threading.Thread(target=self._copy_file_worker, args=(a_file,) ) <NEW_LINE> self._copy_thread_list.append(_t) <NEW_LINE> _t.start() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("[copyQ] Not to copy <%s>." % a_file) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> if len(self._copy_thread_list) > 0: <NEW_LINE> <INDENT> self._remove_idle_threads() <NEW_LINE> print("[CopyQ] Number of thread busying = %d" % len(self._copy_thread_list) ) <NEW_LINE> <DEDENT> time.sleep(0.2) | This is the class for handling the file copying. | 625990738e7ae83300eea997 |
class UpdateReplacePolicy(CloudFormationLintRule): <NEW_LINE> <INDENT> id = 'E3036' <NEW_LINE> shortdesc = 'Check UpdateReplacePolicy values for Resources' <NEW_LINE> description = 'Check that the UpdateReplacePolicy values are valid' <NEW_LINE> source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-updatereplacepolicy.html' <NEW_LINE> tags = ['resources', 'updatereplacepolicy'] <NEW_LINE> def check_value(self, key, path, res_type): <NEW_LINE> <INDENT> matches = [] <NEW_LINE> valid_values = [ 'Delete', 'Retain', 'Snapshot' ] <NEW_LINE> valid_snapshot_types = [ 'AWS::EC2::Volume', 'AWS::ElastiCache::CacheCluster', 'AWS::ElastiCache::ReplicationGroup', 'AWS::Neptune::DBCluster', 'AWS::RDS::DBCluster', 'AWS::RDS::DBInstance', 'AWS::Redshift::Cluster' ] <NEW_LINE> if not isinstance(key, (six.text_type, six.string_types)): <NEW_LINE> <INDENT> message = 'UpdateReplacePolicy values should be of string at {0}' <NEW_LINE> matches.append(RuleMatch(path, message.format('/'.join(map(str, path))))) <NEW_LINE> return matches <NEW_LINE> <DEDENT> if key not in valid_values: <NEW_LINE> <INDENT> message = 'UpdateReplacePolicy should be only one of {0} at {1}' <NEW_LINE> matches.append(RuleMatch( path, message.format(', '.join(map(str, valid_values)), '/'.join(map(str, path))))) <NEW_LINE> <DEDENT> if key == 'Snapshot' and res_type not in valid_snapshot_types: <NEW_LINE> <INDENT> message = 'UpdateReplacePolicy cannot be Snapshot for resources of type {0} at {1}' <NEW_LINE> matches.append(RuleMatch( path, message.format(res_type, '/'.join(map(str, path))))) <NEW_LINE> <DEDENT> return matches <NEW_LINE> <DEDENT> def match(self, cfn): <NEW_LINE> <INDENT> matches = [] <NEW_LINE> resources = cfn.get_resources() <NEW_LINE> for resource_name, resource_values in resources.items(): <NEW_LINE> <INDENT> updatereplace_policies = resource_values.get('UpdateReplacePolicy') <NEW_LINE> if updatereplace_policies: <NEW_LINE> <INDENT> path = ['Resources', resource_name, 'UpdateReplacePolicy'] <NEW_LINE> res_type = resource_values.get('Type') <NEW_LINE> self.logger.debug('Validating UpdateReplacePolicy for %s base configuration', resource_name) <NEW_LINE> if isinstance(updatereplace_policies, list): <NEW_LINE> <INDENT> message = 'Only one UpdateReplacePolicy allowed per resource at {0}' <NEW_LINE> matches.append(RuleMatch(path, message.format('/'.join(map(str, path))))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> matches.extend(self.check_value(updatereplace_policies, path, res_type)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return matches | Check Base Resource Configuration | 625990734428ac0f6e659e3a |
class FlaskTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> app = Flask(__name__) <NEW_LINE> app.config['DEBUG'] = True <NEW_LINE> app.config['TESTING'] = True <NEW_LINE> app.logger.disabled = True <NEW_LINE> self.app = app <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.app = None | Mix-in class for creating the Flask application | 625990735fc7496912d48eec |
class create: <NEW_LINE> <INDENT> def cube(objName): <NEW_LINE> <INDENT> bpy.ops.mesh.primitive_cube_add(radius=0.5, location=(0, 0, 0)) <NEW_LINE> act.rename(objName) <NEW_LINE> <DEDENT> def sphere(objName): <NEW_LINE> <INDENT> bpy.ops.mesh.primitive_uv_sphere_add(size=0.5, location=(0, 0, 0)) <NEW_LINE> act.rename(objName) <NEW_LINE> <DEDENT> def cone(objName): <NEW_LINE> <INDENT> bpy.ops.mesh.primitive_cone_add(radius1=0.5, location=(0, 0, 0)) <NEW_LINE> act.rename(objName) | Function Class for CREATING Objects | 62599073a17c0f6771d5d82e |
class ubuntu(Blueprint): <NEW_LINE> <INDENT> services = [Ubuntu] <NEW_LINE> packages = [PackageUbuntu, ubuntu_20_04_cloud] <NEW_LINE> substrates = [UbuntuVM] <NEW_LINE> profiles = [Default] <NEW_LINE> credentials = [BP_CRED_LINUX, BP_CRED_INFOBLOX] | Ubuntu server 20.04 basic installation | 625990737d847024c075dce0 |
class ImplementationError(PackageBaseException): <NEW_LINE> <INDENT> pass | A class to raise when something is not properly implemented. | 625990731f5feb6acb1644f9 |
class CLIContext: <NEW_LINE> <INDENT> def __init__(self, app, no_color, workdir, quiet=False): <NEW_LINE> <INDENT> self.app = app or get_current_app() <NEW_LINE> self.no_color = no_color <NEW_LINE> self.quiet = quiet <NEW_LINE> self.workdir = workdir <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def OK(self): <NEW_LINE> <INDENT> return self.style("OK", fg="green", bold=True) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def ERROR(self): <NEW_LINE> <INDENT> return self.style("ERROR", fg="red", bold=True) <NEW_LINE> <DEDENT> def style(self, message=None, **kwargs): <NEW_LINE> <INDENT> if self.no_color: <NEW_LINE> <INDENT> return message <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return click.style(message, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def secho(self, message=None, **kwargs): <NEW_LINE> <INDENT> if self.no_color: <NEW_LINE> <INDENT> kwargs['color'] = False <NEW_LINE> click.echo(message, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> click.secho(message, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def echo(self, message=None, **kwargs): <NEW_LINE> <INDENT> if self.no_color: <NEW_LINE> <INDENT> kwargs['color'] = False <NEW_LINE> click.echo(message, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> click.echo(message, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def error(self, message=None, **kwargs): <NEW_LINE> <INDENT> kwargs['err'] = True <NEW_LINE> if self.no_color: <NEW_LINE> <INDENT> kwargs['color'] = False <NEW_LINE> click.echo(message, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> click.secho(message, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def pretty(self, n): <NEW_LINE> <INDENT> if isinstance(n, list): <NEW_LINE> <INDENT> return self.OK, self.pretty_list(n) <NEW_LINE> <DEDENT> if isinstance(n, dict): <NEW_LINE> <INDENT> if 'ok' in n or 'error' in n: <NEW_LINE> <INDENT> return self.pretty_dict_ok_error(n) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = json.dumps(n, sort_keys=True, indent=4) <NEW_LINE> if not self.no_color: <NEW_LINE> <INDENT> s = highlight(s, LEXER, FORMATTER) <NEW_LINE> <DEDENT> return self.OK, s <NEW_LINE> <DEDENT> <DEDENT> if isinstance(n, str): <NEW_LINE> <INDENT> return self.OK, n <NEW_LINE> <DEDENT> return self.OK, pformat(n) <NEW_LINE> <DEDENT> def pretty_list(self, n): <NEW_LINE> <INDENT> if not n: <NEW_LINE> <INDENT> return '- empty -' <NEW_LINE> <DEDENT> return '\n'.join( f'{self.style("*", fg="white")} {item}' for item in n ) <NEW_LINE> <DEDENT> def pretty_dict_ok_error(self, n): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.OK, text.indent(self.pretty(n['ok'])[1], 4)) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return (self.ERROR, text.indent(self.pretty(n['error'])[1], 4)) <NEW_LINE> <DEDENT> def say_chat(self, direction, title, body='', show_body=False): <NEW_LINE> <INDENT> if direction == '<-' and self.quiet: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> dirstr = not self.quiet and f'{self.style(direction, fg="white", bold=True)} ' or '' <NEW_LINE> self.echo(f'{dirstr} {title}') <NEW_LINE> if body and show_body: <NEW_LINE> <INDENT> self.echo(body) | Context Object for the CLI. | 6259907321bff66bcd72456e |
class StandardConditionsReportingPeriodAggregateModeledEnergyUse(BSElement): <NEW_LINE> <INDENT> element_type = "xs:decimal" | Applicable when the NormalizationMethod is Standard Conditions. As documented in Annex B4.5 of ASHRAE Guideline 14-2018: "In many cases, it is necessary to normalize the savings to a typical or average period (usually a year) at the site. It was shown in Section B4.3 that when measurement errors are negligible, the uncertainty in calculating actual savings using a weather-based regression is due to the error in normalizing the baseline energy use to the postretrofit period. Normalized savings requires two regression equations: one that correlates baseline energy use with baseline weather conditions and one that correlates postretrofit energy use with postretrofit weather conditions. This value represents the "normalized postretrofit energy use", or the predicted energy consumption using the Reporting (or postretrofit) model when data from a standard year (or typical year) is supplied to it. | 625990732c8b7c6e89bd50ee |
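The two-regression normalization the docstring describes can be sketched in a few lines; the temperatures, energy figures, and linear model below are illustrative assumptions only, not values taken from any BuildingSync document or from ASHRAE Guideline 14.

```python
# Illustrative sketch of "normalized savings" using two regressions and a
# standard-conditions (typical-year) weather series. All numbers are made up.
import numpy as np

baseline_temp = np.array([2.0, 4.0, 9.0, 14.0, 19.0, 23.0])          # deg C, baseline period
baseline_use = np.array([980.0, 900.0, 760.0, 640.0, 520.0, 450.0])  # kWh
post_temp = np.array([1.0, 5.0, 10.0, 15.0, 20.0, 24.0])             # deg C, post-retrofit period
post_use = np.array([870.0, 780.0, 640.0, 530.0, 430.0, 370.0])      # kWh

# One regression per period: energy = slope * temperature + intercept.
b_slope, b_intercept = np.polyfit(baseline_temp, baseline_use, 1)
p_slope, p_intercept = np.polyfit(post_temp, post_use, 1)

# Evaluate both models against the same standard-conditions temperatures.
typical_temp = np.array([3.0, 5.0, 9.0, 14.0, 19.0, 23.0])
normalized_baseline_use = b_slope * typical_temp + b_intercept
normalized_post_use = p_slope * typical_temp + p_intercept  # the aggregate modeled energy use

print("Normalized savings (kWh):", round((normalized_baseline_use - normalized_post_use).sum()))
```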
class EmptyText(base.BaseRule): <NEW_LINE> <INDENT> def elements(self): <NEW_LINE> <INDENT> return ["Text"] <NEW_LINE> <DEDENT> def check(self, element): <NEW_LINE> <INDENT> if element.text is not None and not element.text.strip(): <NEW_LINE> <INDENT> raise loggers.ElectionWarning.from_message("Text is empty", element) | Check that Text elements are not strictly whitespace. | 62599073a219f33f346c8111 |
class vardcentraler_csv(object): <NEW_LINE> <INDENT> def __init__(self, fname): <NEW_LINE> <INDENT> self.fname = fname <NEW_LINE> self.csvfh = open(fname, "rb") <NEW_LINE> self.heading = ['PARENT_WORKPLACE_NAME','PARENT_WORKPLACE_ADDRESS','PARENT_WORKPLACE_ZIP','PARENT_WORKPLACE_CITY','PARENT_WORKPLACE_PHONE'] <NEW_LINE> self.reader = csv.DictReader(self.csvfh, fieldnames=self.heading, delimiter=";", quotechar='"') <NEW_LINE> self.reader.next() <NEW_LINE> self.reader.next() <NEW_LINE> <DEDENT> def read (self): <NEW_LINE> <INDENT> for row_h in self.reader: <NEW_LINE> <INDENT> yield row_h | class: vardcentraler_csv | 6259907367a9b606de547727 |
class DevSettings(BaseConfig): <NEW_LINE> <INDENT> config = Config() <NEW_LINE> DEBUG = config("DEBUG", cast=bool, default=True) <NEW_LINE> DB_USER = config("DB_USER", cast=str, default="postgres") <NEW_LINE> DB_PASSWORD = config("DB_PASSWORD", cast=Secret, default="postgres") <NEW_LINE> DB_HOST = config("DB_HOST", cast=str, default="db") <NEW_LINE> DB_PORT = config("DB_PORT", cast=str, default="5432") <NEW_LINE> DB_NAME = config("DB_NAME", cast=str, default="postgres") <NEW_LINE> INCLUDE_SCHEMA = config("INCLUDE_SCHEMA", cast=bool, default=True) <NEW_LINE> DATABASE_URL = config( "DATABASE_URL", default=f"asyncpg://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}", ) | Configuration class for site development environment | 625990737d43ff2487428096 |
class MirrorChecker: <NEW_LINE> <INDENT> def __init__(self, path, base, mirror): <NEW_LINE> <INDENT> self.mirror = mirror <NEW_LINE> self.url = posixpath.join(mirror.url, str(path.relative_to(base / mirror.subdir))) <NEW_LINE> self.sha256 = None <NEW_LINE> self.status = None <NEW_LINE> self.error = None <NEW_LINE> self.history = None <NEW_LINE> <DEDENT> async def check(self, session): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> digest = hashlib.sha256() <NEW_LINE> async with session.get(self.url) as response: <NEW_LINE> <INDENT> self.history = response.history <NEW_LINE> self.status = response.status <NEW_LINE> try: <NEW_LINE> <INDENT> response.raise_for_status() <NEW_LINE> <DEDENT> except aiohttp.ClientResponseError: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> chunk = await response.content.read(4096) <NEW_LINE> if not chunk: <NEW_LINE> <INDENT> self.sha256 = digest.hexdigest() <NEW_LINE> return <NEW_LINE> <DEDENT> digest.update(chunk) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> self.error = str(err) <NEW_LINE> <DEDENT> <DEDENT> def asdict(self): <NEW_LINE> <INDENT> return {'status': self.status, 'history': self.history, 'sha256': self.sha256, 'error': self.error} | Checker for single mirror | 62599073ad47b63b2c5a9155 |
class SkuSignupOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> NONE = "None" <NEW_LINE> AVAILABLE = "Available" | Sku can be signed up by customer or not.
| 62599073a8370b77170f1cd2 |
class Coordinator(): <NEW_LINE> <INDENT> def __init__(self, csv_file = None): <NEW_LINE> <INDENT> self.players = self._read_file_return_players(csv_file) <NEW_LINE> self.playersort = self._player_sort() <NEW_LINE> self.teams = self._create_team() <NEW_LINE> self.write_file = self._write_to_file() <NEW_LINE> <DEDENT> def _read_file_return_players(self,csv_file): <NEW_LINE> <INDENT> with open('soccer_players.csv', newline = '') as csvfile: <NEW_LINE> <INDENT> fieldnames = ['name', 'height','experience','gaurdians'] <NEW_LINE> reader = csv.DictReader(csvfile, fieldnames = fieldnames) <NEW_LINE> player_pool = [Player(**row) for row in reader][1:] <NEW_LINE> <DEDENT> return player_pool <NEW_LINE> <DEDENT> def _player_sort(self): <NEW_LINE> <INDENT> sorted_list = sorted(self.players, key = lambda player: player.experience) <NEW_LINE> List_no = sorted_list[:9] <NEW_LINE> List_yes = sorted_list[9:] <NEW_LINE> Sharks = List_no[:3] + List_yes[:3] <NEW_LINE> Raptors = List_no[3:6] + List_yes[3:6] <NEW_LINE> Dragons = List_no[6:9] + List_yes[6:9] <NEW_LINE> Sharksdict = dict([('name','Sharks'), ('players', Sharks)]) <NEW_LINE> Dragonsdict = dict([('name','Dragons'), ('players', Dragons)]) <NEW_LINE> Raptorsdict = dict([('name','Raptors'), ('players', Raptors)]) <NEW_LINE> Compl_Teams = Sharksdict, Dragonsdict, Raptorsdict <NEW_LINE> return Compl_Teams <NEW_LINE> <DEDENT> def _create_team(self): <NEW_LINE> <INDENT> Team_list = [ Team(**item) for item in self.playersort] <NEW_LINE> return Team_list <NEW_LINE> <DEDENT> def _write_to_file(self): <NEW_LINE> <INDENT> with open("team.txt", "a") as file: <NEW_LINE> <INDENT> for item in self.teams: <NEW_LINE> <INDENT> team_name = str(item) <NEW_LINE> file.write('\n''\n') <NEW_LINE> file.write(team_name) <NEW_LINE> for player in item.players: <NEW_LINE> <INDENT> playerstats = str(player) <NEW_LINE> file.write('\n') <NEW_LINE> file.write(playerstats) | docstring for Coordinator | 62599073e1aae11d1e7cf491 |
@unittest.skip("test not implemented yet") <NEW_LINE> class MessageValuesRetypeTests(SenderReceiverTestCase): <NEW_LINE> <INDENT> pass | retype message fields test group | 6259907332920d7e50bc794e |
class sublime_linter_lint(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def want_event(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_visible(self, event=None, **kwargs): <NEW_LINE> <INDENT> return ( util.is_lintable(self.view) and any( info["settings"].get("lint_mode") != "background" for info in elect.runnable_linters_for_view(self.view, "on_user_request") ) ) if event else True <NEW_LINE> <DEDENT> def run(self, edit, event=None): <NEW_LINE> <INDENT> assignable_linters = list( elect.assignable_linters_for_view(self.view, "on_user_request") ) <NEW_LINE> if not assignable_linters: <NEW_LINE> <INDENT> flash(self.view, "No linters available for this view") <NEW_LINE> return <NEW_LINE> <DEDENT> runnable_linters = [ info["name"] for info in elect.filter_runnable_linters(assignable_linters) ] <NEW_LINE> if not runnable_linters: <NEW_LINE> <INDENT> flash(self.view, "No runnable linters, probably save first") <NEW_LINE> return <NEW_LINE> <DEDENT> flash(self.view, "Running {}".format(", ".join(runnable_linters))) <NEW_LINE> hit(self.view, 'on_user_request') | A command that lints the current view if it has a linter. | 6259907301c39578d7f143b8 |
class ListAllEventsResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | A ResultSet with methods tailored to the values returned by the ListAllEvents Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution. | 62599073460517430c432cdb |
class Die(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=256) <NEW_LINE> instructions = models.TextField('Instructions', blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return ("%s" % (self.name)) | A model storing information for a die that has been imaged. | 625990732ae34c7f260ac9eb |
class StatusSummary(object): <NEW_LINE> <INDENT> def __init__(self, jobs): <NEW_LINE> <INDENT> assert type(jobs) == list <NEW_LINE> self._successful = 0 <NEW_LINE> self._pending = 0 <NEW_LINE> self._running = 0 <NEW_LINE> self._coalesced = 0 <NEW_LINE> self._failed = 0 <NEW_LINE> for job in jobs: <NEW_LINE> <INDENT> status = QUERY_SOURCE.get_job_status(job) <NEW_LINE> if status == PENDING: <NEW_LINE> <INDENT> self._pending += 1 <NEW_LINE> <DEDENT> if status in (RUNNING, UNKNOWN): <NEW_LINE> <INDENT> self._running += 1 <NEW_LINE> <DEDENT> if status == SUCCESS: <NEW_LINE> <INDENT> self._successful += 1 <NEW_LINE> <DEDENT> if status == COALESCED: <NEW_LINE> <INDENT> self._coalesced += 1 <NEW_LINE> <DEDENT> if status in (FAILURE, WARNING, EXCEPTION, RETRY): <NEW_LINE> <INDENT> self._failed += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def successful_jobs(self): <NEW_LINE> <INDENT> return self._successful <NEW_LINE> <DEDENT> @property <NEW_LINE> def pending_jobs(self): <NEW_LINE> <INDENT> return self._pending <NEW_LINE> <DEDENT> @property <NEW_LINE> def running_jobs(self): <NEW_LINE> <INDENT> return self._running <NEW_LINE> <DEDENT> @property <NEW_LINE> def coalesced_jobs(self): <NEW_LINE> <INDENT> return self._coalesced <NEW_LINE> <DEDENT> @property <NEW_LINE> def failed_jobs(self): <NEW_LINE> <INDENT> return self._failed <NEW_LINE> <DEDENT> @property <NEW_LINE> def potential_jobs(self): <NEW_LINE> <INDENT> return self._successful + self._pending + self._running + self._failed | class which represent the summary of status | 62599073283ffb24f3cf51b1 |
class IPrincipalExported(IPersonalProfile): <NEW_LINE> <INDENT> title = TextLine(title=_(u'Principal full name')) <NEW_LINE> firstname = TextLine(title=_(u'Principal first name')) <NEW_LINE> lastname = TextLine(title=_(u'Principal last name')) <NEW_LINE> email = TextLine(title=_(u'Principal email')) <NEW_LINE> location = TextLine(title=_(u'Location')) <NEW_LINE> department = TextLine(title=_(u'Department')) | member exported | 625990738e7ae83300eea998 |
class MonitoringClient(object): <NEW_LINE> <INDENT> def __init__(self, meter_id, url='https://smart-comp.honda-ri.de/app.php', database='monitoring'): <NEW_LINE> <INDENT> self.logger = configure_logging(meter_id) <NEW_LINE> self.API_ENDPOINT = 'api' <NEW_LINE> self.METER_FORMATSTRING = '("{}")' <NEW_LINE> self.meter_id = meter_id <NEW_LINE> self.api_url = url + param(self.API_ENDPOINT) <NEW_LINE> self.database = database <NEW_LINE> info = 'API Endpoint {}'.format(self.api_url) <NEW_LINE> self.logger.debug(green(info)) <NEW_LINE> session = requests.Session() <NEW_LINE> session.verify = False <NEW_LINE> session.trust_env = False <NEW_LINE> self._test_connection(session) <NEW_LINE> self.api = session <NEW_LINE> <DEDENT> def get_single_reading(self, method=Method.CURRENT_JSON, timemode=Timemode.LOCAL, when=Time.NOW): <NEW_LINE> <INDENT> url = build_url(self.api_url, self.meter_id, self.METER_FORMATSTRING, method, self.database, timemode, when=when ) <NEW_LINE> resp = self._request_blocking(url) <NEW_LINE> data = json.loads(resp.content)[self.meter_id] <NEW_LINE> reading, timestamp = data['value'], data['timeStamp']/1000 <NEW_LINE> info = 'Got {} kWh at t={} from Server'.format(reading, timestamp) <NEW_LINE> self.logger.info(green(info)) <NEW_LINE> return (reading, timestamp) <NEW_LINE> <DEDENT> def get_multiple_readings(self, start_time, end_time, timemode=Timemode.LOCAL, num_format=NumberFormat.DE, **kwargs): <NEW_LINE> <INDENT> url = build_url(self.api_url, self.meter_id, self.METER_FORMATSTRING, Method.URNS_CSV, self.database, timemode, start_time=start_time, end_time=end_time, num_format=num_format, **kwargs ) <NEW_LINE> resp = self._request_blocking(url) <NEW_LINE> return (resp.text) <NEW_LINE> <DEDENT> def _request_blocking(self, url): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resp = self.api.get(url) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> info = 'Fetching {}'.format(url) <NEW_LINE> self.logger.debug(green(info)) <NEW_LINE> return resp <NEW_LINE> <DEDENT> def _test_connection(self, session): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> session.get(self.api_url) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.logger.info("Cannot reach API!") <NEW_LINE> sys.exit(-1) | Monitoring server module:
https://smart-comp.honda-ri.de/app.php/monitor/monitoring/help/rest | 6259907371ff763f4b5e90b1 |
class MainView(generic.TemplateView): <NEW_LINE> <INDENT> template_name = 'ListJobs/main.html' | Loads the main page | 625990737b180e01f3e49ce8 |
class NetIpsecIkesa(NetIpsecIkesaSchema): <NEW_LINE> <INDENT> cli_command = "/mgmt/tm/net/ipsec/ike-sa" <NEW_LINE> def rest(self): <NEW_LINE> <INDENT> response = self.device.get(self.cli_command) <NEW_LINE> response_json = response.json() <NEW_LINE> if not response_json: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> return response_json | To F5 resource for /mgmt/tm/net/ipsec/ike-sa
| 625990737c178a314d78e86f |
class Game(ndb.Model): <NEW_LINE> <INDENT> game_over = ndb.BooleanProperty(required=True, default=False) <NEW_LINE> marks = ndb.StringProperty(required=True, default='000000000') <NEW_LINE> user = ndb.KeyProperty(required=True, kind='User') <NEW_LINE> cancelled = ndb.BooleanProperty(required=False) <NEW_LINE> history = ndb.PickleProperty(required=False, default=[('000000000', 'Start')]) <NEW_LINE> @classmethod <NEW_LINE> def new_game(cls, user): <NEW_LINE> <INDENT> game = Game(user=user, game_over=False) <NEW_LINE> game.put() <NEW_LINE> return game <NEW_LINE> <DEDENT> def to_form(self, message): <NEW_LINE> <INDENT> form = GameForm() <NEW_LINE> form.urlsafe_key = self.key.urlsafe() <NEW_LINE> form.user_name = self.user.get().name <NEW_LINE> form.game_over = self.game_over <NEW_LINE> form.cancelled = self.cancelled <NEW_LINE> form.message = message <NEW_LINE> return form <NEW_LINE> <DEDENT> def end_game(self, won=False): <NEW_LINE> <INDENT> if won is True: <NEW_LINE> <INDENT> self.history.append('You win!') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.history.append('You lose!') <NEW_LINE> <DEDENT> self.game_over = True <NEW_LINE> self.put() <NEW_LINE> score = Score(user=self.user, date=date.today(), won=won) <NEW_LINE> score.put() | Game object | 62599073b7558d5895464bb7 |
class Rank(object): <NEW_LINE> <INDENT> ace, two, three, four, five, six, seven, eight, nine, ten, jack, queen, king = 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 <NEW_LINE> ranks = [ace, two, three, four, five, six, seven, eight, nine, ten, jack, queen, king] <NEW_LINE> def __init__(self, rank): <NEW_LINE> <INDENT> assert(rank in self.ranks) <NEW_LINE> self.rank = rank <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.rank == other.rank <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.rank < other.rank <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.rank == 1: <NEW_LINE> <INDENT> return 'A' <NEW_LINE> <DEDENT> elif self.rank == 11: <NEW_LINE> <INDENT> return 'J' <NEW_LINE> <DEDENT> elif self.rank == 12: <NEW_LINE> <INDENT> return 'Q' <NEW_LINE> <DEDENT> elif self.rank == 13: <NEW_LINE> <INDENT> return 'K' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return str(self.rank) <NEW_LINE> <DEDENT> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return unicode(str(self)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{}({})".format(self.__class__.__name__, self.rank) | A class representing the rank of a playing card | 625990734a966d76dd5f07f2 |
class TimberMachiningCut: <NEW_LINE> <INDENT> def __init__(self,obj): <NEW_LINE> <INDENT> obj.addProperty("App::PropertyLinkSub","Face","Timber","The face's plane to make the cut") <NEW_LINE> obj.addProperty("App::PropertyLink","Structure","Timber","The Timber Structure to cut") <NEW_LINE> obj.Proxy = self <NEW_LINE> <DEDENT> def execute(self, obj): <NEW_LINE> <INDENT> face = obj.Face <NEW_LINE> faceObject = face[0] <NEW_LINE> faceNumber = int(face[1][0][4:]) - 1 <NEW_LINE> face = faceObject.Shape.Faces[faceNumber] <NEW_LINE> structure = obj.Structure <NEW_LINE> cutVolume = ArchCommands.getCutVolume(face, structure.Shape) <NEW_LINE> machining = cutVolume[2].common(structure.Shape) <NEW_LINE> obj.Shape = machining | The Cut Timber Machining object | 6259907355399d3f05627e21 |
class ModifyAlarmPolicyTasksResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId") | ModifyAlarmPolicyTasks response parameter structure
| 625990735fcc89381b266ddc |
@registerElement <NEW_LINE> class Timezones (WebDAVEmptyElement): <NEW_LINE> <INDENT> namespace = calendarserver_namespace <NEW_LINE> name = "timezones" | Denotes a timezone service resource.
(Apple Extension to CalDAV) | 62599073aad79263cf4300bf |
class TestDefaultApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = swagger_client.apis.default_api.DefaultApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_calls_get(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_features_charts_get(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_features_reports_get(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_features_widget_get(self): <NEW_LINE> <INDENT> pass | DefaultApi unit test stubs | 625990734c3428357761bbbe |
class EventCreateForm(forms.ModelForm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Event <NEW_LINE> fields = ( 'name', 'description', ) | Form for creating an event | 62599073ec188e330fdfa1ad |
class CleanCommand(SubCommand): <NEW_LINE> <INDENT> name = "clean" <NEW_LINE> def run(self, args, argv): <NEW_LINE> <INDENT> Docker().clean() <NEW_LINE> return 0 | Clean up docker instances | 62599073796e427e53850082 |
class DatasetIterator(DistributedIteratorV1): <NEW_LINE> <INDENT> def __init__(self, dataset, input_workers, strategy, num_replicas_in_sync=None, input_context=None): <NEW_LINE> <INDENT> dist_dataset = DistributedDatasetV1( dataset, input_workers, strategy, num_replicas_in_sync=num_replicas_in_sync, input_context=input_context) <NEW_LINE> worker_iterators = _create_iterators_per_worker( dist_dataset._cloned_datasets, input_workers, True) <NEW_LINE> super(DatasetIterator, self).__init__(input_workers, worker_iterators, strategy, dist_dataset.cardinality, dist_dataset._enable_get_next_as_optional) <NEW_LINE> self._element_spec = dist_dataset.element_spec | Iterator created from input dataset. | 62599073cc0a2c111447c755 |
class MyMNIST(MNIST): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(MyMNIST, self).__init__(*args, **kwargs) <NEW_LINE> self.semi_targets = torch.zeros_like(self.targets) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> img, target, semi_target = self.data[index], int(self.targets[index]), int(self.semi_targets[index]) <NEW_LINE> img = Image.fromarray(img.numpy(), mode='L') <NEW_LINE> if self.transform is not None: <NEW_LINE> <INDENT> img = self.transform(img) <NEW_LINE> <DEDENT> if self.target_transform is not None: <NEW_LINE> <INDENT> target = self.target_transform(target) <NEW_LINE> <DEDENT> return img, target, semi_target, index | Torchvision MNIST class with additional targets for the semi-supervised setting and patch of __getitem__ method
to also return the semi-supervised target as well as the index of a data sample. | 6259907332920d7e50bc7950 |
class AdminTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.superuser = create_superuser() <NEW_LINE> self.client.login(username='admin', password='secret') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_inline_model_detail_view(self): <NEW_LINE> <INDENT> obj = MasterModel(title='test') <NEW_LINE> obj.save() <NEW_LINE> url = reverse('admin:test_app_mastermodel_change', args=(obj.id, )) <NEW_LINE> response = self.client.get(url, follow=True) <NEW_LINE> self.assertEqual(response.status_code, 200) | check some basic admin views
TODO: check with custom User Model! | 625990738a43f66fc4bf3a9e |
class Apply(Expr): <NEW_LINE> <INDENT> _arguments = '_child', 'func', '_asdshape', '_splittable' <NEW_LINE> def _schema(self): <NEW_LINE> <INDENT> if iscollection(self.dshape): <NEW_LINE> <INDENT> return self.dshape.subshape[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Non-tabular datashape, %s" % self.dshape) <NEW_LINE> <DEDENT> <DEDENT> def _dshape(self): <NEW_LINE> <INDENT> return self._asdshape | Apply an arbitrary Python function onto an expression
Examples
--------
>>> t = symbol('t', 'var * {name: string, amount: int}')
>>> h = t.apply(hash, dshape='int64') # Hash value of resultant dataset
You must provide the datashape of the result with the ``dshape=`` keyword.
For datashape examples see
http://datashape.pydata.org/grammar.html#some-simple-examples
If using a chunking backend and your operation may be safely split and
concatenated then add the ``splittable=True`` keyword argument
>>> t.apply(f, dshape='...', splittable=True) # doctest: +SKIP
See Also
--------
blaze.expr.expressions.Map | 6259907363b5f9789fe86a6d |
class ManagerTest: <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self._client_mock = ClientMock(self.RESOURCE) <NEW_LINE> self.manager = self.MANAGER(self._client_mock, 141) <NEW_LINE> <DEDENT> @property <NEW_LINE> def last_mock(self): <NEW_LINE> <INDENT> return self._client_mock._last_mock | Base class for ResourceManager subclasses tests; main purpose
is request automocking. | 62599073091ae35668706542 |
class JobException(WavesException): <NEW_LINE> <INDENT> def __init__(self, message, job=None): <NEW_LINE> <INDENT> if job: <NEW_LINE> <INDENT> message = '[job:%s][%s] - %s' % (job.slug, job.remote_job_id, message) <NEW_LINE> <DEDENT> super(JobException, self).__init__(message) | Base Exception class for all job related errors | 62599073b7558d5895464bb8 |
class And(BinaryOperator): <NEW_LINE> <INDENT> def __init__(self, left_expr: Expr, right_expr: Expr): <NEW_LINE> <INDENT> super(And, self).__init__("AND", left_expr, right_expr) <NEW_LINE> <DEDENT> def eval(self): <NEW_LINE> <INDENT> left_expr_eval = self.left_expr.eval() <NEW_LINE> if not left_expr_eval: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.right_expr.eval() <NEW_LINE> <DEDENT> <DEDENT> def negate(self): <NEW_LINE> <INDENT> return Or(self.left_expr.negate(), self.right_expr.negate()) <NEW_LINE> <DEDENT> def simplify(self): <NEW_LINE> <INDENT> return And(self.left_expr.simplify(), self.right_expr.simplify()) | Defines an "AND" operator in propositional logic | 62599073a17c0f6771d5d830 |
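A self-contained sketch of how the `negate` rewrite above behaves; the `Expr`, `Lit`, and `Or` stand-ins are assumptions added for the example (the row only shows `And`), but the De Morgan step mirrors the code in this row.

```python
# Minimal stand-ins so the And/negate pattern above can be run on its own.
class Expr:
    pass

class Lit(Expr):
    def __init__(self, name, value):
        self.name, self.value = name, value
    def eval(self):
        return self.value
    def negate(self):
        return Lit("not " + self.name, not self.value)
    def __repr__(self):
        return self.name

class Or(Expr):
    def __init__(self, left_expr, right_expr):
        self.left_expr, self.right_expr = left_expr, right_expr
    def eval(self):
        return self.left_expr.eval() or self.right_expr.eval()
    def __repr__(self):
        return "(%r OR %r)" % (self.left_expr, self.right_expr)

class And(Expr):
    def __init__(self, left_expr, right_expr):
        self.left_expr, self.right_expr = left_expr, right_expr
    def eval(self):
        # Same short-circuit behaviour as the row above.
        return self.left_expr.eval() and self.right_expr.eval()
    def negate(self):
        # De Morgan: NOT (A AND B) == (NOT A) OR (NOT B)
        return Or(self.left_expr.negate(), self.right_expr.negate())
    def __repr__(self):
        return "(%r AND %r)" % (self.left_expr, self.right_expr)

expr = And(Lit("a", True), Lit("b", False))
print(expr.eval())            # False
print(expr.negate())          # (not a OR not b)
print(expr.negate().eval())   # True
```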
class AstavomsRESTError(Exception): <NEW_LINE> <INDENT> status_code = None <NEW_LINE> def __init__(self, message=None, status_code=None, payload=None): <NEW_LINE> <INDENT> Exception.__init__(self) <NEW_LINE> self.message = message or self.__doc__ <NEW_LINE> if status_code is not None: <NEW_LINE> <INDENT> self.status_code = status_code <NEW_LINE> <DEDENT> self.payload = payload <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> rv = dict(self.payload or ()) <NEW_LINE> rv['message'] = self.message <NEW_LINE> return rv | Template class for Astavoms errors | 625990731f5feb6acb1644fd |
class DeleteAttachmentView(DeleteView): <NEW_LINE> <INDENT> model = Attachment <NEW_LINE> def post(self, request, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> if not check_access(self.object.project, self.request.user): <NEW_LINE> <INDENT> raise PermissionDenied <NEW_LINE> <DEDENT> return super(DeleteAttachmentView, self).post(request, **kwargs) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> return reverse('projects:attachment-list', kwargs={ 'project_slug': self.object.project.slug, }) | Delete attachment if user has permissions.
| 625990737b25080760ed8969 |
class classproperty(property): <NEW_LINE> <INDENT> def __get__(self, cls, owner): <NEW_LINE> <INDENT> return classmethod(self.fget).__get__(None, owner)() | Class property decorator. | 625990737047854f46340cc2 |
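A short usage sketch for the decorator in this row; the `Config` class and `_name` attribute are hypothetical, and the decorator body is repeated only so the snippet runs standalone.

```python
# The classproperty from the row above, repeated so this example is runnable.
class classproperty(property):
    def __get__(self, cls, owner):
        return classmethod(self.fget).__get__(None, owner)()

class Config:
    _name = "default"

    @classproperty
    def display_name(cls):
        # Computed from the class itself; no instance is required.
        return cls._name.upper()

print(Config.display_name)    # DEFAULT (accessed on the class)
print(Config().display_name)  # DEFAULT (also works on instances)
```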
class Detail(LoginRequiredMixin, base_views.BaseDetailView): <NEW_LINE> <INDENT> model = models.MachineInput <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(Detail, self).__init__() | Detail of a MachineInput | 625990733317a56b869bf1ca |
class GitHub(BaseModel): <NEW_LINE> <INDENT> resume = models.ForeignKey(Resume, on_delete=models.CASCADE) <NEW_LINE> user_id = models.IntegerField(help_text="User's Github id") <NEW_LINE> user_name = models.CharField(max_length=50, help_text="User's Github Username") <NEW_LINE> profile_name = models.CharField(max_length=50, blank=True, help_text="User's Full Name", null=True) <NEW_LINE> email = models.EmailField(help_text="User's Email", null=True) <NEW_LINE> profile_url = models.URLField(help_text="User's Github Url") <NEW_LINE> profile_image_url = models.URLField(help_text="User's profile image url") <NEW_LINE> gists_url = models.URLField(help_text="Github's API url for all user gists") <NEW_LINE> location = models.CharField(max_length=100, blank=True, help_text="User's Location", null=True) <NEW_LINE> blog_url = models.URLField(help_text="User's Website/Blog Url", blank=True, null=True) <NEW_LINE> company = models.CharField(max_length=50, help_text="Company the user is currently working in.", null=True) <NEW_LINE> followers = models.PositiveIntegerField() <NEW_LINE> following = models.PositiveIntegerField() <NEW_LINE> hireable = models.NullBooleanField(null=True) <NEW_LINE> public_repos = models.PositiveIntegerField(help_text="Number of Public repository") <NEW_LINE> total_private_repos = models.PositiveIntegerField(help_text="Total Number of Private repository", null=True) <NEW_LINE> owned_private_repos = models.PositiveIntegerField(help_text="Private repositories owned by User", null=True) <NEW_LINE> public_gists = models.PositiveIntegerField(help_text="Public gists owned by User", null=True) <NEW_LINE> private_gists = models.PositiveIntegerField(help_text="Private gists owned by User", null=True) <NEW_LINE> account_created_at = models.DateField(help_text="Date of User's Account Creation") <NEW_LINE> repo_updated_at = models.DateTimeField(help_text="Date when user updated a repository") <NEW_LINE> account_modified_at = models.DateTimeField(help_text="Date when user modified the account") <NEW_LINE> reputation_score = models.FloatField(default=0) <NEW_LINE> contribution_score = models.FloatField(default=0) <NEW_LINE> activity_score = models.FloatField(default=0) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "%s" % self.user_name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ('-created_date', ) | Model to store the github user details | 62599073ad47b63b2c5a9159 |
class U2Gate(Gate): <NEW_LINE> <INDENT> def __init__(self, phi, lam, qubit, circ=None): <NEW_LINE> <INDENT> super().__init__("u2", [phi, lam], [qubit], circ) <NEW_LINE> <DEDENT> def qasm(self): <NEW_LINE> <INDENT> qubit = self.arg[0] <NEW_LINE> phi = self.param[0] <NEW_LINE> lam = self.param[1] <NEW_LINE> return self._qasmif("u2(%s,%s) %s[%d];" % (phi, lam, qubit[0].name, qubit[1])) <NEW_LINE> <DEDENT> def inverse(self): <NEW_LINE> <INDENT> phi = self.param[0] <NEW_LINE> self.param[0] = -self.param[1] - pi <NEW_LINE> self.param[1] = -phi + pi <NEW_LINE> return self <NEW_LINE> <DEDENT> def reapply(self, circ): <NEW_LINE> <INDENT> self._modifiers(circ.u2(self.param[0], self.param[1], self.arg[0])) | One-pulse single-qubit gate. | 6259907399cbb53fe68327f5 |
class ClassReport(Metric): <NEW_LINE> <INDENT> def __init__(self,target_names = None): <NEW_LINE> <INDENT> super(ClassReport).__init__() <NEW_LINE> self.target_names = target_names <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.y_pred = 0 <NEW_LINE> self.y_true = 0 <NEW_LINE> <DEDENT> def value(self): <NEW_LINE> <INDENT> score = classification_report(y_true = self.y_true, y_pred = self.y_pred, target_names=self.target_names) <NEW_LINE> print(f"\n\n classification report: {score}") <NEW_LINE> <DEDENT> def __call__(self,logits,target): <NEW_LINE> <INDENT> _, y_pred = torch.max(logits.data, 1) <NEW_LINE> self.y_pred = y_pred.cpu().numpy() <NEW_LINE> self.y_true = target.cpu().numpy() <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return "class_report" | class report | 62599073a8370b77170f1cd6 |
class Oclint(Package): <NEW_LINE> <INDENT> homepage = "http://oclint.org/" <NEW_LINE> url = "https://github.com/oclint/oclint/archive/v0.13.tar.gz" <NEW_LINE> version('0.13', '1d0e605eb7815ac15e6a2a82327d2dd8') <NEW_LINE> depends_on('python', type=('build')) <NEW_LINE> depends_on('py-argparse', type=('build')) <NEW_LINE> depends_on('git', type=('build')) <NEW_LINE> depends_on('subversion', type=('build')) <NEW_LINE> depends_on('cmake', type=('build')) <NEW_LINE> depends_on('ninja', type=('build')) <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> patch('bundle.patch', level=0) <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> cd('oclint-scripts') <NEW_LINE> build_script = Executable(join_path('.', 'build')) <NEW_LINE> bundle_script = Executable(join_path('.', 'bundle')) <NEW_LINE> build_script('-release', '-clean', '-llvm-root={0}'.format(spec['llvm'].prefix), '-use-system-compiler', '-no-analytics', 'all') <NEW_LINE> bundle_script('-release', '-llvm-root={0}'.format(spec['llvm'].prefix)) <NEW_LINE> cd(join_path('..', 'build')) <NEW_LINE> install_tree(join_path('oclint-release', 'include'), prefix.include) <NEW_LINE> install_tree(join_path('oclint-release', 'lib'), prefix.lib) <NEW_LINE> install_tree(join_path('oclint-release', 'bin'), prefix.bin) | OClint: a static analysis tool for C, C++, and Objective-C code
OCLint is a static code analysis tool for improving quality and
reducing defects by inspecting C, C++ and Objective-C code and
looking for potential problems | 62599073097d151d1a2c297d |
class UdsRestServer(RestServer): <NEW_LINE> <INDENT> def __init__(self, socket): <NEW_LINE> <INDENT> self.socket = socket <NEW_LINE> <DEDENT> def _setup_auth(self): <NEW_LINE> <INDENT> _LOGGER.info('Starting REST (noauth) server on %s', self.socket) <NEW_LINE> <DEDENT> def _setup_endpoint(self, http_server): <NEW_LINE> <INDENT> unix_socket = tornado.netutil.bind_unix_socket(self.socket) <NEW_LINE> http_server.add_socket(unix_socket) | UNIX domain socket based REST Server. | 6259907399fddb7c1ca63a59 |
class _SlideDataset(dataset_ops.Dataset): <NEW_LINE> <INDENT> def __init__(self, input_dataset, window_size, stride=1): <NEW_LINE> <INDENT> super(_SlideDataset, self).__init__() <NEW_LINE> self._input_dataset = input_dataset <NEW_LINE> self._window_size = ops.convert_to_tensor( window_size, dtype=dtypes.int64, name="window_size") <NEW_LINE> self._stride = ops.convert_to_tensor( stride, dtype=dtypes.int64, name="stride") <NEW_LINE> <DEDENT> def _as_variant_tensor(self): <NEW_LINE> <INDENT> return gen_dataset_ops.slide_dataset( self._input_dataset._as_variant_tensor(), window_size=self._window_size, stride=self._stride, output_shapes=nest.flatten( sparse.as_dense_shapes(self.output_shapes, self.output_classes)), output_types=nest.flatten( sparse.as_dense_types(self.output_types, self.output_classes))) <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_classes(self): <NEW_LINE> <INDENT> return self._input_dataset.output_classes <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_shapes(self): <NEW_LINE> <INDENT> input_shapes = self._input_dataset.output_shapes <NEW_LINE> return nest.pack_sequence_as(input_shapes, [ tensor_shape.vector(None).concatenate(s) for s in nest.flatten(self._input_dataset.output_shapes) ]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_types(self): <NEW_LINE> <INDENT> return self._input_dataset.output_types | A `Dataset` that passes a sliding window over its input. | 6259907316aa5153ce401de4 |
class ZoAuth2IntrospectionEndpoint(IntrospectionEndpoint): <NEW_LINE> <INDENT> def query_token(self, token, token_type_hint, client): <NEW_LINE> <INDENT> if token_type_hint == 'access_token': <NEW_LINE> <INDENT> tok = OAuth2Token.objects.filter(access_token=token).first() <NEW_LINE> <DEDENT> elif token_type_hint == 'refresh_token': <NEW_LINE> <INDENT> tok = OAuth2Token.objects.filter(refresh_token=token).first() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tok = OAuth2Token.objects.filter(access_token=token).first() <NEW_LINE> if not tok: <NEW_LINE> <INDENT> tok = OAuth2Token.objects.filter(refresh_token=token).first() <NEW_LINE> <DEDENT> <DEDENT> if tok: <NEW_LINE> <INDENT> return tok <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_active(token): <NEW_LINE> <INDENT> if now_timestamp() < token.get_expires_at() and not token.revoked: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def introspect_token(self, token): <NEW_LINE> <INDENT> active = True <NEW_LINE> return { 'active': active, 'client_id': token.client_id, 'token_type': token.token_type, 'username': token.user_id, 'scope': token.get_scope(), 'sub': 'placeholder', 'aud': token.client_id, 'iss': 'https://server.example.com/', 'exp': token.get_expires_at(), 'iat': token.issued_at, } | ZoAuthIntrospectionEndpoint. | 62599073dd821e528d6da607 |
class MarkdownCheatsheetCommand(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def run(self, edit): <NEW_LINE> <INDENT> lines = '\n'.join(load_resource('sample.md').splitlines()) <NEW_LINE> view = new_scratch_view(self.view.window(), lines) <NEW_LINE> view.set_name("Markdown Cheatsheet") <NEW_LINE> extended_syntax = sublime.find_resources('*Markdown Extended.tmLanguage') <NEW_LINE> for syntax in extended_syntax: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> view.set_syntax_file(syntax) <NEW_LINE> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if not view.settings().get('syntax').endswith('/Markdown Extended.tmLanguage'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> view.set_syntax_file("Packages/Markdown/Markdown.tmLanguage") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> sublime.status_message('Markdown cheat sheet opened') | open our markdown cheat sheet in ST2 | 62599073fff4ab517ebcf126 |
class MapGetter(object): <NEW_LINE> <INDENT> def __init__(self, mapping=None, default=_sentinel): <NEW_LINE> <INDENT> if mapping is None and default is _sentinel: <NEW_LINE> <INDENT> raise TypeError("MapGetter must be called with at least one of mapping or default (value/factory function)") <NEW_LINE> <DEDENT> self.builtins = __builtins__ if isinstance(__builtins__, dict) else __builtins__.__dict__ <NEW_LINE> self.mapping = mapping if mapping is not None else {} <NEW_LINE> self.default = default <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.original_import = self.builtins["__import__"] <NEW_LINE> self.builtins["__import__"] = self._map_getter <NEW_LINE> self._thread = threading.current_thread() <NEW_LINE> return self.mapping <NEW_LINE> <DEDENT> def _map_getter(self, name, globals_, locals_, from_list, level=-1): <NEW_LINE> <INDENT> if threading.current_thread() != self._thread or sys._getframe().f_back.f_locals.get(name, None) is not self.mapping: <NEW_LINE> <INDENT> return self.original_import(name, globals_, locals_, from_list, level) <NEW_LINE> <DEDENT> return _PseudoModule(self.mapping, self.default) <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> self.builtins["__import__"] = self.original_import <NEW_LINE> return False | A context manager to allow one to "import" variables from a mapping or
factory function. This helps preserve the DRY principle:
# Example:
>>> a = dict(b=1, c=2)
>>> with MapGetter(a) as blah:
... from blah import b, c
>>> print((b, c))
(1, 2)
It is interesting to note that it will work for ordinary attributes
from Python objects, and, as well, for constant names inside a Python
enum.Enum class. That may be the major use case of this:
In[1]: import enum
In [2]: from extradict import MapGetter
In [3]: class Colors(enum.Enum):
...: RED = 1, 0, 0
...: GREEN = 0, 1, 0
...: BLUE = 0, 0, 1
...:
In [4]: with MapGetter(Colors):
...: from Colors import RED, GREEN, BLUE
...:
In [5]: RED, GREEN, BLUE
Out[5]: (<Colors.RED: (1, 0, 0)>, <Colors.GREEN: (0, 1, 0)>, <Colors.BLUE: (0, 0, 1)>) | 625990734f6381625f19a12f |
class Meta: <NEW_LINE> <INDENT> model = Patient <NEW_LINE> fields = "__all__" <NEW_LINE> exclude = ("campaign",) <NEW_LINE> labels = { "phone_number": "Phone number", "email_address": "Email address", "social_security_number": "National I.D. Number", } <NEW_LINE> widgets = { "date_of_birth": DateInputOverride( attrs={ "placeholder": "dd/mm/yyyy", } ), "state": autocomplete.ModelSelect2(url="main:state-autocomplete"), } | Metaclass controlling model references. | 6259907376e4537e8c3f0e8a |
class Uniform(Initializer): <NEW_LINE> <INDENT> def __init__(self, low=0.0, high=1.0, name="uniformInit"): <NEW_LINE> <INDENT> super(Uniform, self).__init__(name=name) <NEW_LINE> self.low, self.high = (low, high) <NEW_LINE> <DEDENT> def fill(self, param): <NEW_LINE> <INDENT> param[:] = self.be.rng.uniform(self.low, self.high, param.shape) | A class for initializing parameter tensors with values drawn from
a uniform distribution.
Args:
low (Optional[float]): Lower bound of range from which we draw values.
high (Optional[float]): Upper bound of range from which we draw values. | 6259907392d797404e3897e1 |
class ChoicesMeta(enum.EnumMeta): <NEW_LINE> <INDENT> def __new__(metacls, classname, bases, classdict, **kwds): <NEW_LINE> <INDENT> labels = [] <NEW_LINE> for key in classdict._member_names: <NEW_LINE> <INDENT> value = classdict[key] <NEW_LINE> if ( isinstance(value, (list, tuple)) and len(value) > 1 and isinstance(value[-1], (Promise, str)) ): <NEW_LINE> <INDENT> *value, label = value <NEW_LINE> value = tuple(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label = key.replace('_', ' ').title() <NEW_LINE> <DEDENT> labels.append(label) <NEW_LINE> dict.__setitem__(classdict, key, value) <NEW_LINE> <DEDENT> cls = super().__new__(metacls, classname, bases, classdict, **kwds) <NEW_LINE> for member, label in zip(cls.__members__.values(), labels): <NEW_LINE> <INDENT> member._label_ = label <NEW_LINE> <DEDENT> return enum.unique(cls) <NEW_LINE> <DEDENT> def __contains__(cls, member): <NEW_LINE> <INDENT> if not isinstance(member, enum.Enum): <NEW_LINE> <INDENT> return any(x.value == member for x in cls) <NEW_LINE> <DEDENT> return super().__contains__(member) <NEW_LINE> <DEDENT> @property <NEW_LINE> def names(cls): <NEW_LINE> <INDENT> empty = ['__empty__'] if hasattr(cls, '__empty__') else [] <NEW_LINE> return empty + [member.name for member in cls] <NEW_LINE> <DEDENT> @property <NEW_LINE> def choices(cls): <NEW_LINE> <INDENT> empty = [(None, cls.__empty__)] if hasattr(cls, '__empty__') else [] <NEW_LINE> return empty + [(member.value, member.label) for member in cls] <NEW_LINE> <DEDENT> @property <NEW_LINE> def labels(cls): <NEW_LINE> <INDENT> return [label for _, label in cls.choices] <NEW_LINE> <DEDENT> @property <NEW_LINE> def values(cls): <NEW_LINE> <INDENT> return [value for value, _ in cls.choices] | A metaclass for creating enum choices. | 6259907391f36d47f2231b14 |
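This metaclass is the machinery behind Django's `models.TextChoices`/`IntegerChoices`; a brief usage sketch (assuming a Django installation is available) shows the `choices`, `labels`, `values`, and membership behaviour it adds:

```python
# Usage sketch: TextChoices is built on ChoicesMeta (requires Django).
from django.db import models

class Vehicle(models.TextChoices):
    CAR = "CAR", "Car"
    TRUCK = "TRUCK", "Truck"

print(Vehicle.choices)   # [('CAR', 'Car'), ('TRUCK', 'Truck')]
print(Vehicle.labels)    # ['Car', 'Truck']
print(Vehicle.values)    # ['CAR', 'TRUCK']
print("CAR" in Vehicle)  # True -- the metaclass compares against member values
```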
class DarwinDay(Resource): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.args = parser.parse_args() <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> ndays = age_to_darwin( self.args['year'], self.args['month'], self.args['day']) <NEW_LINE> jdata = { 'name': self.args['name'], 'ndays': ndays, 'direction': "younger" if ndays > 0 else "older", } <NEW_LINE> return jdata | get function calls GET to | 625990733539df3088ecdba2 |
class helpSpagediManual(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.enabled = True <NEW_LINE> self.checked = False <NEW_LINE> <DEDENT> def onClick(self): <NEW_LINE> <INDENT> target_file = os.path.join(os.path.dirname(settings.spagedi_executable_path), 'Manual-SPAGeDi_1-4.pdf') <NEW_LINE> os.startfile(target_file) | Implementation for genegis_spagedi_manual.button (Button) | 625990737b180e01f3e49cea |
class MessageStatus: <NEW_LINE> <INDENT> PENDING = 0 <NEW_LINE> SUCCESS = 1 <NEW_LINE> FAIL = 2 | Message status used by client to keep track of their request.
PENDING: the client has not received an ACK message yet.
SUCCESS: the client receives an ACK message and the request completes.
FAIL: the request has failed. | 62599073379a373c97d9a92c |
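A minimal sketch of the bookkeeping these codes imply; the `RequestTracker` class and its method names are hypothetical additions for illustration only.

```python
# Hypothetical client-side tracking built on the status codes above.
class MessageStatus:
    PENDING = 0
    SUCCESS = 1
    FAIL = 2

class RequestTracker:
    def __init__(self):
        self.status = {}  # request id -> MessageStatus value

    def send(self, request_id):
        # No ACK yet, so the request starts out PENDING.
        self.status[request_id] = MessageStatus.PENDING

    def on_ack(self, request_id):
        # Receiving the ACK completes the request.
        self.status[request_id] = MessageStatus.SUCCESS

    def on_timeout(self, request_id):
        # Never acknowledged: the request has failed.
        self.status[request_id] = MessageStatus.FAIL

tracker = RequestTracker()
tracker.send("req-1")
tracker.on_ack("req-1")
print(tracker.status["req-1"] == MessageStatus.SUCCESS)  # True
```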
class BasicTestQtController: <NEW_LINE> <INDENT> def __init__(self, model: ITestQtModel, view: BasicTestQtView): <NEW_LINE> <INDENT> self._model = model <NEW_LINE> self._view = view <NEW_LINE> self._view.show() <NEW_LINE> self._connectSignals() <NEW_LINE> <DEDENT> def _updateMessage(self): <NEW_LINE> <INDENT> self._view.displayMessage(self._model.message) <NEW_LINE> <DEDENT> def _connectSignals(self): <NEW_LINE> <INDENT> self._view.registerUpdateMessageRequest(self._updateMessage) | Controller for the MvcQt application. | 62599073091ae35668706544 |
class RobotsTxtPool(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._parsers = {} <NEW_LINE> <DEDENT> def has_parser(self, url_info): <NEW_LINE> <INDENT> key = self.url_info_key(url_info) <NEW_LINE> return key in self._parsers <NEW_LINE> <DEDENT> def can_fetch(self, url_info, user_agent): <NEW_LINE> <INDENT> key = self.url_info_key(url_info) <NEW_LINE> parser = self._parsers[key] <NEW_LINE> return parser.is_allowed(user_agent, url_info.url) <NEW_LINE> <DEDENT> def load_robots_txt(self, url_info, text): <NEW_LINE> <INDENT> key = self.url_info_key(url_info) <NEW_LINE> parser = robotexclusionrulesparser.RobotExclusionRulesParser() <NEW_LINE> parser.parse(text) <NEW_LINE> self._parsers[key] = parser <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def url_info_key(cls, url_info): <NEW_LINE> <INDENT> return (url_info.scheme, url_info.hostname, url_info.port) | Pool of robots.txt parsers. | 625990735fc7496912d48eef |
class HTTPUnavailableForLegalReasons(HTTPClientError): <NEW_LINE> <INDENT> status_code = 451 | HTTP/451 - Unavailable For Legal Reasons | 6259907323849d37ff8529c3 |
class Container(object): <NEW_LINE> <INDENT> def __init__(self, tree, title, filename): <NEW_LINE> <INDENT> self.tree = tree <NEW_LINE> self.title = title <NEW_LINE> self.filename = filename <NEW_LINE> self.dirname = os.path.dirname(os.path.abspath(filename)) <NEW_LINE> self.basename = os.path.basename(os.path.abspath(filename)) <NEW_LINE> self._pre = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def body(self): <NEW_LINE> <INDENT> elem = lxml.etree.Element('body') <NEW_LINE> if self._pre is not None: <NEW_LINE> <INDENT> elem.append(self._pre) <NEW_LINE> <DEDENT> elem.append(self.tree) <NEW_LINE> return elem <NEW_LINE> <DEDENT> @property <NEW_LINE> def full_tree(self): <NEW_LINE> <INDENT> return builder.HTML( builder.HEAD( builder.TITLE(self.title), builder.META(charset="utf-8")), self.body) <NEW_LINE> <DEDENT> @property <NEW_LINE> def html(self): <NEW_LINE> <INDENT> return lxml.html.tostring( self.full_tree, pretty_print=True, encoding='utf-8', method='html', doctype='<!DOCTYPE html>') <NEW_LINE> <DEDENT> @property <NEW_LINE> def xhtml_filename(self): <NEW_LINE> <INDENT> return self.filename.replace('.html', '.xhtml') <NEW_LINE> <DEDENT> def write(self): <NEW_LINE> <INDENT> with open(self.filename, 'w') as f: <NEW_LINE> <INDENT> f.write(self.html) | Generic xhtml container | 625990739c8ee82313040e0d |
class PromiseProxy(Proxy): <NEW_LINE> <INDENT> __slots__ = ('__pending__', ) <NEW_LINE> def _get_current_object(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return object.__getattribute__(self, '__thing') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return self.__evaluate__() <NEW_LINE> <DEDENT> <DEDENT> def __then__(self, fun, *args, **kwargs): <NEW_LINE> <INDENT> if self.__evaluated__(): <NEW_LINE> <INDENT> return fun(*args, **kwargs) <NEW_LINE> <DEDENT> from collections import deque <NEW_LINE> try: <NEW_LINE> <INDENT> pending = object.__getattribute__(self, '__pending__') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pending = None <NEW_LINE> <DEDENT> if pending is None: <NEW_LINE> <INDENT> pending = deque() <NEW_LINE> object.__setattr__(self, '__pending__', pending) <NEW_LINE> <DEDENT> pending.append((fun, args, kwargs)) <NEW_LINE> <DEDENT> def __evaluated__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> object.__getattribute__(self, '__thing') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __maybe_evaluate__(self): <NEW_LINE> <INDENT> return self._get_current_object() <NEW_LINE> <DEDENT> def __evaluate__(self, _clean=('_Proxy__local', '_Proxy__args', '_Proxy__kwargs')): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> thing = Proxy._get_current_object(self) <NEW_LINE> object.__setattr__(self, '__thing', thing) <NEW_LINE> return thing <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> for attr in _clean: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> object.__delattr__(self, attr) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> pending = object.__getattribute__(self, '__pending__') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while pending: <NEW_LINE> <INDENT> fun, args, kwargs = pending.popleft() <NEW_LINE> fun(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> object.__delattr__(self, '__pending__') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass | This is a proxy to an object that has not yet been evaluated.
:class:`Proxy` will evaluate the object each time, while the
promise will only evaluate it once. | 625990734e4d562566373d13 |
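A minimal sketch of the promise behaviour described above, assuming the Proxy base class (not shown in this row, e.g. celery.local.Proxy) wraps a zero-argument callable:

p = PromiseProxy(lambda: sum(range(1000)))   # nothing is computed yet
p.__then__(print, 'promise evaluated')       # callback queued until first evaluation
assert not p.__evaluated__()
value = p.__maybe_evaluate__()               # computes once and fires the queued callback
assert p.__evaluated__() and value == 499500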
class GpuUsageTimeseries(Plugin): <NEW_LINE> <INDENT> name = property(lambda x: "gpu_usage") <NEW_LINE> mode = property(lambda x: "timeseries") <NEW_LINE> requiredMetrics = property(lambda x: ["nvidia.gpuactive"]) <NEW_LINE> optionalMetrics = property(lambda x: []) <NEW_LINE> derivedMetrics = property(lambda x: []) <NEW_LINE> def __init__(self, job): <NEW_LINE> <INDENT> super(GpuUsageTimeseries, self).__init__(job) <NEW_LINE> self._data = TimeseriesAccumulator(job.nodecount, self._job.walltime) <NEW_LINE> self._hostdata = {} <NEW_LINE> self._hostdevnames = {} <NEW_LINE> <DEDENT> def process(self, nodemeta, timestamp, data, description): <NEW_LINE> <INDENT> hostidx = nodemeta.nodeindex <NEW_LINE> if len(data[0]) == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if nodemeta.nodeindex not in self._hostdata: <NEW_LINE> <INDENT> self._hostdata[hostidx] = numpy.empty((TimeseriesAccumulator.MAX_DATAPOINTS, len(data[0]))) <NEW_LINE> self._hostdevnames[hostidx] = dict((str(k), str(v)) for k, v in zip(description[0][0], description[0][1])) <NEW_LINE> <DEDENT> avg_usage = numpy.mean(data[0]) <NEW_LINE> insertat = self._data.adddata(hostidx, timestamp, avg_usage) <NEW_LINE> if insertat != None: <NEW_LINE> <INDENT> self._hostdata[hostidx][insertat] = data[0] <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def results(self): <NEW_LINE> <INDENT> values = self._data.get() <NEW_LINE> if len(self._hostdata) > 64: <NEW_LINE> <INDENT> memdata = values[:, :, 1] <NEW_LINE> sortarr = numpy.argsort(memdata.T, axis=1) <NEW_LINE> retdata = { "min": self.collatedata(sortarr[:, 0], memdata), "max": self.collatedata(sortarr[:, -1], memdata), "med": self.collatedata(sortarr[:, sortarr.shape[1] / 2], memdata), "times": values[0, :, 0].tolist(), "hosts": {} } <NEW_LINE> uniqhosts = Counter(sortarr[:, 0]) <NEW_LINE> uniqhosts.update(sortarr[:, -1]) <NEW_LINE> uniqhosts.update(sortarr[:, sortarr.shape[1] / 2]) <NEW_LINE> includelist = uniqhosts.keys() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> retdata = { "times": values[0, :, 0].tolist(), "hosts": {} } <NEW_LINE> includelist = self._hostdata.keys() <NEW_LINE> <DEDENT> for hostidx in includelist: <NEW_LINE> <INDENT> retdata['hosts'][str(hostidx)] = {} <NEW_LINE> retdata['hosts'][str(hostidx)]['all'] = values[hostidx, :, 1].tolist() <NEW_LINE> retdata['hosts'][str(hostidx)]['dev'] = {} <NEW_LINE> for devid in self._hostdevnames[hostidx].iterkeys(): <NEW_LINE> <INDENT> dpnts = len(values[hostidx, :, 0]) <NEW_LINE> retdata['hosts'][str(hostidx)]['dev'][devid] = self._hostdata[hostidx][:dpnts, int(devid)].tolist() <NEW_LINE> <DEDENT> retdata['hosts'][str(hostidx)]['names'] = self._hostdevnames[hostidx] <NEW_LINE> <DEDENT> return retdata <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def collatedata(args, rates): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for timepoint, hostidx in enumerate(args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result.append([rates[hostidx, timepoint], int(hostidx)]) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return result | Generate the CPU usage as a timeseries data | 62599073fff4ab517ebcf127 |
class ResCompany(models.Model): <NEW_LINE> <INDENT> _inherit = "res.company" <NEW_LINE> default_resource_calendar_id = fields.Many2one( string=u'Calendário Padrão', comodel_name=u'resource.calendar', help=u'Calendário que indica os feriados padrões da empresa.', ) | Override company to activate phone validation | 625990732ae34c7f260ac9f1
class HKNBoard(backend.controller.Controller): <NEW_LINE> <INDENT> def each(self, lights): <NEW_LINE> <INDENT> self.set(lights); <NEW_LINE> <DEDENT> def all(self, color): <NEW_LINE> <INDENT> _ = [ color for i in range(5) ]; <NEW_LINE> self.set(_); | these are the LED boards I created to demo ACRIS at the HKN expo | 62599073ec188e330fdfa1b1 |
@swagger.model <NEW_LINE> class NeuronParameter(object): <NEW_LINE> <INDENT> resource_fields = { 'parameterName': fields.String, 'value': fields.Float, } <NEW_LINE> required = ['parameterName', 'value'] | NeuronParameter
Only used for swagger documentation | 6259907301c39578d7f143bb |
class ComputeInstanceTemplatesDeleteRequest(_messages.Message): <NEW_LINE> <INDENT> instanceTemplate = _messages.StringField(1, required=True) <NEW_LINE> project = _messages.StringField(2, required=True) | A ComputeInstanceTemplatesDeleteRequest object.
Fields:
instanceTemplate: The name of the instance template to delete.
project: Project ID for this request. | 625990732ae34c7f260ac9f1 |
class Normal2D(tf.keras.layers.Layer): <NEW_LINE> <INDENT> def __init__(self, stddev_x1=1.0, stddev_x2=1.0, correlation=0.99, noise_sigma=0.0, uniform_noise=True): <NEW_LINE> <INDENT> super(Normal2D, self).__init__() <NEW_LINE> cov = [[stddev_x1**2.0, correlation*stddev_x1*stddev_x2], [correlation*stddev_x1*stddev_x2, stddev_x2**2.0]] <NEW_LINE> self.cov = tf.convert_to_tensor(cov) <NEW_LINE> self.dist = tfp.distributions.MultivariateNormalTriL( loc=tf.zeros((2,)), scale_tril=tf.linalg.cholesky(self.cov)) <NEW_LINE> self.uniform_noise = uniform_noise <NEW_LINE> self.noise_sigma = noise_sigma <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> self.w = self.add_weight(shape=(2,), trainable=True) <NEW_LINE> <DEDENT> def call(self, inputs): <NEW_LINE> <INDENT> if self.uniform_noise: <NEW_LINE> <INDENT> w = _add_gradient_noise(self.w, self.noise_sigma) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> w = tf.cond(tf.reduce_sum(self.w) <= 0.0, lambda: _add_gradient_noise(self.w, self.noise_sigma), lambda: self.w) <NEW_LINE> <DEDENT> return -self.dist.log_prob(w) | 2D Normal model suitable for testing SG-MCMC procedures. | 6259907376e4537e8c3f0e8c |
class LongListFilterMixin(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def media(self): <NEW_LINE> <INDENT> cdn_base = 'https://ajax.googleapis.com/ajax/libs/' <NEW_LINE> show = getattr(self, 'long_list_filter_show', 'active') <NEW_LINE> threshold = getattr(self, 'long_list_filter_threshold', '300') <NEW_LINE> height = getattr(self, 'long_list_filter_height', '100') <NEW_LINE> media = super(LongListFilterMixin, self).media <NEW_LINE> media.add_js([ '{}jqueryui/1.11.4/jquery-ui.min.js'.format(cdn_base), 'js/ixxy_admin_utils/long_list_filter.js?show={}&threshold={}&height={}'.format( show, threshold, height, ), ]) <NEW_LINE> media.add_css({ 'all': [ '{}jqueryui/1.11.4/themes/smoothness/jquery-ui.css'.format(cdn_base) ] }) <NEW_LINE> return media | Automatically reduce the amount of space taken up by very long filters.
It hides the list of options and replaces it with an input field that autocompletes.
Unlike a true autocomplete this won't save queries or speed up page load
but it's a quick and dirty improvement to the UI | 62599073e5267d203ee6d044 |
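A minimal sketch of how the mixin above might be wired into a Django admin class; the Book model and the threshold/height values are illustrative, not part of the original:

from django.contrib import admin

@admin.register(Book)                          # Book is a hypothetical model
class BookAdmin(LongListFilterMixin, admin.ModelAdmin):
    list_filter = ('author',)
    long_list_filter_threshold = 100           # collapse filters with more than ~100 entries
    long_list_filter_height = 150              # height of the collapsed filter box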
class User(AbstractUser): <NEW_LINE> <INDENT> avatar = models.ImageField(null=True, blank=True) <NEW_LINE> gender = models.CharField( choices=GENDER_CHOICES, max_length=10, null=True, blank=True ) <NEW_LINE> bio = models.TextField(default="", blank=True) <NEW_LINE> birthdate = models.DateField(null=True) <NEW_LINE> langauge = models.CharField( choices=LANGUAGE_CHOICES, max_length=2, null=True, blank=True ) <NEW_LINE> currency = models.CharField( choices=CURRENCY_CHOICES, max_length=3, null=True, blank=True ) <NEW_LINE> superhost = models.BooleanField(default=False) | Custom User model | 62599073d268445f2663a7e4
class Lemmatizer(IModifier): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> IModifier.__init__(self) <NEW_LINE> self.lemmatizer = nltk.stem.WordNetLemmatizer() <NEW_LINE> <DEDENT> def process(self, word, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> assert(type(word) is str) <NEW_LINE> return self.lemmatizer.lemmatize(word) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) | Wrapper on nltk.stem.WordNetLemmatizer for lemmatizing words | 62599073283ffb24f3cf51b7 |
class JuneFifth: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def threeSum(nums: List[int]) -> List[List[int]]: <NEW_LINE> <INDENT> res = [] <NEW_LINE> if not nums or len(nums) < 3: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> nums.sort() <NEW_LINE> for i in range(len(nums)): <NEW_LINE> <INDENT> if nums[i] > 0: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> if i > 0 and nums[i] == nums[i - 1]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> left, right = i + 1, len(nums) - 1 <NEW_LINE> while left < right: <NEW_LINE> <INDENT> if nums[i] + nums[left] + nums[right] == 0: <NEW_LINE> <INDENT> res.append([nums[i], nums[left], nums[right]]) <NEW_LINE> while left < right and nums[left] == nums[left + 1]: <NEW_LINE> <INDENT> left += 1 <NEW_LINE> <DEDENT> while left < right and nums[right] == nums[right - 1]: <NEW_LINE> <INDENT> right -= 1 <NEW_LINE> <DEDENT> left += 1 <NEW_LINE> right -= 1 <NEW_LINE> <DEDENT> elif nums[i] + nums[left] + nums[right] > 0: <NEW_LINE> <INDENT> right -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> left += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return res | 2020/06/12 15. 三数之和
Given an array nums containing n integers, determine whether there exist three elements a, b, c in nums such that a + b + c = 0.
Find all unique triplets that satisfy this condition.
Example:
Given the array nums = [-1, 0, 1, 2, -1, -4],
the set of triplets satisfying the requirement is:
[[-1, 0, 1],[-1, -1, 2]]
Source: LeetCode (力扣)
Link: https://leetcode-cn.com/problems/3sum
Copyright belongs to LeetCode (领扣网络). Commercial reprints require official authorization; non-commercial reprints must credit the source. | 62599073442bda511e95d9de
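A quick check of the solution above against the example in the problem statement (triplet order may differ, since the input is sorted first):

result = JuneFifth.threeSum([-1, 0, 1, 2, -1, -4])
print(result)   # [[-1, -1, 2], [-1, 0, 1]]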
class AdminHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> self.render("admin.html") | Admin backend | 625990731f5feb6acb164501
class OrderedSet(collections.MutableSet): <NEW_LINE> <INDENT> def __init__(self, iterable=None): <NEW_LINE> <INDENT> self.end = end = [] <NEW_LINE> end += [None, end, end] <NEW_LINE> self.map = {} <NEW_LINE> if iterable is not None: <NEW_LINE> <INDENT> self |= iterable <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.map) <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> return key in self.map <NEW_LINE> <DEDENT> def add(self, key): <NEW_LINE> <INDENT> if key not in self.map: <NEW_LINE> <INDENT> end = self.end <NEW_LINE> curr = end[1] <NEW_LINE> curr[2] = end[1] = self.map[key] = [key, curr, end] <NEW_LINE> <DEDENT> <DEDENT> def promote(self, key): <NEW_LINE> <INDENT> if key in self.map: <NEW_LINE> <INDENT> self.discard(key) <NEW_LINE> <DEDENT> begin = self.end[2] <NEW_LINE> curr = begin[1] <NEW_LINE> curr[2] = begin[1] = self.map[key] = [key, curr, begin] <NEW_LINE> <DEDENT> def discard(self, key): <NEW_LINE> <INDENT> if key in self.map: <NEW_LINE> <INDENT> key, prev_item, next_item = self.map.pop(key) <NEW_LINE> prev_item[2] = next_item <NEW_LINE> next_item[1] = prev_item <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> end = self.end <NEW_LINE> curr = end[2] <NEW_LINE> while curr is not end: <NEW_LINE> <INDENT> yield curr[0] <NEW_LINE> curr = curr[2] <NEW_LINE> <DEDENT> <DEDENT> def __reversed__(self): <NEW_LINE> <INDENT> end = self.end <NEW_LINE> curr = end[1] <NEW_LINE> while curr is not end: <NEW_LINE> <INDENT> yield curr[0] <NEW_LINE> curr = curr[1] <NEW_LINE> <DEDENT> <DEDENT> def pop(self, last=True): <NEW_LINE> <INDENT> if not self: <NEW_LINE> <INDENT> raise KeyError('set is empty') <NEW_LINE> <DEDENT> key = self.end[1][0] if last else self.end[2][0] <NEW_LINE> self.discard(key) <NEW_LINE> return key <NEW_LINE> <DEDENT> def update(self, *args): <NEW_LINE> <INDENT> for item in chain(*args): <NEW_LINE> <INDENT> self.add(item) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if not self: <NEW_LINE> <INDENT> return '%s()' % (self.__class__.__name__,) <NEW_LINE> <DEDENT> return '%s(%r)' % (self.__class__.__name__, list(self)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, OrderedSet): <NEW_LINE> <INDENT> return len(self) == len(other) and list(self) == list(other) <NEW_LINE> <DEDENT> return set(self) == set(other) | Ordered set taken from http://code.activestate.com/recipes/576694/ | 62599073e76e3b2f99fda311 |
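Usage sketch for the ordered set above, following the ActiveState recipe it comes from: iteration preserves insertion order, re-adding an element keeps its position, and promote moves an element to the front.

s = OrderedSet('abracadabra')
print(list(s))    # ['a', 'b', 'r', 'c', 'd'] -- duplicates dropped, order kept
s.add('a')        # already present: position unchanged
s.promote('d')    # move 'd' to the front
print(list(s))    # ['d', 'a', 'b', 'r', 'c']
print(s.pop())    # 'c' -- pops from the end by default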
class Tree(GitObject): <NEW_LINE> <INDENT> typename = 'tree' <NEW_LINE> default_perm = '040000' <NEW_LINE> def __init__(self, repository, sha1): <NEW_LINE> <INDENT> self.sha1 = sha1 <NEW_LINE> self._repository = repository <NEW_LINE> self._data = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> if self._data is None: <NEW_LINE> <INDENT> self._data = TreeData.parse( self._repository, self._repository.run(['git', 'ls-tree', '-z', self.sha1]).output_lines( '\0' ), ) <NEW_LINE> <DEDENT> return self._data <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Tree<sha1: %s>' % self.sha1 | Represents a Git tree object.
The actual data content of the tree object is stored in the :attr:`data` member, which
is a :class:`TreeData` instance. | 6259907356b00c62f0fb41de |
class CallContext(Context): <NEW_LINE> <INDENT> def __init__(self, parent: Context, session_details: SessionDetails, call_details: CallDetails, **kwargs) -> None: <NEW_LINE> <INDENT> super().__init__(parent, **kwargs) <NEW_LINE> self.session_id = session_details.session <NEW_LINE> self.progress = call_details.progress <NEW_LINE> self.caller_session_id = call_details.caller <NEW_LINE> self.caller_auth_id = call_details.caller_authid <NEW_LINE> self.caller_auth_role = call_details.caller_authrole <NEW_LINE> self.procedure = call_details.procedure <NEW_LINE> self.enc_algo = call_details.enc_algo | Context class for procedure calls.
Procedure call handlers are passed an instance of this class as the first argument.
:ivar int session_id: our own WAMP session ID
:ivar Optional[Callable] progress: a callable through which the handler can send
progress information to the caller
:ivar Optional[int] caller_session_id: WAMP session ID of the caller (if disclosed)
:ivar Optional[str] caller_auth_id: WAMP authentication ID (username) of the caller
(if disclosed)
:ivar Optional[str] caller_auth_role: WAMP authentication role of the caller (if disclosed)
:ivar Optional[str] procedure: the actual name of the procedure (when using a pattern based
registration)
:ivar Optional[str] enc_algo: payload encryption algorithm that was in use, if any
(e.g. `cryptobox`, or a custom algorithm) | 625990734e4d562566373d15 |
class GetRulesResponse(object): <NEW_LINE> <INDENT> openapi_types = { 'rules': 'list[Rule]' } <NEW_LINE> attribute_map = { 'rules': 'rules' } <NEW_LINE> def __init__(self, rules=None): <NEW_LINE> <INDENT> self._rules = None <NEW_LINE> self.discriminator = None <NEW_LINE> if rules is not None: <NEW_LINE> <INDENT> self.rules = rules <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def rules(self): <NEW_LINE> <INDENT> return self._rules <NEW_LINE> <DEDENT> @rules.setter <NEW_LINE> def rules(self, rules): <NEW_LINE> <INDENT> self._rules = rules <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, GetRulesResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 6259907344b2445a339b75e5 |
class LinkFinder(externals.atom.data.LinkFinder): <NEW_LINE> <INDENT> def find_html_link(self): <NEW_LINE> <INDENT> for link in self.link: <NEW_LINE> <INDENT> if link.rel == 'alternate' and link.type == 'text/html': <NEW_LINE> <INDENT> return link.href <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> FindHtmlLink = find_html_link <NEW_LINE> def get_html_link(self): <NEW_LINE> <INDENT> for a_link in self.link: <NEW_LINE> <INDENT> if a_link.rel == 'alternate' and a_link.type == 'text/html': <NEW_LINE> <INDENT> return a_link <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> GetHtmlLink = get_html_link <NEW_LINE> def find_post_link(self): <NEW_LINE> <INDENT> return self.find_url('http://schemas.google.com/g/2005#post') <NEW_LINE> <DEDENT> FindPostLink = find_post_link <NEW_LINE> def get_post_link(self): <NEW_LINE> <INDENT> return self.get_link('http://schemas.google.com/g/2005#post') <NEW_LINE> <DEDENT> GetPostLink = get_post_link <NEW_LINE> def find_acl_link(self): <NEW_LINE> <INDENT> acl_link = self.get_acl_link() <NEW_LINE> if acl_link: <NEW_LINE> <INDENT> return acl_link.href <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> FindAclLink = find_acl_link <NEW_LINE> def get_acl_link(self): <NEW_LINE> <INDENT> acl_link = self.get_link(ACL_REL) <NEW_LINE> if acl_link: <NEW_LINE> <INDENT> return acl_link <NEW_LINE> <DEDENT> elif hasattr(self, 'feed_link'): <NEW_LINE> <INDENT> for a_feed_link in self.feed_link: <NEW_LINE> <INDENT> if a_feed_link.rel == ACL_REL: <NEW_LINE> <INDENT> return a_feed_link <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> GetAclLink = get_acl_link <NEW_LINE> def find_feed_link(self): <NEW_LINE> <INDENT> return self.find_url('http://schemas.google.com/g/2005#feed') <NEW_LINE> <DEDENT> FindFeedLink = find_feed_link <NEW_LINE> def get_feed_link(self): <NEW_LINE> <INDENT> return self.get_link('http://schemas.google.com/g/2005#feed') <NEW_LINE> <DEDENT> GetFeedLink = get_feed_link <NEW_LINE> def find_previous_link(self): <NEW_LINE> <INDENT> return self.find_url('previous') <NEW_LINE> <DEDENT> FindPreviousLink = find_previous_link <NEW_LINE> def get_previous_link(self): <NEW_LINE> <INDENT> return self.get_link('previous') <NEW_LINE> <DEDENT> GetPreviousLink = get_previous_link | Mixin used in Feed and Entry classes to simplify link lookups by type.
Provides lookup methods for edit, edit-media, post, ACL and other special
links which are common across Google Data APIs. | 62599073fff4ab517ebcf129 |
class McAuthInfoException(Exception): <NEW_LINE> <INDENT> pass | Profile information exception. | 6259907367a9b606de54772b |
class ScannerThread(threading.Thread): <NEW_LINE> <INDENT> output_lock = threading.Lock() <NEW_LINE> def __init__(self, squeue, ports, scan_ports): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.squeue = squeue <NEW_LINE> self.ports = ports <NEW_LINE> self.scan_ports = scan_ports <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self.scan_ports() <NEW_LINE> self.squeue.task_done() | Multithreading class | 625990733317a56b869bf1cc |
class HTMLReporter(Reporter): <NEW_LINE> <INDENT> def __init__(self, stats, report_file=None, report_dir=".", template_file="html.mako", template_dir=None, **kwargs): <NEW_LINE> <INDENT> super(HTMLReporter, self).__init__( stats, report_file=report_file, report_dir=report_dir, template_file=template_file, template_dir=template_dir, **kwargs) <NEW_LINE> self._init_template(template_filters=['unicode', 'h']) | A SQLTap Reporter that generates HTML format reports | 62599073097d151d1a2c2981 |
class DataPackageUpdatedSensor(BaseSensorOperator): <NEW_LINE> <INDENT> ui_color = '#33ccff' <NEW_LINE> @apply_defaults <NEW_LINE> def __init__(self, path, dependencies, *args, **kwargs): <NEW_LINE> <INDENT> if not osp.exists(path): <NEW_LINE> <INDENT> raise FileNotFoundError('dataset not found: {}'.format(path)) <NEW_LINE> <DEDENT> for p in dependencies: <NEW_LINE> <INDENT> if not osp.exists(p): <NEW_LINE> <INDENT> raise FileNotFoundError('dataset not found: {}'.format(p)) <NEW_LINE> <DEDENT> <DEDENT> self.path = path <NEW_LINE> self.dependencies = dependencies <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def poke(self, context): <NEW_LINE> <INDENT> dp = json.load(open(osp.join(self.path, 'datapackage.json'))) <NEW_LINE> last_update = dp['last_updated'] <NEW_LINE> for p in self.dependencies: <NEW_LINE> <INDENT> dp_other = json.load(open(osp.join(p, 'datapackage.json'))) <NEW_LINE> last_update_other = dp_other['last_updated'] <NEW_LINE> if to_datetime(last_update_other) > to_datetime(last_update): <NEW_LINE> <INDENT> self.last_update = last_update <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> raise AirflowSkipException('no need to update') | Sensor Operation to detect dataset changes. | 625990738a43f66fc4bf3aa4 |
class Router: <NEW_LINE> <INDENT> def __init__(self, app=None): <NEW_LINE> <INDENT> if app is not None: <NEW_LINE> <INDENT> self.register_blueprint(app) <NEW_LINE> <DEDENT> <DEDENT> def register_blueprint(self, app): <NEW_LINE> <INDENT> from app.api import account <NEW_LINE> app.register_blueprint(account.api.blueprint) <NEW_LINE> from app.api.account import follow <NEW_LINE> app.register_blueprint(follow.api.blueprint) <NEW_LINE> from app.api import post <NEW_LINE> app.register_blueprint(post.api.blueprint) | Implements a class that gathers the per-feature blueprints and registers them on the app.
:param app: A flask application | 625990731b99ca40022901bd |
class Usuário(TimeStampedModel, AbstractUser): <NEW_LINE> <INDENT> histórico = AuditlogHistoryField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['-id'] | Base user of the project. | 625990732ae34c7f260ac9f3
class LabelSmoothing(nn.Module): <NEW_LINE> <INDENT> def __init__(self, size: int, padding_idx: int, smoothing=0.0): <NEW_LINE> <INDENT> super(LabelSmoothing, self).__init__() <NEW_LINE> self.loss_fn = nn.KLDivLoss(reduction='sum') <NEW_LINE> self.padding_idx = padding_idx <NEW_LINE> self.confidence = 1.0 - smoothing <NEW_LINE> self.smoothing = smoothing <NEW_LINE> self.size = size <NEW_LINE> self.true_dist = None <NEW_LINE> <DEDENT> def forward(self, x, target): <NEW_LINE> <INDENT> assert x.size(1) == self.size <NEW_LINE> true_dist = x.clone() <NEW_LINE> true_dist.fill_(self.smoothing / (self.size - 2)) <NEW_LINE> true_dist.scatter_(1, target.unsqueeze(1), self.confidence) <NEW_LINE> true_dist[:, self.padding_idx] = 0 <NEW_LINE> mask = torch.nonzero(target == self.padding_idx) <NEW_LINE> if mask.dim() > 0 and len(mask) > 0: <NEW_LINE> <INDENT> true_dist.index_fill_(0, mask.squeeze(), 0.0) <NEW_LINE> <DEDENT> self.true_dist = true_dist <NEW_LINE> return self.loss_fn(x, true_dist) | Implement label smoothing on KLDivLoss. | 625990731f037a2d8b9e54f2 |
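A small sketch of how the criterion above is typically driven (a pattern that resembles the Annotated Transformer's label-smoothing loss): the input must already be log-probabilities, since KLDivLoss expects log input, and targets are class indices with the padding index masked out. The sizes here are illustrative.

import torch
import torch.nn.functional as F

criterion = LabelSmoothing(size=5, padding_idx=0, smoothing=0.1)
log_probs = F.log_softmax(torch.randn(3, 5), dim=-1)   # batch of 3, 5 classes
target = torch.tensor([2, 1, 0])                       # index 0 is the padding class
loss = criterion(log_probs, target)                    # summed KL divergence against smoothed targets
print(float(loss))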
class Images(object): <NEW_LINE> <INDENT> swagger_types = { 'links': 'list[Link]', 'list': 'list[Image]' } <NEW_LINE> attribute_map = { 'links': 'Links', 'list': 'List' } <NEW_LINE> def __init__(self, links=None, list=None): <NEW_LINE> <INDENT> self._links = None <NEW_LINE> self._list = None <NEW_LINE> if links is not None: <NEW_LINE> <INDENT> self.links = links <NEW_LINE> <DEDENT> if list is not None: <NEW_LINE> <INDENT> self.list = list <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def links(self): <NEW_LINE> <INDENT> return self._links <NEW_LINE> <DEDENT> @links.setter <NEW_LINE> def links(self, links): <NEW_LINE> <INDENT> self._links = links <NEW_LINE> <DEDENT> @property <NEW_LINE> def list(self): <NEW_LINE> <INDENT> return self._list <NEW_LINE> <DEDENT> @list.setter <NEW_LINE> def list(self, list): <NEW_LINE> <INDENT> self._list = list <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Images): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259907376e4537e8c3f0e8e |
class SSHDefaultScanPlugin(core.PluginBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> core.PluginBase.__init__(self) <NEW_LINE> self.id = "sshdefaultscan" <NEW_LINE> self.name = "sshdefaultscan" <NEW_LINE> self.plugin_version = "0.0.1" <NEW_LINE> self.version = "1.0.0" <NEW_LINE> self._command_regex = re.compile( r'^(python sshdefaultscan.py|\./sshdefaultscan.py).*?') <NEW_LINE> self._completition = {"--fast": "Fast scan mode"} <NEW_LINE> <DEDENT> def parseOutputString(self, output, debug=False): <NEW_LINE> <INDENT> for line in [l.strip() for l in output.split("\n")]: <NEW_LINE> <INDENT> output_rexeg_match = re.match( r".*:.*@\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", line) <NEW_LINE> if output_rexeg_match: <NEW_LINE> <INDENT> credentials, address = line.split("@") <NEW_LINE> host = self.createAndAddHost(address) <NEW_LINE> iface = self.createAndAddInterface( host, address, ipv4_address=address) <NEW_LINE> service = self.createAndAddServiceToInterface( host, iface, "ssh", protocol="tcp", ports=[22] ) <NEW_LINE> username, password = credentials.split(":") <NEW_LINE> cred = self.createAndAddCredToService( host, service, username, password) <NEW_LINE> vuln = self.createAndAddVulnToService( host, service, "Default credentials", desc="The SSH server have default credentials ({username}:{password})".format( username=username, password=password ), severity=3 ) <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def processCommandString(self, username, current_path, command_string): <NEW_LINE> <INDENT> if "--batch" not in command_string: <NEW_LINE> <INDENT> return "{command} --batch --batch-template {template}".format( command=command_string, template="{username}:{password}@{host}" ) <NEW_LINE> <DEDENT> return None | Handle sshdefaultscan (https://github.com/atarantini/sshdefaultscan) output
using --batch and --batch-template; supports --username and --password | 6259907366673b3332c31d0e |
class _ControlCodes(dict): <NEW_LINE> <INDENT> def key_for(self, obj): <NEW_LINE> <INDENT> for key, val in self.iteritems(): <NEW_LINE> <INDENT> if val is obj: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> <DEDENT> raise ValueError("The given object could not be found: %r" % obj) | Control codes used to "signal" a service via ControlService.
User-defined control codes are in the range 128-255. We generally use
the standard Python value for the Linux signal and add 128. Example:
>>> signal.SIGUSR1
10
control_codes['graceful'] = 128 + 10 | 625990734428ac0f6e659e44 |
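A small Python 2 sketch of the lookup described in this docstring (the class uses dict.iteritems, so it predates Python 3); key_for compares the stored values by identity:

graceful = 128 + 10                       # SIGUSR1 (10 on Linux) + 128, per the docstring
control_codes = _ControlCodes(graceful=graceful)
print(control_codes['graceful'])          # 138
print(control_codes.key_for(graceful))    # 'graceful'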
@admin.register(models.Room) <NEW_LINE> class RoomAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> inlines = (PhotoInline,) <NEW_LINE> fieldsets = ( ( "Basic Info", {"fields": ("name", "description", "country", "city", "address", "price")}, ), ("Times", {"fields": ("check_in", "check_out", "instant_book")}), ("Spaces", {"fields": ("guests", "beds", "bedrooms", "baths")}), ( "More About the Spaces", { "classes": ("collapse",), "fields": ("amenities", "facilities", "house_rules"), }, ), ("Last Details", {"fields": ("host",)}), ) <NEW_LINE> list_display = ( "name", "country", "city", "price", "guests", "beds", "bedrooms", "baths", "check_in", "check_out", "instant_book", "count_amenities", "count_photos", "total_rating", ) <NEW_LINE> list_filter = ( "instant_book", "host__superhost", "room_type", "amenities", "facilities", "house_rules", "city", "country", ) <NEW_LINE> raw_id_fields = ("host",) <NEW_LINE> search_fields = ("=city", "^host__username") <NEW_LINE> filter_horizontal = ("amenities", "facilities", "house_rules") <NEW_LINE> def count_amenities(self, obj): <NEW_LINE> <INDENT> return obj.amenities.count() <NEW_LINE> <DEDENT> def count_photos(self, obj): <NEW_LINE> <INDENT> return obj.photos.count() <NEW_LINE> <DEDENT> count_photos.short_description = "Photo Count" | Room Model Definition | 625990737b25080760ed896c |
class AbstractFetcher(object): <NEW_LINE> <INDENT> def __init__(self, data=None): <NEW_LINE> <INDENT> self.id = str(uuid.uuid4()) <NEW_LINE> self.context = f'/tmp/rf-runner/{self.id}/' <NEW_LINE> if data: <NEW_LINE> <INDENT> self._load_meta(data) <NEW_LINE> <DEDENT> <DEDENT> def _load_meta(self, data): <NEW_LINE> <INDENT> raise NotOverriddenException <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.create_context() <NEW_LINE> return self <NEW_LINE> <DEDENT> def create_context(self): <NEW_LINE> <INDENT> os.makedirs(self.context) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.clean() <NEW_LINE> self.fetch() <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> files = os.listdir(self.context) <NEW_LINE> for f in files: <NEW_LINE> <INDENT> os.remove(os.path.join(self.context, f)) <NEW_LINE> <DEDENT> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> raise NotOverriddenException <NEW_LINE> <DEDENT> def get_context(self): <NEW_LINE> <INDENT> return self.context <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> self.remove() <NEW_LINE> <DEDENT> def remove(self): <NEW_LINE> <INDENT> shutil.rmtree(self.context) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def meta(): <NEW_LINE> <INDENT> raise NotOverriddenException | Fetcher should get testcases from sources for execution | 625990734428ac0f6e659e45 |
class CheckoutPage(Report): <NEW_LINE> <INDENT> __name__ = 'account.payment.stripe.checkout' | Stripe Checkout | 6259907326068e7796d4e24d |
class Config(object): <NEW_LINE> <INDENT> SQLALCHEMY_DATABASE_URI = os.getenv("SQLALCHEMY_DATABASE_URI") <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False <NEW_LINE> SECRET_KEY = os.getenv("SECRET_KEY") | Set environment variables. | 62599073cc0a2c111447c759 |
class hr_employee(osv.osv): <NEW_LINE> <INDENT> _name = 'hr.employee' <NEW_LINE> _inherit = 'hr.employee' <NEW_LINE> _columns = { 'employee_no': fields.char('Employee ID', size=IDLEN, readonly=True), 'f_employee_no': fields.char('Employee ID', size=IDLEN+2, readonly=True), 'tin_no': fields.char('TIN No', size=10), } <NEW_LINE> _sql_constraints = [ ('employeeno_uniq', 'unique(employee_no)', 'The Employee Number must be unique accross the company(s).'), ('tinno_uniq', 'unique(tin_no)', 'There is already another employee with this TIN number.'), ] <NEW_LINE> def _check_identification(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> obj = self.browse(cr, uid, ids[0], context=context) <NEW_LINE> if obj.identification_id or obj.tin_no: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def _generate_employeeno(self, cr, uid, arg): <NEW_LINE> <INDENT> eid = '' <NEW_LINE> tries = 0 <NEW_LINE> max_tries = 50 <NEW_LINE> while tries < max_tries: <NEW_LINE> <INDENT> rnd = random.SystemRandom() <NEW_LINE> digit1 = ''.join(rnd.choice(['1','2','3','4','5','6','7','8','9'])) <NEW_LINE> digits = ''.join(rnd.choice(string.digits) for _ in range(IDLEN - 1)) <NEW_LINE> eid = digit1 + digits <NEW_LINE> ids = self.search(cr, uid, [('employee_no', '=', eid)]) <NEW_LINE> if len(ids) == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> tries += 1 <NEW_LINE> <DEDENT> if tries == max_tries: <NEW_LINE> <INDENT> raise osv.except_osv(_('Error'), _('Unable to generate an Employee ID number that is unique.')) <NEW_LINE> <DEDENT> return eid <NEW_LINE> <DEDENT> def create(self, cr, uid, vals, context={}): <NEW_LINE> <INDENT> eid = self._generate_employeeno(cr, uid, context) <NEW_LINE> vals['employee_no'] = eid <NEW_LINE> vals['f_employee_no'] = '%s-%s-%s' % (eid[:2], eid[2:4], eid[4:]) <NEW_LINE> return super(hr_employee, self).create(cr, uid, vals, context) | Implement company wide unique identification number. | 62599073d268445f2663a7e6 |
class chebyshevu_root(Function): <NEW_LINE> <INDENT> nargs = 2 <NEW_LINE> @classmethod <NEW_LINE> @deprecated <NEW_LINE> def canonize(cls, n, k): <NEW_LINE> <INDENT> return cls.eval(n, k) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def eval(cls, n, k): <NEW_LINE> <INDENT> if not 0 <= k < n: <NEW_LINE> <INDENT> raise ValueError("must have 0 <= k < n") <NEW_LINE> <DEDENT> return C.cos(S.Pi*(k+1)/(n+1)) | chebyshevu_root(n, k) returns the kth root (indexed from zero) of the
nth Chebyshev polynomial of the second kind; that is, if 0 <= k < n,
chebyshevu(n, chebyshevu_root(n, k)) == 0.
Examples
========
>>> chebyshevu_root(3, 2)
-2**(1/2)/2
>>> chebyshevu(3, chebyshevu_root(3, 2))
0 | 625990737c178a314d78e874 |
class StoreStreamerPart(multipart_streamer.MultiPartStreamer): <NEW_LINE> <INDENT> def __init__(self, store, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.store = store <NEW_LINE> <DEDENT> def create_part(self, headers): <NEW_LINE> <INDENT> return multipart_streamer.TemporaryFileStreamedPart(self, headers, tmp_dir=self.store.tmp_dir) | Create a Part streamer with a custom temp directory. Using the default
tmp directory and trying to move the file to $RIFT_ARTIFACTS occasionally
causes link errors. So create a temp directory within the staging area. | 6259907397e22403b383c815 |
class SubscriptionInstance(InstanceResource): <NEW_LINE> <INDENT> def __init__(self, version, payload, sid=None): <NEW_LINE> <INDENT> super(SubscriptionInstance, self).__init__(version) <NEW_LINE> self._properties = { 'account_sid': payload.get('account_sid'), 'sid': payload.get('sid'), 'date_created': deserialize.iso8601_datetime(payload.get('date_created')), 'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')), 'description': payload.get('description'), 'sink_sid': payload.get('sink_sid'), 'url': payload.get('url'), 'links': payload.get('links'), } <NEW_LINE> self._context = None <NEW_LINE> self._solution = {'sid': sid or self._properties['sid'], } <NEW_LINE> <DEDENT> @property <NEW_LINE> def _proxy(self): <NEW_LINE> <INDENT> if self._context is None: <NEW_LINE> <INDENT> self._context = SubscriptionContext(self._version, sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._context <NEW_LINE> <DEDENT> @property <NEW_LINE> def account_sid(self): <NEW_LINE> <INDENT> return self._properties['account_sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def sid(self): <NEW_LINE> <INDENT> return self._properties['sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def date_created(self): <NEW_LINE> <INDENT> return self._properties['date_created'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def date_updated(self): <NEW_LINE> <INDENT> return self._properties['date_updated'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return self._properties['description'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def sink_sid(self): <NEW_LINE> <INDENT> return self._properties['sink_sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._properties['url'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def links(self): <NEW_LINE> <INDENT> return self._properties['links'] <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> return self._proxy.fetch() <NEW_LINE> <DEDENT> def update(self, description=values.unset, sink_sid=values.unset): <NEW_LINE> <INDENT> return self._proxy.update(description=description, sink_sid=sink_sid, ) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> return self._proxy.delete() <NEW_LINE> <DEDENT> @property <NEW_LINE> def subscribed_events(self): <NEW_LINE> <INDENT> return self._proxy.subscribed_events <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items()) <NEW_LINE> return '<Twilio.Events.V1.SubscriptionInstance {}>'.format(context) | PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. | 62599073bf627c535bcb2ddf |
class Execution(): <NEW_LINE> <INDENT> def __init__(self, execution, api=None): <NEW_LINE> <INDENT> self.resource_id = None <NEW_LINE> self.outputs = None <NEW_LINE> self.output_types = None <NEW_LINE> self.output_resources = None <NEW_LINE> self.result = None <NEW_LINE> self.status = None <NEW_LINE> self.source_location = None <NEW_LINE> self.error = None <NEW_LINE> self.error_message = None <NEW_LINE> self.error_location = None <NEW_LINE> self.call_stack = None <NEW_LINE> self.api = get_api_connection(api) <NEW_LINE> try: <NEW_LINE> <INDENT> self.resource_id, execution = get_resource_dict( execution, "execution", self.api) <NEW_LINE> <DEDENT> except ValueError as resource: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> execution = json.loads(str(resource)) <NEW_LINE> self.resource_id = execution["resource"] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValueError("The execution resource was faulty: \n%s" % resource) <NEW_LINE> <DEDENT> <DEDENT> if 'object' in execution and isinstance(execution['object'], dict): <NEW_LINE> <INDENT> execution = execution['object'] <NEW_LINE> self.status = execution["status"] <NEW_LINE> self.error = self.status.get("error") <NEW_LINE> if self.error is not None: <NEW_LINE> <INDENT> self.error_message = self.status.get("message") <NEW_LINE> self.error_location = self.status.get("source_location") <NEW_LINE> self.call_stack = self.status.get("call_stack") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.source_location = self.status.get("source_location") <NEW_LINE> if 'execution' in execution and isinstance(execution['execution'], dict): <NEW_LINE> <INDENT> execution = execution.get('execution') <NEW_LINE> self.result = execution.get("result") <NEW_LINE> self.outputs = dict((output[0], output[1]) for output in execution.get("outputs")) <NEW_LINE> self.output_types = dict((output[0], output[2]) for output in execution.get("outputs")) <NEW_LINE> self.output_resources = dict((res["variable"], res["id"]) for res in execution.get("output_resources")) <NEW_LINE> self.execution = execution | A class to deal with the information in an execution result
| 62599073e76e3b2f99fda315 |