Columns: code (string, length 4 to 4.48k), docstring (string, length 1 to 6.45k), _id (string, length 24)
class AddTeam(Add): <NEW_LINE> <INDENT> log = logging.getLogger(__name__) <NEW_LINE> _command_name = 'add team'
Add a team.
6259903ed10714528d69efaf
class TreeAPIView(ListAPIView): <NEW_LINE> <INDENT> serializer_class = TreeSerializer <NEW_LINE> authentication_classes = (JSONWebTokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> def list(self, request, *args, **kwargs): <NEW_LINE> <INDENT> queryset = self.filter_queryset(self.get_queryset()) <NEW_LINE> page = self.paginate_queryset(queryset) <NEW_LINE> serializer = self.get_serializer(queryset, many=True) <NEW_LINE> tree_dict = {} <NEW_LINE> tree_data = [] <NEW_LINE> try: <NEW_LINE> <INDENT> for item in serializer.data: <NEW_LINE> <INDENT> tree_dict[item['id']] = item <NEW_LINE> <DEDENT> for i in tree_dict: <NEW_LINE> <INDENT> if tree_dict[i]['pid']: <NEW_LINE> <INDENT> pid = tree_dict[i]['pid'] <NEW_LINE> parent = tree_dict[pid] <NEW_LINE> parent.setdefault('children', []).append(tree_dict[i]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tree_data.append(tree_dict[i]) <NEW_LINE> <DEDENT> <DEDENT> results = tree_data <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> results = serializer.data <NEW_LINE> <DEDENT> if page is not None: <NEW_LINE> <INDENT> return self.get_paginated_response(results) <NEW_LINE> <DEDENT> return XopsResponse(results)
Custom tree-structure View.
6259903e73bcbd0ca4bcb4d1
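The custom list() in TreeAPIView above assembles the flat serialized rows into a tree by linking each item to its parent through pid. A minimal standalone sketch of that assembly step, using a hypothetical flat list in place of serializer.data:

flat = [
    {"id": 1, "pid": None, "name": "root"},
    {"id": 2, "pid": 1, "name": "child-a"},
    {"id": 3, "pid": 1, "name": "child-b"},
    {"id": 4, "pid": 3, "name": "grandchild"},
]
tree_dict = {item["id"]: item for item in flat}   # index every row by id
tree_data = []                                    # collects the root nodes
for item in tree_dict.values():
    if item["pid"]:
        # non-root: attach to the parent's "children" list
        tree_dict[item["pid"]].setdefault("children", []).append(item)
    else:
        tree_data.append(item)
print(tree_data)  # roots with nested "children" lists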
class HTTSOAPAudit(Base): <NEW_LINE> <INDENT> __tablename__ = 'http_soap_audit' <NEW_LINE> id = Column(Integer, Sequence('http_soap_audit_seq'), primary_key=True) <NEW_LINE> name = Column(String(200), nullable=False, index=True) <NEW_LINE> cid = Column(String(200), nullable=False, index=True) <NEW_LINE> transport = Column(String(200), nullable=False, index=True) <NEW_LINE> connection = Column(String(200), nullable=False, index=True) <NEW_LINE> req_time = Column(DateTime(), nullable=False) <NEW_LINE> resp_time = Column(DateTime(), nullable=True) <NEW_LINE> user_token = Column(String(200), nullable=True, index=True) <NEW_LINE> invoke_ok = Column(Boolean(), nullable=True) <NEW_LINE> auth_ok = Column(Boolean(), nullable=True) <NEW_LINE> remote_addr = Column(String(200), nullable=False, index=True) <NEW_LINE> req_headers = Column(LargeBinary(), nullable=True) <NEW_LINE> req_payload = Column(LargeBinary(), nullable=True) <NEW_LINE> resp_headers = Column(LargeBinary(), nullable=True) <NEW_LINE> resp_payload = Column(LargeBinary(), nullable=True) <NEW_LINE> cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False) <NEW_LINE> conn_id = Column(Integer, ForeignKey('http_soap.id', ondelete='CASCADE'), nullable=False) <NEW_LINE> def __init__(self, id=None, name=None, cid=None, transport=None, connection=None, req_time=None, resp_time=None, user_token=None, invoke_ok=None, auth_ok=None, remote_addr=None, req_headers=None, req_payload=None, resp_headers=None, resp_payload=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.name = name <NEW_LINE> self.cid = cid <NEW_LINE> self.transport = transport <NEW_LINE> self.connection = connection <NEW_LINE> self.req_time = req_time <NEW_LINE> self.resp_time = resp_time <NEW_LINE> self.user_token = user_token <NEW_LINE> self.invoke_ok = invoke_ok <NEW_LINE> self.auth_ok = auth_ok <NEW_LINE> self.remote_addr = remote_addr <NEW_LINE> self.req_headers = req_headers <NEW_LINE> self.req_payload = req_payload <NEW_LINE> self.resp_headers = resp_headers <NEW_LINE> self.resp_payload = resp_payload
An audit log for HTTP/SOAP channels and outgoing connections.
6259903ebaa26c4b54d504ef
class GenerateProto(appcommands.Cmd): <NEW_LINE> <INDENT> def Run(self, _): <NEW_LINE> <INDENT> codegen = _GetCodegenFromFlags() <NEW_LINE> _WriteProtoFiles(codegen)
Generate just the two proto files for a given API.
6259903e23e79379d538d746
class TestLiveApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = opendota_client.api.live_api.LiveApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_live_get(self): <NEW_LINE> <INDENT> pass
LiveApi unit test stubs
6259903e1d351010ab8f4d64
class ClasspointersGenerator(object): <NEW_LINE> <INDENT> def __init__(self, source_file): <NEW_LINE> <INDENT> self._source_file = source_file <NEW_LINE> self._internal_cp_table = self._load_cp_table() <NEW_LINE> <DEDENT> def cp_subset_by_ratio(self, min_class_ratio): <NEW_LINE> <INDENT> return {a_cp_row[ID_POSITION] for a_cp_row in self._internal_cp_table if a_cp_row[RATIO_POSITION] >= min_class_ratio} <NEW_LINE> <DEDENT> def cp_subsets_by_ratios(self, ratios_list): <NEW_LINE> <INDENT> return [self.cp_subset_by_ratio(a_ratio) for a_ratio in ratios_list] <NEW_LINE> <DEDENT> def _load_cp_table(self): <NEW_LINE> <INDENT> return read_json_obj_from_path(target_path=self._source_file)
Expected file format: [ [ 1, # Rank "P31", # ID 94119660, # Classes 0, # Instances 94119660, # Total 1.0, # ratio "instance of" # label ], [ 2, "P279", 3025254, 0, 3025254, 1.0, "subclass of" ], ....
6259903eb57a9660fecd2cc2
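Given the documented row layout above ([rank, ID, classes, instances, total, ratio, label]), a hedged sketch of the ratio-based subset; the example rows and the ID_POSITION/RATIO_POSITION values (assumed to be 1 and 5 from that layout) are illustrative, not taken from the source module:

rows = [
    [1, "P31", 94119660, 0, 94119660, 1.0, "instance of"],
    [2, "P279", 3025254, 0, 3025254, 1.0, "subclass of"],
    [3, "P131", 1000, 500, 1500, 0.66, "located in"],
]
ID_POSITION = 1      # assumed index of the ID column
RATIO_POSITION = 5   # assumed index of the ratio column

# Equivalent of ClasspointersGenerator.cp_subset_by_ratio(0.9) over this table:
subset = {row[ID_POSITION] for row in rows if row[RATIO_POSITION] >= 0.9}
print(subset)  # {'P31', 'P279'}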
class RichWidget(object): <NEW_LINE> <INDENT> def put_text(self, text, fg_color=None, bg_color=None, font=None, size=None, bold=False, italic=False, underline=False, strike=False): <NEW_LINE> <INDENT> raise NotImplementedError('Not implemented') <NEW_LINE> <DEDENT> def put_formatted(self, text, fg_color=None, bg_color=None, font=None, size=None, bold=False, italic=False, underline=False, strike=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = e3.common.XmlParser.XmlParser( '<span>' + text + '</span>').result <NEW_LINE> <DEDENT> except xml.parsers.expat.ExpatError: <NEW_LINE> <INDENT> logging.getLogger("gtkui.RichWidget").debug("cant parse '%s'" % (text, )) <NEW_LINE> return <NEW_LINE> <DEDENT> dct = e3.common.XmlParser.DictObj(result) <NEW_LINE> self._put_formatted(dct, fg_color, bg_color, font, size, bold, italic, underline, strike) <NEW_LINE> <DEDENT> def _put_formatted(self, dct, fg_color=None, bg_color=None, font=None, size=None, bold=False, italic=False, underline=False, strike=False): <NEW_LINE> <INDENT> bold = dct.tag == 'b' or dct.tag == 'strong' or bold <NEW_LINE> italic = dct.tag == 'i' or dct.tag == 'em' or italic <NEW_LINE> underline = dct.tag == 'u' or underline <NEW_LINE> strike = dct.tag == 's' or strike <NEW_LINE> if dct.tag == 'span' and dct.style: <NEW_LINE> <INDENT> style = e3.common.XmlParser.parse_css(dct.style) <NEW_LINE> font = style.font_family or font <NEW_LINE> try: <NEW_LINE> <INDENT> size = int(style.font_size) or size <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> fg_color = style.color or fg_color <NEW_LINE> bg_color = style.background_color or bg_color <NEW_LINE> <DEDENT> if dct.childs is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for child in dct.childs: <NEW_LINE> <INDENT> if type(child) == str or type(child) == unicode: <NEW_LINE> <INDENT> self.put_text(child, fg_color, bg_color, font, size, bold, italic, underline, strike) <NEW_LINE> <DEDENT> elif child.tag == 'img': <NEW_LINE> <INDENT> self.put_image(child.src, child.alt) <NEW_LINE> <DEDENT> elif child.tag == 'br': <NEW_LINE> <INDENT> self.new_line() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._put_formatted(child, fg_color, bg_color, font, size, bold, italic, underline, strike) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def put_image(self, path, tip=None): <NEW_LINE> <INDENT> raise NotImplementedError('Not implemented') <NEW_LINE> <DEDENT> def new_line(self): <NEW_LINE> <INDENT> raise NotImplementedError('Not implemented')
A base widget that allows adding formatted text based on an XHTML subset.
6259903eb830903b9686ed9d
class Solution: <NEW_LINE> <INDENT> def searchRange(self, A, target): <NEW_LINE> <INDENT> if len(A) == 0: <NEW_LINE> <INDENT> return [-1, -1] <NEW_LINE> <DEDENT> lb = 0 <NEW_LINE> rb = len(A) - 1 <NEW_LINE> while lb < rb: <NEW_LINE> <INDENT> m = (lb + rb) // 2 <NEW_LINE> if target < A[m]: <NEW_LINE> <INDENT> rb = m - 1 <NEW_LINE> <DEDENT> elif target > A[m]: <NEW_LINE> <INDENT> lb = m + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rb = m <NEW_LINE> <DEDENT> <DEDENT> idx1 = rb <NEW_LINE> lb = 0 <NEW_LINE> rb = len(A) - 1 <NEW_LINE> while lb < rb: <NEW_LINE> <INDENT> m = (lb + rb) // 2 + 1 <NEW_LINE> if target < A[m]: <NEW_LINE> <INDENT> rb = m - 1 <NEW_LINE> <DEDENT> elif target > A[m]: <NEW_LINE> <INDENT> lb = m + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lb = m <NEW_LINE> <DEDENT> <DEDENT> idx2 = lb <NEW_LINE> if A[idx1] != target: <NEW_LINE> <INDENT> return [-1, -1] <NEW_LINE> <DEDENT> return [idx1, idx2]
@param A: a sorted integer array @param target: the integer to search for @return: a list of length 2, [index1, index2]
6259903e30c21e258be99a54
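A quick usage check for the two-pass binary search above; the arrays are illustrative:

sol = Solution()
print(sol.searchRange([5, 7, 7, 8, 8, 10], 8))  # [3, 4]   (first and last index of 8)
print(sol.searchRange([5, 7, 7, 8, 8, 10], 6))  # [-1, -1] (target absent)
print(sol.searchRange([], 1))                   # [-1, -1] (empty input)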
class WLEDRestartButton(WLEDEntity, ButtonEntity): <NEW_LINE> <INDENT> _attr_device_class = ButtonDeviceClass.RESTART <NEW_LINE> _attr_entity_category = ENTITY_CATEGORY_CONFIG <NEW_LINE> def __init__(self, coordinator: WLEDDataUpdateCoordinator) -> None: <NEW_LINE> <INDENT> super().__init__(coordinator=coordinator) <NEW_LINE> self._attr_name = f"{coordinator.data.info.name} Restart" <NEW_LINE> self._attr_unique_id = f"{coordinator.data.info.mac_address}_restart" <NEW_LINE> <DEDENT> @wled_exception_handler <NEW_LINE> async def async_press(self) -> None: <NEW_LINE> <INDENT> await self.coordinator.wled.reset()
Defines a WLED restart button.
6259903e63f4b57ef0086698
class WindowsSecurityContextOptions(_kuber_definitions.Definition): <NEW_LINE> <INDENT> def __init__( self, gmsa_credential_spec: str = None, gmsa_credential_spec_name: str = None, host_process: bool = None, run_as_user_name: str = None, ): <NEW_LINE> <INDENT> super(WindowsSecurityContextOptions, self).__init__( api_version="core/v1", kind="WindowsSecurityContextOptions" ) <NEW_LINE> self._properties = { "gmsaCredentialSpec": gmsa_credential_spec if gmsa_credential_spec is not None else "", "gmsaCredentialSpecName": gmsa_credential_spec_name if gmsa_credential_spec_name is not None else "", "hostProcess": host_process if host_process is not None else None, "runAsUserName": run_as_user_name if run_as_user_name is not None else "", } <NEW_LINE> self._types = { "gmsaCredentialSpec": (str, None), "gmsaCredentialSpecName": (str, None), "hostProcess": (bool, None), "runAsUserName": (str, None), } <NEW_LINE> <DEDENT> @property <NEW_LINE> def gmsa_credential_spec(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("gmsaCredentialSpec"), ) <NEW_LINE> <DEDENT> @gmsa_credential_spec.setter <NEW_LINE> def gmsa_credential_spec(self, value: str): <NEW_LINE> <INDENT> self._properties["gmsaCredentialSpec"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def gmsa_credential_spec_name(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("gmsaCredentialSpecName"), ) <NEW_LINE> <DEDENT> @gmsa_credential_spec_name.setter <NEW_LINE> def gmsa_credential_spec_name(self, value: str): <NEW_LINE> <INDENT> self._properties["gmsaCredentialSpecName"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def host_process(self) -> bool: <NEW_LINE> <INDENT> return typing.cast( bool, self._properties.get("hostProcess"), ) <NEW_LINE> <DEDENT> @host_process.setter <NEW_LINE> def host_process(self, value: bool): <NEW_LINE> <INDENT> self._properties["hostProcess"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def run_as_user_name(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("runAsUserName"), ) <NEW_LINE> <DEDENT> @run_as_user_name.setter <NEW_LINE> def run_as_user_name(self, value: str): <NEW_LINE> <INDENT> self._properties["runAsUserName"] = value <NEW_LINE> <DEDENT> def __enter__(self) -> "WindowsSecurityContextOptions": <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> return False
WindowsSecurityContextOptions contain Windows-specific options and credentials.
6259903eac7a0e7691f73731
class Scanner: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.keywords = {} <NEW_LINE> self.token_specs = [ ('NUMBER', r'\d+'), ('DOT', r'\.'), ('HYPHEN', r'\-'), ('STAR', r'\*'), ('PLUS', r'\+'), ('FSLASH', r'\/'), ('COLON', r':'), ('EQUALS', r'='), ('PIPE', r'\|'), ('QUOTE', r'[\'"]'), ('PAREN', r'[()]'), ('CBRACKET', r'[{}]'), ('SBRACKET', r'[\[\]]'), ('COMMA', r','), ('WORD', r'[A-Za-z]+'), ('NEWLINE', r'\n'), ('SKIP', r'[ \t]+'), ('MISMATCH', r'.'), ] <NEW_LINE> self.token_regex = '|'.join(f'(?P<{name}>{regex})' for name,regex in self.token_specs) <NEW_LINE> <DEDENT> def tokenize(self, data): <NEW_LINE> <INDENT> line_num = 1 <NEW_LINE> line_start = 0 <NEW_LINE> for mo in re.finditer(self.token_regex, data): <NEW_LINE> <INDENT> name = mo.lastgroup <NEW_LINE> value = mo.group(name) <NEW_LINE> if name == 'NEWLINE': <NEW_LINE> <INDENT> line_num += 1 <NEW_LINE> <DEDENT> elif name =='SKIP': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if name == 'ID' and value in self.keywords: <NEW_LINE> <INDENT> name = value <NEW_LINE> <DEDENT> column = mo.start() - line_start <NEW_LINE> yield Token(name, value, line_num, column)
Regex-based scanner that tokenizes input text into Token objects.
6259903e1f5feb6acb163e3b
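A hedged usage sketch of the scanner above; it assumes the module also imports re and defines a Token container, neither of which is shown in the row:

import re
from collections import namedtuple

# Assumed stand-in for the Token type yielded by Scanner.tokenize().
Token = namedtuple("Token", ["type", "value", "line", "column"])

scanner = Scanner()
for tok in scanner.tokenize("x = 3 + 4"):
    print(tok)
# Token(type='WORD', value='x', line=1, column=0)
# Token(type='EQUALS', value='=', line=1, column=2)
# ... NUMBER '3', PLUS '+', NUMBER '4' follow; whitespace is skipped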
class ChromeDebuggerControl(object): <NEW_LINE> <INDENT> def __init__(self, port): <NEW_LINE> <INDENT> if websocket is None: <NEW_LINE> <INDENT> raise NotImplementedError("websocket-client library not available; cannot control Chrome.\n" "Please install it (pip install websocket-client) then try again.") <NEW_LINE> <DEDENT> pages = json.loads(urlopen('http://localhost:%d/json/list' % port).read().decode('UTF-8')) <NEW_LINE> if len(pages) == 0: <NEW_LINE> <INDENT> raise Exception("No pages to attach to!") <NEW_LINE> <DEDENT> elif len(pages) == 1: <NEW_LINE> <INDENT> page = pages[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Select a page to attach to:") <NEW_LINE> for i, page in enumerate(pages): <NEW_LINE> <INDENT> print("%d) %s" % (i+1, page['title'])) <NEW_LINE> <DEDENT> while 1: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pageidx = int(input("Selection? ")) <NEW_LINE> page = pages[pageidx-1] <NEW_LINE> break <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Invalid selection:", e) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> wsurl = page['webSocketDebuggerUrl'] <NEW_LINE> self.ws = websocket.create_connection(wsurl) <NEW_LINE> self.requests = {} <NEW_LINE> self.results = {} <NEW_LINE> self.req_counter = itertools.count(1) <NEW_LINE> self.thread = threading.Thread(target=self._receive_thread) <NEW_LINE> self.thread.daemon = True <NEW_LINE> self.thread.start() <NEW_LINE> self._send_cmd_noresult('Runtime.enable') <NEW_LINE> <DEDENT> def _receive_thread(self): <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> message = json.loads(self.ws.recv()) <NEW_LINE> if 'id' in message: <NEW_LINE> <INDENT> id = message['id'] <NEW_LINE> event = self.requests.pop(id, None) <NEW_LINE> if event is not None: <NEW_LINE> <INDENT> self.results[id] = message <NEW_LINE> event.set() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _send_cmd_noresult(self, method, **params): <NEW_LINE> <INDENT> id = next(self.req_counter) <NEW_LINE> out = {'id': id, 'method': method} <NEW_LINE> if params: <NEW_LINE> <INDENT> out['params'] = params <NEW_LINE> <DEDENT> self.ws.send(json.dumps(out)) <NEW_LINE> <DEDENT> def _send_cmd(self, method, **params): <NEW_LINE> <INDENT> id = next(self.req_counter) <NEW_LINE> out = {'id': id, 'method': method} <NEW_LINE> if params: <NEW_LINE> <INDENT> out['params'] = params <NEW_LINE> <DEDENT> event = threading.Event() <NEW_LINE> self.requests[id] = event <NEW_LINE> self.ws.send(json.dumps(out)) <NEW_LINE> event.wait() <NEW_LINE> resp = self.results.pop(id) <NEW_LINE> if 'error' in resp: <NEW_LINE> <INDENT> raise Exception("Command %s(%s) failed: %s (%d)" % ( method, ', '.join('%s=%r' % (k,v) for k,v in params.iteritems()), resp['error']['message'], resp['error']['code'])) <NEW_LINE> <DEDENT> return resp['result'] <NEW_LINE> <DEDENT> def execute(self, cmd): <NEW_LINE> <INDENT> resp = self._send_cmd('Runtime.evaluate', expression=cmd) <NEW_LINE> if resp['wasThrown']: <NEW_LINE> <INDENT> raise Exception("JS evaluation threw an error: %s" % resp['result']['description']) <NEW_LINE> <DEDENT> result = resp['result'] <NEW_LINE> if 'value' in result: <NEW_LINE> <INDENT> return result['value'] <NEW_LINE> <DEDENT> if 'description' in result: <NEW_LINE> <INDENT> return result['description'] <NEW_LINE> <DEDENT> return None
Control Chrome using the debugging socket. Chrome must be launched using the --remote-debugging-port=<port> option for this to work!
6259903e596a897236128ed2
class Stream(QtCore.QObject): <NEW_LINE> <INDENT> newText = QtCore.pyqtSignal(str) <NEW_LINE> def write(self, text): <NEW_LINE> <INDENT> self.newText.emit(str(text))
Redirects console output to text widget.
6259903e8a349b6b4368748e
class Ocaml(Package): <NEW_LINE> <INDENT> homepage = "http://ocaml.org/" <NEW_LINE> url = "http://caml.inria.fr/pub/distrib/ocaml-4.03/ocaml-4.03.0.tar.gz" <NEW_LINE> version('4.06.0', '66e5439eb63dbb8b8224cba5d1b20947') <NEW_LINE> version('4.03.0', '43812739ea1b4641cf480f57f977c149') <NEW_LINE> depends_on('ncurses') <NEW_LINE> def url_for_version(self, version): <NEW_LINE> <INDENT> url = "http://caml.inria.fr/pub/distrib/ocaml-{0}/ocaml-{1}.tar.gz" <NEW_LINE> return url.format(version.up_to(2), version) <NEW_LINE> <DEDENT> def install(self, spec, prefix): <NEW_LINE> <INDENT> configure('-prefix', '{0}'.format(prefix)) <NEW_LINE> make('world.opt') <NEW_LINE> make('install', 'PREFIX={0}'.format(prefix))
OCaml is an industrial strength programming language supporting functional, imperative and object-oriented styles
6259903e15baa723494631da
class GitHubServerUnavailable(HTTPException): <NEW_LINE> <INDENT> code = 500 <NEW_LINE> description = ( "Failed to get repositories " "Connection failed after max retries." )
*5xx* Connection issue. Raised in case any exception other than 404 occurs when talking to GitHub.
6259903ed53ae8145f9196a4
class ProjectComboBox(QtWidgets.QComboBox): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ProjectComboBox, self).__init__(*args, **kwargs) <NEW_LINE> self._show_active_projects = False <NEW_LINE> self.fill_ui() <NEW_LINE> <DEDENT> def fill_ui(self): <NEW_LINE> <INDENT> from stalker import Project <NEW_LINE> self.clear() <NEW_LINE> self.addItem("Select Project...", None) <NEW_LINE> if self.show_active_projects: <NEW_LINE> <INDENT> from stalker import Status <NEW_LINE> cmpl = Status.query.filter(Status.code == 'CMPL').first() <NEW_LINE> projects = Project.query.filter(Project.status != cmpl).all() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> projects = Project.query.order_by(Project.name).all() <NEW_LINE> <DEDENT> for project in projects: <NEW_LINE> <INDENT> self.addItem(project.name, project) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def show_active_projects(self): <NEW_LINE> <INDENT> return self._show_active_projects <NEW_LINE> <DEDENT> @show_active_projects.setter <NEW_LINE> def show_active_projects(self, active_projects): <NEW_LINE> <INDENT> self._show_active_projects = bool(active_projects) <NEW_LINE> self.fill_ui() <NEW_LINE> <DEDENT> def get_current_project(self): <NEW_LINE> <INDENT> return self.itemData(self.currentIndex())
A QComboBox variant for Stalker Project instances
6259903e63b5f9789fe863b5
class LavaFloorExam2018Env(ObsGrid): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> actions = {0: "L", 1: "R", 2: "U", 3: "D"} <NEW_LINE> grid = [ ["S", "L", "L", "L", "L", "L"], ["L", "L", "W", "L", "L", "P"], ["L", "P", "W", "L", "L", "W"], ["L", "L", "L", "L", "L", "L"], ["P", "L", "L", "L", "L", "G"] ] <NEW_LINE> rewards = {"L": -0.04, "S": -0.04, "P": -10.0, "G": 10.0} <NEW_LINE> actdyn = {0: {0: 0.8, 1: 0.0, 2: 0.1, 3: 0.1}, 1: {1: 0.8, 0: 0.0, 2: 0.1, 3: 0.1}, 2: {2: 0.8, 1: 0.1, 0: 0.1, 3: 0.0}, 3: {3: 0.8, 1: 0.1, 2: 0.0, 0: 0.1}} <NEW_LINE> super().__init__(actions, grid, actdyn, rewards) <NEW_LINE> self.RC = self.R.copy() <NEW_LINE> self.TC = self.T.copy() <NEW_LINE> <DEDENT> def sample(self, state, action): <NEW_LINE> <INDENT> return self.np_random.choice(self.staterange, p=self.TC[state, action]) <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> if self.done: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> ns = self.sample(self.currstate, action) <NEW_LINE> r = self.RC[self.currstate, action, ns] <NEW_LINE> self.currstate = ns <NEW_LINE> if self.grid[ns] == "G" or self.grid[ns] == "P": <NEW_LINE> <INDENT> self.done = True <NEW_LINE> return ns, r, True, None <NEW_LINE> <DEDENT> return ns, r, False, None
The floor is lava! Actions have a stochastic outcome. Bigger grid.
6259903e23e79379d538d748
class NfvPortResource(BASE, NovaBase): <NEW_LINE> <INDENT> __tablename__ = "nfv_port_resource" <NEW_LINE> id = Column(Integer, primary_key=True, autoincrement=True) <NEW_LINE> node_id = Column(Integer, nullable=True) <NEW_LINE> nic_id = Column(String(length=255), nullable=True) <NEW_LINE> name = Column(String(length=255), nullable=True) <NEW_LINE> mac = Column(String(length=255), nullable=True) <NEW_LINE> pci = Column(String(length=255), nullable=True) <NEW_LINE> pci_passthrough_supported = Column(String(length=255),default='yes') <NEW_LINE> pci_sriov_supported = Column(String(length=255),default='yes') <NEW_LINE> max_vfnum = Column(Integer, nullable=True) <NEW_LINE> processor = Column(Integer, nullable=True) <NEW_LINE> auto = Column(String(length=255), nullable=True) <NEW_LINE> device = Column(String(length=255),nullable=True) <NEW_LINE> created_at = Column(DateTime, default=timeutils.utcnow) <NEW_LINE> updated_at = Column(DateTime, onupdate=timeutils.utcnow) <NEW_LINE> deleted_at = Column(DateTime) <NEW_LINE> deleted = Column(String(36), default="")
Represents an NFV port resource.
6259903ecad5886f8bdc59a1
class Species(FixedObject): <NEW_LINE> <INDENT> attributes = ['name', 'color', 'representation', 'tags'] <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> FixedObject.__init__(self, **kwargs) <NEW_LINE> self.name = kwargs.get('name', '') <NEW_LINE> self.representation = kwargs.get('representation', '') <NEW_LINE> self.tags = kwargs.get('tags', '') <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if hasattr(self, 'color'): <NEW_LINE> <INDENT> return '[SPECIES] Name: %s Color: %s\n' % (self.name, self.color) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '[SPECIES] Name: %s Color: no color set.\n' % (self.name)
Class that represents a species such as oxygen, empty, etc. Note: `empty` is treated just like a species.
6259903e10dbd63aa1c71e20
class Services: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.services = { "account": { "email": "[email protected]", "firstname": "HA", "lastname": "HA", "username": "HA", "password": "password", }, "service_offering": { "name": "Tiny Instance", "displaytext": "Tiny Instance", "cpunumber": 1, "cpuspeed": 100, "memory": 128, }, "lbrule": { "name": "SSH", "alg": "roundrobin", "privateport": 22, "publicport": 2222, }, "natrule": { "privateport": 22, "publicport": 22, "protocol": "TCP" }, "fw_rule": { "startport": 1, "endport": 6000, "cidr": '55.55.0.0/11', }, "virtual_machine": { "displayname": "VM", "username": "root", "password": "password", "ssh_port": 22, "hypervisor": 'XenServer', "privateport": 22, "publicport": 22, "protocol": 'TCP', }, "templates": { "displaytext": "Public Template", "name": "Public template", "ostype": 'CentOS 5.3 (64-bit)', "url": "http://download.cloud.com/releases/2.0.0/UbuntuServer-10-04-64bit.vhd.bz2", "hypervisor": 'XenServer', "format": 'VHD', "isfeatured": True, "ispublic": True, "isextractable": True, "templatefilter": 'self', }, "ostype": 'CentOS 5.3 (64-bit)', "sleep": 60, "timeout": 100, "mode": 'advanced' }
Test network offering Services
6259903ebe383301e0254a61
class ServedAssetFieldType(enum.IntEnum): <NEW_LINE> <INDENT> UNSPECIFIED = 0 <NEW_LINE> UNKNOWN = 1 <NEW_LINE> HEADLINE_1 = 2 <NEW_LINE> HEADLINE_2 = 3 <NEW_LINE> HEADLINE_3 = 4 <NEW_LINE> DESCRIPTION_1 = 5 <NEW_LINE> DESCRIPTION_2 = 6
The possible asset field types. Attributes: UNSPECIFIED (int): No value has been specified. UNKNOWN (int): The received value is not known in this version. This is a response-only value. HEADLINE_1 (int): The asset is used in headline 1. HEADLINE_2 (int): The asset is used in headline 2. HEADLINE_3 (int): The asset is used in headline 3. DESCRIPTION_1 (int): The asset is used in description 1. DESCRIPTION_2 (int): The asset is used in description 2.
6259903ed6c5a102081e336f
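A short usage example for the IntEnum above:

field = ServedAssetFieldType.HEADLINE_1
print(field.value)                   # 2
print(ServedAssetFieldType(5).name)  # DESCRIPTION_1
print(field == 2)                    # True: IntEnum members compare as ints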
class Console: <NEW_LINE> <INDENT> def __init__(self, width=0, height=0, tab_size=4, variables={}): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.tab_size = tab_size <NEW_LINE> self.variables = variables <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return (self.width, self.height) <NEW_LINE> <DEDENT> def print(self, *objects: tuple, sep=" ", end="\n", style=None): <NEW_LINE> <INDENT> if not objects: <NEW_LINE> <INDENT> print(sep=sep, end=end) <NEW_LINE> <DEDENT> strs = [self.style(obj) for obj in objects] <NEW_LINE> print(*strs, sep=sep, end=end) <NEW_LINE> <DEDENT> def style(self, obj) -> str: <NEW_LINE> <INDENT> if isinstance(obj, str): <NEW_LINE> <INDENT> return self.parse(obj) <NEW_LINE> <DEDENT> return str(obj) <NEW_LINE> <DEDENT> def parse(self, s: str) -> str: <NEW_LINE> <INDENT> ptr = 0 <NEW_LINE> stop = len(s) <NEW_LINE> buffer = "" <NEW_LINE> while ptr < stop: <NEW_LINE> <INDENT> _next = "" <NEW_LINE> if s[ptr] == "\\" and (s[ptr + 1] == "[" or s[ptr + 1] == "{"): <NEW_LINE> <INDENT> ptr += 1 <NEW_LINE> _next = s[ptr] <NEW_LINE> <DEDENT> elif s[ptr] == "[": <NEW_LINE> <INDENT> end = s.find("]", ptr + 1) <NEW_LINE> if end < 0: <NEW_LINE> <INDENT> raise TagParseError("Matching ']' could not be found.") <NEW_LINE> <DEDENT> _next = Style(s[ptr : end + 1]).get_ansi_style() <NEW_LINE> ptr = end <NEW_LINE> <DEDENT> elif s[ptr] == "{": <NEW_LINE> <INDENT> end = s.find("}", ptr + 1) <NEW_LINE> if end < 0: <NEW_LINE> <INDENT> raise TagParseError("Matching '}' could not be found.") <NEW_LINE> <DEDENT> var_name = s[ptr : end + 1].strip("{ }") <NEW_LINE> val = self.variables.get(var_name) <NEW_LINE> if val is None: <NEW_LINE> <INDENT> raise TagParseError(f"No variable '{var_name}'.") <NEW_LINE> <DEDENT> _next = f"{val:.1f}" if isinstance(val, float) else str(val) <NEW_LINE> ptr = end <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _next = s[ptr] <NEW_LINE> <DEDENT> buffer += _next <NEW_LINE> ptr += 1 <NEW_LINE> <DEDENT> buffer += "\x1b[0m" <NEW_LINE> return buffer
Definition of the console being used.
6259903ed99f1b3c44d068e6
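A hedged usage sketch of the {variable} substitution in Console.parse(); the style-tag path ([...]) depends on a Style class not shown in the row, so it is avoided here, and the variable values are illustrative:

console = Console(width=80, height=24, variables={"user": "ada", "load": 0.4567})
console.print("user={user} load={load}")  # prints "user=ada load=0.5" (floats get one decimal), followed by an ANSI reset
print(console.size)                       # (80, 24)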
class Solution: <NEW_LINE> <INDENT> def isValidBST(self, root): <NEW_LINE> <INDENT> return self.__isValid(root, [None]) <NEW_LINE> <DEDENT> def __isValid(self, root, prev): <NEW_LINE> <INDENT> if not root: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if not self.__isValid(root.left, prev): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if prev[0] is not None and prev[0] >= root.val: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> prev[0] = root.val <NEW_LINE> return self.__isValid(root.right, prev)
@param root: The root of the binary tree. @return: True if the binary tree is a BST, or false
6259903ed4950a0f3b111765
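A quick check of the in-order validation above, using a minimal TreeNode stand-in (the real node class is not shown in the row):

class TreeNode:
    # Minimal stand-in for the binary-tree node consumed by isValidBST.
    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

sol = Solution()
valid = TreeNode(2, TreeNode(1), TreeNode(3))
invalid = TreeNode(5, TreeNode(1), TreeNode(4, TreeNode(3), TreeNode(6)))
print(sol.isValidBST(valid))    # True
print(sol.isValidBST(invalid))  # False: 3 appears after 5 in the in-order walk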
class periodic(object): <NEW_LINE> <INDENT> def __init__(self, document): <NEW_LINE> <INDENT> self.document = document <NEW_LINE> self.callback = None <NEW_LINE> self.period = None <NEW_LINE> self.count = None <NEW_LINE> self.counter = None <NEW_LINE> self._start_time = None <NEW_LINE> self.timeout = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def completed(self): <NEW_LINE> <INDENT> return self.counter is None <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._start_time = time.time() <NEW_LINE> if self.document is None: <NEW_LINE> <INDENT> raise RuntimeError('periodic was registered to be run on bokeh' 'server but no document was found.') <NEW_LINE> <DEDENT> self.document.add_periodic_callback(self._periodic_callback, self.period) <NEW_LINE> <DEDENT> def __call__(self, period, count, callback, timeout=None, block=False): <NEW_LINE> <INDENT> if isinstance(count, int): <NEW_LINE> <INDENT> if count < 0: raise ValueError('Count value must be positive') <NEW_LINE> <DEDENT> elif not type(count) is type(None): <NEW_LINE> <INDENT> raise ValueError('Count value must be a positive integer or None') <NEW_LINE> <DEDENT> self.callback = callback <NEW_LINE> self.period = period*1000. <NEW_LINE> self.timeout = timeout <NEW_LINE> self.count = count <NEW_LINE> self.counter = 0 <NEW_LINE> return self <NEW_LINE> <DEDENT> def _periodic_callback(self): <NEW_LINE> <INDENT> self.callback(self.counter) <NEW_LINE> self.counter += 1 <NEW_LINE> if self.timeout is not None: <NEW_LINE> <INDENT> dt = (time.time() - self._start_time) <NEW_LINE> if dt > self.timeout: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> <DEDENT> <DEDENT> if self.counter == self.count: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.counter = None <NEW_LINE> self.timeout = None <NEW_LINE> try: <NEW_LINE> <INDENT> self.document.remove_periodic_callback(self._periodic_callback) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'periodic(%s, %s, %s)' % (self.period, self.count, callable_name(self.callback)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self)
Mocks the API of periodic Thread in hv.core.util, allowing a smooth API transition on bokeh server.
6259903e63f4b57ef0086699
class Sha1Hash(object): <NEW_LINE> <INDENT> name = 'python-sha1' <NEW_LINE> digest_size = 20 <NEW_LINE> block_size = 64 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._h = ( 0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0, ) <NEW_LINE> self._unprocessed = b'' <NEW_LINE> self._message_byte_length = 0 <NEW_LINE> self.GLUE = "" <NEW_LINE> <DEDENT> def update(self, arg): <NEW_LINE> <INDENT> if isinstance(arg, (bytes, bytearray)): <NEW_LINE> <INDENT> arg = io.BytesIO(arg) <NEW_LINE> <DEDENT> chunk = self._unprocessed + arg.read(64 - len(self._unprocessed)) <NEW_LINE> while len(chunk) == 64: <NEW_LINE> <INDENT> self._h = _process_chunk(chunk, *self._h) <NEW_LINE> self._message_byte_length += 64 <NEW_LINE> chunk = arg.read(64) <NEW_LINE> <DEDENT> self._unprocessed = chunk <NEW_LINE> return self <NEW_LINE> <DEDENT> def digest(self): <NEW_LINE> <INDENT> return b''.join(struct.pack(b'>I', h) for h in self._produce_digest()) <NEW_LINE> <DEDENT> def hexdigest(self): <NEW_LINE> <INDENT> return '%08x%08x%08x%08x%08x' % self._produce_digest() <NEW_LINE> <DEDENT> def _produce_digest(self): <NEW_LINE> <INDENT> message = self._unprocessed <NEW_LINE> message_byte_length = self._message_byte_length + len(message) <NEW_LINE> self.GLUE = '\x80' <NEW_LINE> message += b'\x80' <NEW_LINE> message += b'\x00' * ((56 - (message_byte_length + 1) % 64) % 64) <NEW_LINE> self.GLUE += b'\x00' * ((56 - (message_byte_length + 1) % 64) % 64) <NEW_LINE> message_bit_length = message_byte_length * 8 <NEW_LINE> message += struct.pack(b'>Q', message_bit_length) <NEW_LINE> self.GLUE += struct.pack(b'>Q', message_bit_length) <NEW_LINE> h = _process_chunk(message[:64], *self._h) <NEW_LINE> if len(message) == 64: <NEW_LINE> <INDENT> return h <NEW_LINE> <DEDENT> return _process_chunk(message[64:], *h)
A class that mimics the hashlib API and implements the SHA-1 algorithm.
6259903e73bcbd0ca4bcb4d4
class Track(object): <NEW_LINE> <INDENT> def __init__(self, category = None, frame_number = None, track_id = None, **kwds): <NEW_LINE> <INDENT> super(Track, self).__init__(**kwds) <NEW_LINE> self.category = category <NEW_LINE> self.frame_number = frame_number <NEW_LINE> self.last_added = 0 <NEW_LINE> self.length = 0 <NEW_LINE> self.props = {} <NEW_LINE> self.track_id = track_id <NEW_LINE> self.tw = 0.0 <NEW_LINE> self.tx = 0.0 <NEW_LINE> self.ty = 0.0 <NEW_LINE> self.tz = 0.0 <NEW_LINE> <DEDENT> def addLocalization(self, loc, index): <NEW_LINE> <INDENT> self.length += 1 <NEW_LINE> w = 1 <NEW_LINE> if "sum" in loc: <NEW_LINE> <INDENT> w += math.sqrt(loc["sum"][index]) <NEW_LINE> <DEDENT> self.tw += w <NEW_LINE> self.tx += w*loc["x"][index] <NEW_LINE> self.ty += w*loc["y"][index] <NEW_LINE> if "z" in loc: <NEW_LINE> <INDENT> self.tz += w*loc["z"][index] <NEW_LINE> <DEDENT> self.last_added = 0 <NEW_LINE> for key in loc: <NEW_LINE> <INDENT> if key in self.props: <NEW_LINE> <INDENT> self.props[key] += loc[key][index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.props[key] = loc[key][index] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def incLastAdded(self): <NEW_LINE> <INDENT> self.last_added += 1 <NEW_LINE> <DEDENT> def getCenter(self): <NEW_LINE> <INDENT> return [self.tx/self.tw, self.ty/self.tw] <NEW_LINE> <DEDENT> def getLastAdded(self): <NEW_LINE> <INDENT> return self.last_added <NEW_LINE> <DEDENT> def getProperties(self): <NEW_LINE> <INDENT> return self.props
Class to store & manipulate a single track.
6259903e711fe17d825e15c1
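A small usage sketch of the weighted-centre bookkeeping in Track; the localization dict (per-key lists indexed by the second argument) follows addLocalization() above, and the module is assumed to import math for the sqrt weighting:

track = Track(category=1, frame_number=0, track_id=7)
# One localization at (2.0, 4.0) with sum=9.0, so its weight is 1 + sqrt(9) = 4.
loc = {"x": [2.0], "y": [4.0], "sum": [9.0]}
track.addLocalization(loc, 0)
print(track.getCenter())      # [2.0, 4.0]  (weighted mean of a single point)
print(track.getProperties())  # {'x': 2.0, 'y': 4.0, 'sum': 9.0}
print(track.length)           # 1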
class ProgressBar(object): <NEW_LINE> <INDENT> def __init__(self, niter, f, width, silent): <NEW_LINE> <INDENT> self.niter = niter <NEW_LINE> self.f = f <NEW_LINE> self.width = width <NEW_LINE> self.silent = silent <NEW_LINE> self.count = 0 <NEW_LINE> self.nchar = 0 <NEW_LINE> <DEDENT> def __call__(self, inc=1): <NEW_LINE> <INDENT> self.count += inc <NEW_LINE> if not self.silent: <NEW_LINE> <INDENT> new_nchar = (self.count*self.width)/self.niter <NEW_LINE> if new_nchar > self.nchar: <NEW_LINE> <INDENT> self.f.write('>'*(new_nchar - self.nchar)) <NEW_LINE> self.f.flush() <NEW_LINE> self.nchar = new_nchar <NEW_LINE> <DEDENT> if self.count == self.niter: <NEW_LINE> <INDENT> self.f.write('\n') <NEW_LINE> <DEDENT> <DEDENT> elif self.count > self.niter: <NEW_LINE> <INDENT> raise ValueError('Progress bar overflow.')
Simple progress bar for the screen logger.
6259903e29b78933be26a9e8
class MultiServer(object): <NEW_LINE> <INDENT> def __init__(self, addr, registry, connect=False, context=None): <NEW_LINE> <INDENT> self.context = context or zmq.Context.instance() <NEW_LINE> self.addr = addr <NEW_LINE> self.connect = connect <NEW_LINE> self.registry = registry <NEW_LINE> <DEDENT> def run_device(self, callback=DummyCallback(), device=zmq.device): <NEW_LINE> <INDENT> output_addr = 'inproc://%s' % uuid.uuid4() <NEW_LINE> loadbal = LoadBalancer(self.addr, output_addr, context=self.context) <NEW_LINE> loadbal_thread = callback.spawn(loadbal.run, kwargs={'callback': callback, 'device': device}) <NEW_LINE> return output_addr <NEW_LINE> <DEDENT> def run(self, n_workers, callback=DummyCallback(), device=zmq.device): <NEW_LINE> <INDENT> loadbal_callback = type(callback)() <NEW_LINE> loadbal_addr = self.run_device(callback=loadbal_callback, device=device) <NEW_LINE> loadbal_callback.wait() <NEW_LINE> with callback.catch_exceptions(): <NEW_LINE> <INDENT> server = Server(loadbal_addr, self.registry, connect=True, context=self.context) <NEW_LINE> server_threads = [] <NEW_LINE> server_callbacks = [] <NEW_LINE> run_logger.debug("Spawning {0} workers", n_workers) <NEW_LINE> for i in xrange(n_workers): <NEW_LINE> <INDENT> server_callback = type(callback)() <NEW_LINE> server_thread = server_callback.spawn( server.run, kwargs={'callback': server_callback}) <NEW_LINE> server_threads.append(server_thread) <NEW_LINE> server_callbacks.append(server_callback) <NEW_LINE> <DEDENT> run_logger.debug("Spawned {0} workers", n_workers) <NEW_LINE> server_sockets = [server_callback.wait() for server_callback in server_callbacks] <NEW_LINE> <DEDENT> callback.send(server_sockets) <NEW_LINE> return loadbal_addr, server_threads
A multi-threaded ZMQ server.
6259903e8a43f66fc4bf33da
class ODE(HasTraits): <NEW_LINE> <INDENT> name = Str <NEW_LINE> num_vars = Int(0) <NEW_LINE> vars = List(Str, desc='The names of the variables of X vector') <NEW_LINE> changed = Event <NEW_LINE> def eval(self, X, t): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def default_domain(self): <NEW_LINE> <INDENT> return [(0.0,10.0) for i in range(len(self.vars))]
An ODE of the form dX/dt = f(X).
6259903e30dc7b76659a0a7c
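A hedged sketch of a concrete subclass for dX/dt = f(X); the exponential-decay system is purely illustrative:

class DecayODE(ODE):
    # Illustrative system: dx/dt = -x, dy/dt = -2*y.
    def eval(self, X, t):
        x, y = X
        return [-x, -2.0 * y]

ode = DecayODE(name='decay', vars=['x', 'y'], num_vars=2)
print(ode.eval([1.0, 1.0], 0.0))  # [-1.0, -2.0]
print(ode.default_domain())       # [(0.0, 10.0), (0.0, 10.0)]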
class OclWrapper_Collection(OclWrapper_Any, OclWrapper_Multiple): <NEW_LINE> <INDENT> def __length_hint__(self) -> int: <NEW_LINE> <INDENT> return self._wrapped.__length_hint__() <NEW_LINE> <DEDENT> def __setitem__(self, key: object, item: object): <NEW_LINE> <INDENT> self._wrapped.__setitem__(key, item) <NEW_LINE> <DEDENT> def __delitem__(self, key: object): <NEW_LINE> <INDENT> return self._wrapped.__delitem__(key) <NEW_LINE> <DEDENT> def __missing__(self, key: object): <NEW_LINE> <INDENT> return self._wrapped.__missing__(key) <NEW_LINE> <DEDENT> def __iter__(self) -> OclWrapper_Any: <NEW_LINE> <INDENT> return self._wrapped.__iter__() <NEW_LINE> <DEDENT> def __next__(self) -> OclWrapper_Any: <NEW_LINE> <INDENT> return self._wrapped.__next__() <NEW_LINE> <DEDENT> def __reversed__(self) -> OclWrapper_Any: <NEW_LINE> <INDENT> return self._wrapped.__reversed__() <NEW_LINE> <DEDENT> def any(self, condition: function) -> OclWrapper_Any: <NEW_LINE> <INDENT> for a in self._wrapped: <NEW_LINE> <INDENT> if condition(a): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def asBag(self) -> OclWrapper_Bag: <NEW_LINE> <INDENT> return OclWrapper_Bag(self._wrapped) <NEW_LINE> <DEDENT> def asOrderedSet(self) -> OclWrapper_OrderedSet: <NEW_LINE> <INDENT> return OclWrapper_OrderedSet(self._wrapped) <NEW_LINE> <DEDENT> def asSequence(self) -> OclWrapper_Sequence: <NEW_LINE> <INDENT> return OclWrapper_Sequence(self._wrapped) <NEW_LINE> <DEDENT> def asSet(self) -> OclWrapper_Set: <NEW_LINE> <INDENT> return OclWrapper_Set(self._wrapped) <NEW_LINE> <DEDENT> def collect(self, attName: str) -> OclWrapper_Collection: <NEW_LINE> <INDENT> return oclWrapper_Factory([element.__getattribute__(attName) for element in self._wrapped]) <NEW_LINE> <DEDENT> def __getattr__(self, attName: str) -> OclWrapper_Collection: <NEW_LINE> <INDENT> if self.__slots__.__contains__(attName): <NEW_LINE> <INDENT> return OclWrapper_Multiple.__getattr__(self, attName) <NEW_LINE> <DEDENT> return self.collect(attName)
A wrapper to emulate the Collection type in OCL (in Python: multiple possible types depending on the collection).
6259903ebaa26c4b54d504f3
class QeTask(pipeBase.Task): <NEW_LINE> <INDENT> ConfigClass = QeConfig <NEW_LINE> _DefaultName = "QeTask" <NEW_LINE> @pipeBase.timeMethod <NEW_LINE> def run(self, sensor_id, qe_files, pd_ratio_file, mask_files, gains, bias_frame=None, medians_file=None, vendor_data=False, correction_image=None, mondiode_func=None): <NEW_LINE> <INDENT> imutils.check_temperatures(qe_files, self.config.temp_set_point_tol, setpoint=self.config.temp_set_point, warn_only=True) <NEW_LINE> qe_data = QE.QE_Data(verbose=self.config.verbose, logger=self.log, mondiode_func=mondiode_func) <NEW_LINE> if medians_file is None: <NEW_LINE> <INDENT> medians_file = os.path.join(self.config.output_dir, '%s_QE_medians.txt' % sensor_id) <NEW_LINE> qe_data.calculate_medians(qe_files, medians_file, mask_files=mask_files, bias_frame=bias_frame, overwrite=True, correction_image=correction_image) <NEW_LINE> <DEDENT> qe_data.read_medians(medians_file) <NEW_LINE> if vendor_data: <NEW_LINE> <INDENT> qe_data.incidentPower_e2v() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> qe_data.incidentPower(pd_ratio_file) <NEW_LINE> <DEDENT> qe_data.calculate_QE(gains, amps=imutils.allAmps(qe_files[0])) <NEW_LINE> fits_outfile = os.path.join(self.config.output_dir, '%s_QE.fits' % sensor_id) <NEW_LINE> qe_data.write_fits_tables(fits_outfile)
Task to compute QE curves from wavelength scan dataset
6259903e507cdc57c63a5fe7
class AzureAsyncOperationResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'error': {'key': 'error', 'type': 'Error'}, } <NEW_LINE> def __init__( self, *, status: Optional[Union[str, "NetworkOperationStatus"]] = None, error: Optional["Error"] = None, **kwargs ): <NEW_LINE> <INDENT> super(AzureAsyncOperationResult, self).__init__(**kwargs) <NEW_LINE> self.status = status <NEW_LINE> self.error = error
The response body contains the status of the specified asynchronous operation, indicating whether it has succeeded, is in progress, or has failed. Note that this status is distinct from the HTTP status code returned for the Get Operation Status operation itself. If the asynchronous operation succeeded, the response body includes the HTTP status code for the successful request. If the asynchronous operation failed, the response body includes the HTTP status code for the failed request and error information regarding the failure. :param status: Status of the Azure async operation. Possible values include: "InProgress", "Succeeded", "Failed". :type status: str or ~azure.mgmt.network.v2019_08_01.models.NetworkOperationStatus :param error: Details of the error occurred during specified asynchronous operation. :type error: ~azure.mgmt.network.v2019_08_01.models.Error
6259903ed4950a0f3b111766
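A minimal construction example for the model above, assuming the surrounding azure-mgmt-network/msrest imports are available; the status value is illustrative:

result = AzureAsyncOperationResult(status="Succeeded")  # keyword-only constructor
print(result.status)  # Succeeded
print(result.error)   # None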
class hello(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(hello, self).__init__() <NEW_LINE> pathToFile = 'C:/johannes/gdrive/scripts/sequence/maya/tools/exportNulls/ui' <NEW_LINE> qtWin = cmds.loadUI(uiFile=pathToFile) <NEW_LINE> if cmds.window(qtWin, exists=True): <NEW_LINE> <INDENT> cmds.deleteUI(qtWin) <NEW_LINE> <DEDENT> cmds.showWindow(qtWin)
Loads and shows the exportNulls Qt UI window in Maya.
6259903e07d97122c4217eea
class VarGenerator(object): <NEW_LINE> <INDENT> def __init__(self, starting_vid=1): <NEW_LINE> <INDENT> self.vid = starting_vid <NEW_LINE> self.store = {} <NEW_LINE> <DEDENT> def new(self, sort, properties=None): <NEW_LINE> <INDENT> varstring = '{}{}'.format(sort, self.vid) <NEW_LINE> if properties is None: <NEW_LINE> <INDENT> properties = [] <NEW_LINE> <DEDENT> self.store[varstring] = properties <NEW_LINE> self.vid += 1 <NEW_LINE> return (varstring, properties)
Simple class to produce variables, incrementing the vid for each one.
6259903eac7a0e7691f73735
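A short usage example of VarGenerator; the sorts and property tuples are illustrative:

vg = VarGenerator()
print(vg.new('x'))                    # ('x1', [])
print(vg.new('e', [('SF', 'prop')]))  # ('e2', [('SF', 'prop')])
print(vg.store)                       # {'x1': [], 'e2': [('SF', 'prop')]}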
class MastodonAPI(): <NEW_LINE> <INDENT> def __init__(self, extractor, root="https://pawoo.net", access_token=("286462927198d0cf3e24683e91c8259a" "ac4367233064e0570ca18df2ac65b226")): <NEW_LINE> <INDENT> access_token = extractor.config("access-token", access_token) <NEW_LINE> self.session = extractor.session <NEW_LINE> self.session.headers["Authorization"] = "Bearer " + access_token <NEW_LINE> self.root = root <NEW_LINE> <DEDENT> def account_search(self, query, limit=40): <NEW_LINE> <INDENT> response = self.session.get( self.root + "/api/v1/accounts/search", params={"q": query, "limit": limit}, ) <NEW_LINE> return self._parse(response) <NEW_LINE> <DEDENT> def account_statuses(self, account_id): <NEW_LINE> <INDENT> url = "{}/api/v1/accounts/{}/statuses?only_media=1".format( self.root, account_id) <NEW_LINE> while url: <NEW_LINE> <INDENT> response = self.session.get(url) <NEW_LINE> yield from self._parse(response) <NEW_LINE> url = response.links.get("next", {}).get("url") <NEW_LINE> <DEDENT> <DEDENT> def status(self, status_id): <NEW_LINE> <INDENT> response = self.session.get( self.root + "/api/v1/statuses/" + status_id ) <NEW_LINE> return self._parse(response) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse(response): <NEW_LINE> <INDENT> if response.status_code == 404: <NEW_LINE> <INDENT> raise exception.NotFoundError() <NEW_LINE> <DEDENT> return response.json()
Minimal interface for the Mastodon API on pawoo.net https://github.com/tootsuite/mastodon https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md
6259903e379a373c97d9a275
class VUMeter(): <NEW_LINE> <INDENT> def __init__(self, parent, track, top, bottom, increments, vu_set, master = False): <NEW_LINE> <INDENT> self.frames = [0.0] * RMS_FRAMES <NEW_LINE> self.parent = parent <NEW_LINE> self.track = track <NEW_LINE> self.top = top <NEW_LINE> self.bottom = bottom <NEW_LINE> self.multiplier = self.calculate_multiplier(top, bottom, increments) <NEW_LINE> self.current_level = 0 <NEW_LINE> self.matrix = self.setup_matrix(vu_set, master) <NEW_LINE> self.master = master <NEW_LINE> <DEDENT> def observe(self): <NEW_LINE> <INDENT> new_frame = self.mean_peak() <NEW_LINE> self.store_frame(new_frame) <NEW_LINE> if self.master and new_frame >= 0.92: <NEW_LINE> <INDENT> self.parent._clipping = True <NEW_LINE> self.parent.clip_warning(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.master and self.parent._clipping: <NEW_LINE> <INDENT> self.parent._parent._session._change_offsets(0, 1) <NEW_LINE> self.parent._parent._session._change_offsets(0, -1) <NEW_LINE> self.parent._clipping = False <NEW_LINE> self.parent.clip_warning(False) <NEW_LINE> <DEDENT> if not self.parent._clipping: <NEW_LINE> <INDENT> if USE_RMS: <NEW_LINE> <INDENT> level = self.scale(self.rms(self.frames)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> level = self.scale(new_frame) <NEW_LINE> <DEDENT> if level != self.current_level: <NEW_LINE> <INDENT> self.current_level = level <NEW_LINE> if self.master: <NEW_LINE> <INDENT> self.parent.set_master_leds(level) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.parent.set_leds(self.matrix, level) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def store_frame(self, frame): <NEW_LINE> <INDENT> self.frames.pop(0) <NEW_LINE> self.frames.append(frame) <NEW_LINE> <DEDENT> def rms(self, frames): <NEW_LINE> <INDENT> return math.sqrt(sum(frame*frame for frame in frames)/len(frames)) <NEW_LINE> <DEDENT> def mean_peak(self): <NEW_LINE> <INDENT> return (self.track.output_meter_left + self.track.output_meter_right) / 2 <NEW_LINE> <DEDENT> def scale(self, value): <NEW_LINE> <INDENT> if (value > self.top): <NEW_LINE> <INDENT> value = self.top <NEW_LINE> <DEDENT> elif (value < self.bottom): <NEW_LINE> <INDENT> value = self.bottom <NEW_LINE> <DEDENT> value = value - self.bottom <NEW_LINE> value = value * self.multiplier <NEW_LINE> return int(round(value)) <NEW_LINE> <DEDENT> def calculate_multiplier(self, top, bottom, increments): <NEW_LINE> <INDENT> return (increments / (top - bottom)) <NEW_LINE> <DEDENT> def setup_matrix(self, vu_set, master): <NEW_LINE> <INDENT> matrix = [] <NEW_LINE> if master: <NEW_LINE> <INDENT> for scene in self.parent._parent._session._scenes: <NEW_LINE> <INDENT> matrix.append(scene._launch_button) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for index, column_index in enumerate(vu_set): <NEW_LINE> <INDENT> matrix.append([]) <NEW_LINE> column = matrix[index] <NEW_LINE> for row_index in range(CLIP_GRID_Y): <NEW_LINE> <INDENT> column.append(self.parent._parent._button_rows[row_index][column_index]) <NEW_LINE> <DEDENT> if master != True: <NEW_LINE> <INDENT> strip = self.parent._parent._mixer.channel_strip(column_index) <NEW_LINE> column.append(self.parent._parent._track_stop_buttons[column_index]) <NEW_LINE> column.extend([strip._select_button, strip._mute_button, strip._solo_button, strip._arm_button]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return matrix
Represents a single VU meter that stores RMS values etc. for one track.
6259903e50485f2cf55dc1cf
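The scaling in VUMeter maps a meter value in [bottom, top] onto a fixed number of LED increments. A standalone restatement of that arithmetic for illustration (the class itself needs live Ableton Live objects to instantiate); the numbers are assumed:

def scale(value, top=0.9, bottom=0.1, increments=8):
    # Mirrors VUMeter.calculate_multiplier() and VUMeter.scale().
    multiplier = increments / (top - bottom)  # 10.0 for these numbers
    value = min(max(value, bottom), top)      # clamp into [bottom, top]
    return int(round((value - bottom) * multiplier))

print(scale(0.05))  # 0 (below the floor)
print(scale(0.5))   # 4
print(scale(0.95))  # 8 (clipped to the top increment)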
class SelectObject( ActionStep ): <NEW_LINE> <INDENT> def __init__( self, admin ): <NEW_LINE> <INDENT> self.admin = admin <NEW_LINE> self.query = admin.get_query() <NEW_LINE> <DEDENT> def gui_run( self, gui_context ): <NEW_LINE> <INDENT> select_dialog = SelectDialog( self.admin, self.query ) <NEW_LINE> select_dialog.exec_() <NEW_LINE> if select_dialog.object_getter: <NEW_LINE> <INDENT> return select_dialog.object_getter <NEW_LINE> <DEDENT> raise CancelRequest()
Select an object from a list :param admin: a :class:`camelot.admin.object_admin.ObjectAdmin` object
6259903e96565a6dacd2d8b1
class Student: <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, age): <NEW_LINE> <INDENT> self.age = age <NEW_LINE> self.last_name = last_name <NEW_LINE> self.first_name = first_name <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> def to_json(self, attrs=None): <NEW_LINE> <INDENT> if attrs is not None: <NEW_LINE> <INDENT> new_dict = {} <NEW_LINE> for obj in attrs: <NEW_LINE> <INDENT> if type(obj) is not str: <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> if obj in self.__dict__: <NEW_LINE> <INDENT> new_dict[obj] = self.__dict__[obj] <NEW_LINE> <DEDENT> <DEDENT> return new_dict <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> <DEDENT> def reload_from_json(self, json): <NEW_LINE> <INDENT> for key, value in json.items(): <NEW_LINE> <INDENT> self.__dict__[key] = value
A Student class with JSON serialization (to_json) and deserialization (reload_from_json).
6259903e66673b3332c31645
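A quick usage example of the filtered serialization above:

s = Student("John", "Doe", 23)
print(s.to_json())                       # {'age': 23, 'last_name': 'Doe', 'first_name': 'John'}
print(s.to_json(["first_name", "age"]))  # {'first_name': 'John', 'age': 23}
s.reload_from_json({"age": 24})
print(s.age)                             # 24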
@dataclasses.dataclass <NEW_LINE> class UserService: <NEW_LINE> <INDENT> _user_repository: IUserRepository <NEW_LINE> def exists(self, user: User) -> bool: <NEW_LINE> <INDENT> return self._user_repository.exists(user)
A domain service that uses a repository. Attributes: _user_repository (IUserRepository): the user repository. Note: if Listing 5.6 is used, the domain service does not become the main actor.
6259903ebe383301e0254a65
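A hedged sketch of wiring the domain service above to a repository; the User entity and the in-memory repository below are illustrative stand-ins for the project's User and IUserRepository, not its real implementations:

from dataclasses import dataclass

@dataclass(frozen=True)
class User:
    # Hypothetical stand-in for the project's User entity.
    name: str

class InMemoryUserRepository:
    # Hypothetical stand-in satisfying IUserRepository by duck typing.
    def __init__(self):
        self._users = set()

    def save(self, user: User) -> None:
        self._users.add(user)

    def exists(self, user: User) -> bool:
        return user in self._users

repo = InMemoryUserRepository()
repo.save(User("alice"))
service = UserService(repo)            # inject the repository dependency
print(service.exists(User("alice")))   # True
print(service.exists(User("bob")))     # False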
class DeletePrivilegeEventsResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId")
DeletePrivilegeEvents response structure.
6259903e8e71fb1e983bcd1b
@combiner([Uname, rht_release]) <NEW_LINE> class RedHatRelease(object): <NEW_LINE> <INDENT> def __init__(self, uname, rh_rel): <NEW_LINE> <INDENT> self.major = self.minor = self.rhel = None <NEW_LINE> if uname and uname.redhat_release.major != -1: <NEW_LINE> <INDENT> self.major = uname.redhat_release.major <NEW_LINE> self.minor = uname.redhat_release.minor <NEW_LINE> self.rhel = '{0}.{1}'.format(self.major, self.minor) <NEW_LINE> <DEDENT> elif rh_rel and rh_rel.is_rhel: <NEW_LINE> <INDENT> self.major = rh_rel.major <NEW_LINE> self.minor = rh_rel.minor <NEW_LINE> self.rhel = rh_rel.version <NEW_LINE> <DEDENT> if self.rhel is None: <NEW_LINE> <INDENT> raise SkipComponent("Unable to determine release.") <NEW_LINE> <DEDENT> self.rhel6 = self.rhel if self.major == 6 else None <NEW_LINE> self.rhel7 = self.rhel if self.major == 7 else None <NEW_LINE> self.rhel8 = self.rhel if self.major == 8 else None
Combiner class to check uname and redhat-release for RHEL major/minor version. Prefer uname to redhat-release. Attributes: major (int): The major RHEL version. minor (int): The minor RHEL version. rhel (str): The RHEL version, e.g. '7.2', '7.5-0.14' rhel6 (str): The RHEL version when it's RHEL6, otherwise None rhel7 (str): The RHEL version when it's RHEL7, otherwise None rhel8 (str): The RHEL version when it's RHEL8, otherwise None Raises: SkipComponent: If the version can't be determined even though a Uname or RedhatRelease was provided. Examples: >>> rh_rel.rhel '7.2' >>> rh_rel.major 7 >>> rh_rel.minor 2 >>> rh_rel.rhel6 is None True >>> rh_rel.rhel7 '7.2' >>> rh_rel.rhel8 is None True
6259903ea4f1c619b294f7ae
class MemcacheTestCase(BaseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(MemcacheTestCase, self).setUp() <NEW_LINE> self.clear_memcache() <NEW_LINE> <DEDENT> def clear_memcache(self): <NEW_LINE> <INDENT> memcache.flush_all() <NEW_LINE> <DEDENT> def assertMemcacheHits(self, hits): <NEW_LINE> <INDENT> self.assertEqual(memcache.get_stats()['hits'], hits) <NEW_LINE> <DEDENT> def assertMemcacheItems(self, items): <NEW_LINE> <INDENT> self.assertEqual(memcache.get_stats()['items'], items)
The ``MemcacheTestCase`` is a base test case that provides helper methods for dealing with App Engine's Memcache API. App Engine actually does most of the work for this by providing statistics through the Memcache API, as well as a simple method call to clear out the cache. The main feature of this is the ability to assert that items are in the cache, and to check the number of hits to the cache. More fine grained assertions are on the way, for now it's pretty general, only able to assert that things are in there and not which specific things. The following example illustrates a simple way of checking that Memcache is working as expected:: import unittest from gaetestbed import MemcacheTestCase from google.appengine.api import memcache class MyTestCase(MemcacheTestCase, unittest.TestCase): def test_memcache(self): # Nothing has been put in the cache, or retrieved from the cache self.assertMemcacheItems(0) self.assertMemcacheHits(0) # Add something to the cache memcache.set(key='test_item', value='test_content') # One item added, no hits yet self.assertMemcacheItems(1) self.assertMemcacheHits(0) # Grab it from the cache: item = memcache.get('test_item') # One item, one hit self.assertMemcacheItems(1) self.assertMemcacheHits(1) Just like the other test cases, each test should be a sandbox, meaning that the following assertions should pass if they are run at the start of every test case:: self.assertMemcacheItems(0) self.assertMemcacheHits(0)
6259903e23849d37ff852307
class PluginProxy(object): <NEW_LINE> <INDENT> def inject(self, data, url): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def setInjectionCode(self, code): <NEW_LINE> <INDENT> raise NotImplementedError
Main class modules
6259903eec188e330fdf9ae7
class Detector(object): <NEW_LINE> <INDENT> def __init__(self, hPath): <NEW_LINE> <INDENT> self.haarDetector = HaarDetector(hPath) <NEW_LINE> self.meanshiftTracker = None <NEW_LINE> <DEDENT> def detectar(self,image): <NEW_LINE> <INDENT> if(self.meanshiftTracker == None): <NEW_LINE> <INDENT> seEncontro, x,y,w,h = self.haarDetector.detectar(image) <NEW_LINE> if seEncontro: <NEW_LINE> <INDENT> self.meanshiftTracker = MeanshiftTracker(image, (x,y,w,h)) <NEW_LINE> self.meanshiftTracker.identificarBlob() <NEW_LINE> return seEncontro, x,y,w,h <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> seEncontro,x,y,w,h = self.meanshiftTracker.rastrear(image) <NEW_LINE> if(seEncontro): <NEW_LINE> <INDENT> return seEncontro,x,y,w,h <NEW_LINE> <DEDENT> <DEDENT> self.meanshiftTracker = None <NEW_LINE> return False,0,0,0,0
This class is responsible for finding and tracking a given pattern defined by the Haar feature passed in the constructor.
6259903e1f5feb6acb163e41
class FakeFindQuery(object): <NEW_LINE> <INDENT> def __init__(self, pattern): <NEW_LINE> <INDENT> self.pattern = pattern
Fake Query object for testing purposes. We don't use the Graphite Query because it imports too many things from Django.
6259903e30c21e258be99a5b
class GroupSerializer(serializers.HyperlinkedModelSerializer): <NEW_LINE> <INDENT> users_count = serializers.IntegerField(read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Group <NEW_LINE> fields = ('url', 'name', 'users_count',) <NEW_LINE> extra_kwargs = {'url': {'view_name': 'api:group-detail', 'lookup_field': 'name'}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> instance = Group.objects.create(**validated_data) <NEW_LINE> instance.users_count = 0 <NEW_LINE> return instance
Group list serializer. users_count - field that represents the number of users in the group, passed from the view as an annotation.
6259903e29b78933be26a9ea
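users_count is read-only, so the view has to annotate the queryset before the serializer runs. A hedged sketch of such a view; it assumes Group is Django's built-in auth Group and that its reverse accessor to users is named 'user', neither of which is shown in the row:

from django.contrib.auth.models import Group
from django.db.models import Count
from rest_framework import viewsets

class GroupViewSet(viewsets.ModelViewSet):
    serializer_class = GroupSerializer
    lookup_field = "name"  # matches the serializer's url lookup_field
    # Annotate so GroupSerializer.users_count has a value to read.
    queryset = Group.objects.annotate(users_count=Count("user"))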
class Column(SerializationMixin, SA_Column): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Column, self).__init__(*args, **kwargs)
Represents a column in a database table. Inherits from :class:`sqlalchemy.schema.Column <sqlalchemy:sqlalchemy.schema.Column>`
6259903e004d5f362081f90b
class MDict(dict): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.missing = kwargs.pop("missing", 0) <NEW_LINE> dict.__init__(self, kwargs) <NEW_LINE> <DEDENT> def __missing__(self, _key): <NEW_LINE> <INDENT> return self.missing
A dictionary which returns self.missing (default=0) for non-existing keys. The advantage over collections.defaultdict(int) is that the key is not filled in when an unknown key is requested.
6259903ee76e3b2f99fd9c5a
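A quick demonstration of the behaviour described above:

d = MDict(missing=-1, a=1, b=2)
print(d["a"])      # 1
print(d["zzz"])    # -1, via __missing__
print("zzz" in d)  # False: unlike defaultdict, the failed lookup does not insert the key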
class KeychainException(Exception): <NEW_LINE> <INDENT> def __init__(self, value=""): <NEW_LINE> <INDENT> self.message_prefix = "Keychain Error:" <NEW_LINE> self.message = "%s %s" % (self.message_prefix, value) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.message
The generic keychain exception class
6259903e82261d6c527307eb
class DfpClientTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.network_code = '12345' <NEW_LINE> self.application_name = 'application name' <NEW_LINE> self.oauth2_client = 'unused' <NEW_LINE> self.https_proxy = 'myproxy:443' <NEW_LINE> self.cache = None <NEW_LINE> self.version = sorted(googleads.dfp._SERVICE_MAP.keys())[-1] <NEW_LINE> self.dfp_client = googleads.dfp.DfpClient( self.oauth2_client, self.application_name, self.network_code, self.https_proxy, self.cache) <NEW_LINE> <DEDENT> def testLoadFromStorage(self): <NEW_LINE> <INDENT> with mock.patch('googleads.common.LoadFromStorage') as mock_load: <NEW_LINE> <INDENT> mock_load.return_value = { 'network_code': 'abcdEFghIjkLMOpqRs', 'oauth2_client': True, 'application_name': 'unit testing' } <NEW_LINE> self.assertIsInstance(googleads.dfp.DfpClient.LoadFromStorage(), googleads.dfp.DfpClient) <NEW_LINE> <DEDENT> <DEDENT> def testGetService_success(self): <NEW_LINE> <INDENT> service = googleads.dfp._SERVICE_MAP[self.version][0] <NEW_LINE> server = 'https://testing.test.com/' <NEW_LINE> https_proxy = {'https': self.https_proxy} <NEW_LINE> with mock.patch('suds.client.Client') as mock_client: <NEW_LINE> <INDENT> suds_service = self.dfp_client.GetService(service, self.version, server) <NEW_LINE> mock_client.assert_called_once_with( 'https://testing.test.com/apis/ads/publisher/%s/%s?wsdl' % (self.version, service), proxy=https_proxy, cache=self.cache, timeout=3600) <NEW_LINE> self.assertIsInstance(suds_service, googleads.common.SudsServiceProxy) <NEW_LINE> <DEDENT> self.dfp_client.https_proxy = None <NEW_LINE> with mock.patch('suds.client.Client') as mock_client: <NEW_LINE> <INDENT> suds_service = self.dfp_client.GetService(service, self.version) <NEW_LINE> mock_client.assert_called_once_with( 'https://ads.google.com/apis/ads/publisher/%s/%s?wsdl' % (self.version, service), proxy=None, cache=self.cache, timeout=3600) <NEW_LINE> self.assertFalse(mock_client.return_value.set_options.called) <NEW_LINE> self.assertIsInstance(suds_service, googleads.common.SudsServiceProxy) <NEW_LINE> <DEDENT> <DEDENT> def testGetService_badService(self): <NEW_LINE> <INDENT> with mock.patch('suds.client.Client') as mock_client: <NEW_LINE> <INDENT> mock_client.side_effect = suds.transport.TransportError('', '') <NEW_LINE> self.assertRaises( googleads.errors.GoogleAdsValueError, self.dfp_client.GetService, 'GYIVyievfyiovslf', self.version) <NEW_LINE> <DEDENT> <DEDENT> def testGetService_badVersion(self): <NEW_LINE> <INDENT> with mock.patch('suds.client.Client') as mock_client: <NEW_LINE> <INDENT> mock_client.side_effect = suds.transport.TransportError('', '') <NEW_LINE> self.assertRaises( googleads.errors.GoogleAdsValueError, self.dfp_client.GetService, 'CampaignService', '11111') <NEW_LINE> <DEDENT> <DEDENT> def testGetService_transportError(self): <NEW_LINE> <INDENT> service = googleads.dfp._SERVICE_MAP[self.version][0] <NEW_LINE> with mock.patch('suds.client.Client') as mock_client: <NEW_LINE> <INDENT> mock_client.side_effect = suds.transport.TransportError('', '') <NEW_LINE> self.assertRaises(suds.transport.TransportError, self.dfp_client.GetService, service, self.version)
Tests for the googleads.dfp.DfpClient class.
6259903e07d97122c4217eed
class ProjectStep: <NEW_LINE> <INDENT> def __init__(self, client, data): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self._data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def project_step_id(self): <NEW_LINE> <INDENT> return self._data.get("projectStepId") <NEW_LINE> <DEDENT> @property <NEW_LINE> def project_step_name(self): <NEW_LINE> <INDENT> return self._data.get("projectStepName") <NEW_LINE> <DEDENT> @property <NEW_LINE> def project_name(self): <NEW_LINE> <INDENT> return self._data.get("projectName") <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._data.get("type") <NEW_LINE> <DEDENT> def project(self): <NEW_LINE> <INDENT> name = self.project_name <NEW_LINE> projects = [p for p in self.client.projects if p.name == name] <NEW_LINE> if len(projects) == 0: <NEW_LINE> <INDENT> raise KeyError(f'No project found with name "{name}"') <NEW_LINE> <DEDENT> elif len(projects) > 1: <NEW_LINE> <INDENT> raise LookupError(f'Multiple projects found with name "{name}"') <NEW_LINE> <DEDENT> return projects[0] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( f"{self.__class__.__module__}." f"{self.__class__.__qualname__}(" f"project_step_id={self.project_step_id!r}, " f"project_step_name={self.project_step_name!r}, " f"project_name={self.project_name!r}, " f"type={self.type!r})" )
A step of a Tamr project. This is not a `BaseResource` because it has no API path and cannot be directly retrieved or modified. See https://docs.tamr.com/reference#retrieve-downstream-dataset-usage :param client: Delegate underlying API calls to this client. :type client: :class:`~tamr_unify_client.Client` :param data: The JSON body containing project step information. :type data: :py:class:`dict`
6259903e26238365f5fadda6
class _CommandWithNamespaceTypeInfo(_CommandBaseTypeInfo): <NEW_LINE> <INDENT> def __init__(self, command): <NEW_LINE> <INDENT> self._command = command <NEW_LINE> super(_CommandWithNamespaceTypeInfo, self).__init__(command) <NEW_LINE> <DEDENT> def get_constructor_method(self): <NEW_LINE> <INDENT> class_name = common.title_case(self._struct.name) <NEW_LINE> return MethodInfo(class_name, class_name, ['const NamespaceString nss'], explicit=True) <NEW_LINE> <DEDENT> def get_serializer_method(self): <NEW_LINE> <INDENT> return MethodInfo( common.title_case(self._struct.name), 'serialize', ['const BSONObj& commandPassthroughFields', 'BSONObjBuilder* builder'], 'void', const=True) <NEW_LINE> <DEDENT> def get_to_bson_method(self): <NEW_LINE> <INDENT> return MethodInfo( common.title_case(self._struct.name), 'toBSON', ['const BSONObj& commandPassthroughFields'], 'BSONObj', const=True) <NEW_LINE> <DEDENT> def get_deserializer_method(self): <NEW_LINE> <INDENT> return MethodInfo( common.title_case(self._struct.name), 'parseProtected', ['const IDLParserErrorContext& ctxt', 'const BSONObj& bsonObject'], 'void') <NEW_LINE> <DEDENT> def gen_getter_method(self, indented_writer): <NEW_LINE> <INDENT> indented_writer.write_line('const NamespaceString& getNamespace() const { return _nss; }') <NEW_LINE> <DEDENT> def gen_member(self, indented_writer): <NEW_LINE> <INDENT> indented_writer.write_line('NamespaceString _nss;') <NEW_LINE> <DEDENT> def gen_serializer(self, indented_writer): <NEW_LINE> <INDENT> indented_writer.write_line('invariant(!_nss.isEmpty());') <NEW_LINE> indented_writer.write_line('builder->append("%s", _nss.coll());' % (self._command.name)) <NEW_LINE> indented_writer.write_empty_line() <NEW_LINE> <DEDENT> def gen_namespace_check(self, indented_writer, db_name, element): <NEW_LINE> <INDENT> indented_writer.write_line('invariant(_nss.isEmpty());') <NEW_LINE> indented_writer.write_line('_nss = ctxt.parseNSCollectionRequired(%s, %s);' % (db_name, element))
Class for command code generation.
6259903e30dc7b76659a0a80
class CoreDownloader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = "CORE" <NEW_LINE> self.config = None <NEW_LINE> self.logger = None <NEW_LINE> self.downloader = None <NEW_LINE> self.uploader = None <NEW_LINE> self.db = None <NEW_LINE> self.new_version = None <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.logger = get_logger(self.name) <NEW_LINE> try: <NEW_LINE> <INDENT> self.prepare() <NEW_LINE> download_settings = self.config["download"] <NEW_LINE> url = self.get_url(download_settings) <NEW_LINE> if self.new_version: <NEW_LINE> <INDENT> self.logger.info(u"Начало загрузки архива") <NEW_LINE> self.downloader = ArchiveDownloader(url, download_settings, self.config["log_lvl"]) <NEW_LINE> success = self.downloader.download_archive() <NEW_LINE> if success: <NEW_LINE> <INDENT> self.logger.info(u'Начало обновления бд') <NEW_LINE> self.uploader = UploaderXML(max_size=self.config["max_part_size"], log_lvl=self.config["log_lvl"], **self.config["mysql"]) <NEW_LINE> self.uploader.upload_archive() <NEW_LINE> self.db.update_release_version(self.new_version) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except RequestException as e: <NEW_LINE> <INDENT> self.logger.exception(UnhandledRequestsError(e)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.exception(UnhandledError(e)) <NEW_LINE> <DEDENT> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> self.config = read_config() <NEW_LINE> set_log_lvl(self.config["log_lvl"], self.name) <NEW_LINE> self.db = DB(**self.config["mysql"]) <NEW_LINE> urllib3.disable_warnings() <NEW_LINE> <DEDENT> def get_url(self, settings): <NEW_LINE> <INDENT> new_info = requests.get(settings["url"]).json() <NEW_LINE> if self.db.get_version() < new_info["VersionId"]: <NEW_LINE> <INDENT> self.logger.info(u"Найдена новая версия") <NEW_LINE> self.new_version = int(new_info["VersionId"]) <NEW_LINE> return new_info[settings["method"]]
Core of this project
6259903e1d351010ab8f4d6b
class SymLinkNest: <NEW_LINE> <INDENT> def __init__(self, dirname): <NEW_LINE> <INDENT> self._dirname = dirname <NEW_LINE> <DEDENT> def add_provider(self, provider): <NEW_LINE> <INDENT> for filename in provider.executable_list: <NEW_LINE> <INDENT> self.add_executable(filename) <NEW_LINE> <DEDENT> <DEDENT> def add_executable(self, filename): <NEW_LINE> <INDENT> logger.debug( _("Adding executable %s to nest %s"), filename, self._dirname) <NEW_LINE> dest = os.path.join(self._dirname, os.path.basename(filename)) <NEW_LINE> try: <NEW_LINE> <INDENT> os.symlink(filename, dest) <NEW_LINE> <DEDENT> except OSError as exc: <NEW_LINE> <INDENT> logger.error( _("Unable to create symlink s%s -> %s: %r"), filename, dest, exc)
A class for setting up a control directory with symlinked executables
6259903ed10714528d69efb3
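SymLinkNest is a thin wrapper over two standard-library calls; a dependency-free sketch of the same idea with hypothetical paths (POSIX-only, since it creates symlinks):

import os
import tempfile

# Sketch of what add_executable() does: link a binary into a "nest" directory
# under its basename; the real class additionally logs and swallows OSError.
nest_dir = tempfile.mkdtemp()
target = "/usr/bin/env"                      # hypothetical executable to expose
dest = os.path.join(nest_dir, os.path.basename(target))
os.symlink(target, dest)
print(os.readlink(dest))                     # -> /usr/bin/env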
class NotifyWorker(BaseWorker): <NEW_LINE> <INDENT> def __init__(self, program, **kwargs): <NEW_LINE> <INDENT> super(NotifyWorker, self).__init__(program, **kwargs) <NEW_LINE> self.registered = [] <NEW_LINE> <DEDENT> def do(self, function): <NEW_LINE> <INDENT> def make_new_fun(fun): <NEW_LINE> <INDENT> @functools.wraps(fun) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return fun(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.debug('NotifyWorker.do error: {}'.format(e)) <NEW_LINE> <DEDENT> <DEDENT> return wrapper <NEW_LINE> <DEDENT> self.registered.append(make_new_fun(function)) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> notify = self.program.config.getboolean('notify.worker', 'notify') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> notify = defaults.DEFAULTS['notify.worker']['notify'] <NEW_LINE> <DEDENT> if not notify: <NEW_LINE> <INDENT> logging.debug('No notify. Exit thread.') <NEW_LINE> return <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> logging.debug('Getting a new story from queue') <NEW_LINE> story = self.program.state.watch.get() <NEW_LINE> self.program.state.watch.mark_notified(story) <NEW_LINE> logging.debug('Do notifications for story {}'.format(story['id'])) <NEW_LINE> for function in self.registered: <NEW_LINE> <INDENT> function(story) <NEW_LINE> <DEDENT> interval = 5 <NEW_LINE> logging.debug('NotifyWorker will sleep for {}s'.format(interval)) <NEW_LINE> time.sleep(interval)
Show notifications
6259903e23e79379d538d74e
class Title(models.Model): <NEW_LINE> <INDENT> id = models.IntegerField(primary_key=True) <NEW_LINE> name = models.CharField(max_length=100) <NEW_LINE> admin_notes = models.TextField(blank=True, null=True) <NEW_LINE> admin_published = models.BooleanField(default=True) <NEW_LINE> meta_created_datetime = models.DateTimeField(auto_now_add=True, verbose_name='Created') <NEW_LINE> meta_lastupdated_datetime = models.DateTimeField(auto_now=True, verbose_name='Last Updated') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Title for each custom user, e.g. Mr, Mrs, Dr, Prof.
6259903e0fa83653e46f6129
class Distros(collection.Collection): <NEW_LINE> <INDENT> def collection_type(self): <NEW_LINE> <INDENT> return "distro" <NEW_LINE> <DEDENT> def factory_produce(self, collection_mgr, item_dict): <NEW_LINE> <INDENT> new_distro = distro.Distro(collection_mgr) <NEW_LINE> new_distro.from_dict(item_dict) <NEW_LINE> return new_distro <NEW_LINE> <DEDENT> def remove(self, name, with_delete=True, with_sync=True, with_triggers=True, recursive=False, logger=None): <NEW_LINE> <INDENT> name = name.lower() <NEW_LINE> if not recursive: <NEW_LINE> <INDENT> for v in self.collection_mgr.profiles(): <NEW_LINE> <INDENT> if v.distro and v.distro.lower() == name: <NEW_LINE> <INDENT> raise CX(_("removal would orphan profile: %s") % v.name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> obj = self.find(name=name) <NEW_LINE> if obj is not None: <NEW_LINE> <INDENT> kernel = obj.kernel <NEW_LINE> if recursive: <NEW_LINE> <INDENT> kids = obj.get_children() <NEW_LINE> for k in kids: <NEW_LINE> <INDENT> self.collection_mgr.api.remove_profile(k.name, recursive=recursive, delete=with_delete, with_triggers=with_triggers, logger=logger) <NEW_LINE> <DEDENT> <DEDENT> if with_delete: <NEW_LINE> <INDENT> if with_triggers: <NEW_LINE> <INDENT> utils.run_triggers(self.collection_mgr.api, obj, "/var/lib/cobbler/triggers/delete/distro/pre/*", [], logger) <NEW_LINE> <DEDENT> if with_sync: <NEW_LINE> <INDENT> lite_sync = action_litesync.CobblerLiteSync(self.collection_mgr, logger=logger) <NEW_LINE> lite_sync.remove_single_distro(name) <NEW_LINE> <DEDENT> <DEDENT> self.lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> del self.listing[name] <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.lock.release() <NEW_LINE> <DEDENT> self.collection_mgr.serialize_delete(self, obj) <NEW_LINE> if with_delete: <NEW_LINE> <INDENT> if with_triggers: <NEW_LINE> <INDENT> utils.run_triggers(self.collection_mgr.api, obj, "/var/lib/cobbler/triggers/delete/distro/post/*", [], logger) <NEW_LINE> utils.run_triggers(self.collection_mgr.api, obj, "/var/lib/cobbler/triggers/change/*", [], logger) <NEW_LINE> <DEDENT> <DEDENT> settings = self.collection_mgr.settings() <NEW_LINE> possible_storage = glob.glob(settings.webdir + "/distro_mirror/*") <NEW_LINE> path = None <NEW_LINE> for storage in possible_storage: <NEW_LINE> <INDENT> if os.path.dirname(obj.kernel).find(storage) != -1: <NEW_LINE> <INDENT> path = storage <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> if with_delete and path is not None and os.path.exists(path) and kernel.find(settings.webdir) != -1: <NEW_LINE> <INDENT> found = False <NEW_LINE> distros = self.api.distros() <NEW_LINE> for d in distros: <NEW_LINE> <INDENT> if d.kernel.find(path) != -1: <NEW_LINE> <INDENT> found = True <NEW_LINE> <DEDENT> <DEDENT> if not found: <NEW_LINE> <INDENT> utils.rmtree(path)
A distro represents a network bootable matched set of kernels and initrd files.
6259903e6e29344779b018a2
class BaseResource(object): <NEW_LINE> <INDENT> HUMAN_ID = False <NEW_LINE> NAME_ATTR = "name" <NEW_LINE> get_details = True <NEW_LINE> _non_display = [] <NEW_LINE> def __init__(self, manager, info, key=None, loaded=False): <NEW_LINE> <INDENT> self._loaded = loaded <NEW_LINE> self.manager = manager <NEW_LINE> if key: <NEW_LINE> <INDENT> info = info[key] <NEW_LINE> <DEDENT> self._info = info <NEW_LINE> self._add_details(info) <NEW_LINE> <DEDENT> @property <NEW_LINE> def human_id(self): <NEW_LINE> <INDENT> if self.NAME_ATTR in self.__dict__ and self.HUMAN_ID: <NEW_LINE> <INDENT> return utils.slugify(getattr(self, self.NAME_ATTR)) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _add_details(self, info): <NEW_LINE> <INDENT> for (key, val) in info.iteritems(): <NEW_LINE> <INDENT> if isinstance(key, unicode): <NEW_LINE> <INDENT> key = key.encode(pyrax.get_encoding()) <NEW_LINE> <DEDENT> setattr(self, key, val) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> if not self.loaded: <NEW_LINE> <INDENT> self.get() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.__dict__[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError("'%s' object has no attribute " "'%s'." % (self.__class__, key)) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> reprkeys = sorted(key for key in self.__dict__.keys() if (key[0] != "_") and (key not in ("manager", "created", "updated")) and (key not in self._non_display)) <NEW_LINE> info = ", ".join("%s=%s" % (key, getattr(self, key)) for key in reprkeys) <NEW_LINE> return "<%s %s>" % (self.__class__.__name__, info) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> self.loaded = True <NEW_LINE> if not hasattr(self.manager, "get"): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not self.get_details: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> new = self.manager.get(self) <NEW_LINE> if new: <NEW_LINE> <INDENT> self._add_details(new._info) <NEW_LINE> <DEDENT> <DEDENT> reload = get <NEW_LINE> def delete(self): <NEW_LINE> <INDENT> self.loaded = True <NEW_LINE> if not hasattr(self.manager, "delete"): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.manager.delete(self) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if hasattr(self, "id") and hasattr(other, "id"): <NEW_LINE> <INDENT> return self.id == other.id <NEW_LINE> <DEDENT> return self._info == other._info <NEW_LINE> <DEDENT> def _get_loaded(self): <NEW_LINE> <INDENT> return self._loaded <NEW_LINE> <DEDENT> def _set_loaded(self, val): <NEW_LINE> <INDENT> self._loaded = val <NEW_LINE> <DEDENT> loaded = property(_get_loaded, _set_loaded)
A resource represents a particular instance of an object (server, flavor, etc). This is pretty much just a bag for attributes.
6259903e50485f2cf55dc1d3
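The lazy loading in BaseResource hinges on `__getattr__` only firing when normal attribute lookup fails; a standalone sketch of that pattern (the fetch below is faked in-memory, not the pyrax API):

class LazyResource:
    # Unknown attributes trigger a one-time fetch, then a plain __dict__ lookup,
    # mirroring the interplay of BaseResource.__getattr__ and get().
    def __init__(self):
        self._loaded = False

    def _fetch(self):
        # Stand-in for manager.get(); a real client would call the API here.
        self._loaded = True
        self.__dict__.update({"name": "server-1", "status": "ACTIVE"})

    def __getattr__(self, key):
        if not self._loaded:
            self._fetch()
        try:
            return self.__dict__[key]
        except KeyError:
            raise AttributeError(key)

res = LazyResource()
print(res.status)   # first access triggers _fetch() -> "ACTIVE"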
class SmokeTests(SmokeTestCase): <NEW_LINE> <INDENT> authenticated_or_anonymous_urls = ["/public-data/"] <NEW_LINE> authenticated_urls = [ "/public-data/activate-1-overview/", "/public-data/activate-2-information/", "/public-data/toggle-sharing/", "/public-data/deactivate/", ] <NEW_LINE> post_only_urls = [ "/public-data/activate-3-quiz/", "/public-data/activate-4-complete/", ]
A simple GET test for all of the simple URLs in the site.
6259903e8a43f66fc4bf33e0
class _Masters(object): <NEW_LINE> <INDENT> def add_masters_name(self, masters_name): <NEW_LINE> <INDENT> stmt = iscconf.Statement(masters_name) <NEW_LINE> self.add_element(stmt) <NEW_LINE> <DEDENT> def add_master(self, master, port=None, key=None): <NEW_LINE> <INDENT> ip = ipaddr.IPAddress(master) <NEW_LINE> value = [] <NEW_LINE> if port: <NEW_LINE> <INDENT> value.extend(['port', port]) <NEW_LINE> <DEDENT> if key: <NEW_LINE> <INDENT> value.extend(['key', '"%s"' % key]) <NEW_LINE> <DEDENT> stmt = iscconf.Statement(ip, value=value) <NEW_LINE> self.add_element(stmt)
Abstract class for NamedMasters and Masters classes.
6259903e1d351010ab8f4d6d
class InvalidGroupId(Exception): <NEW_LINE> <INDENT> def __init__(self, value=''): <NEW_LINE> <INDENT> Exception.__init__(self, value)
...
6259903ec432627299fa4229
class DevicesActive(bb.Struct): <NEW_LINE> <INDENT> __slots__ = [ '_windows_value', '_macos_value', '_linux_value', '_ios_value', '_android_value', '_other_value', '_total_value', ] <NEW_LINE> _has_required_fields = True <NEW_LINE> def __init__(self, windows=None, macos=None, linux=None, ios=None, android=None, other=None, total=None): <NEW_LINE> <INDENT> self._windows_value = bb.NOT_SET <NEW_LINE> self._macos_value = bb.NOT_SET <NEW_LINE> self._linux_value = bb.NOT_SET <NEW_LINE> self._ios_value = bb.NOT_SET <NEW_LINE> self._android_value = bb.NOT_SET <NEW_LINE> self._other_value = bb.NOT_SET <NEW_LINE> self._total_value = bb.NOT_SET <NEW_LINE> if windows is not None: <NEW_LINE> <INDENT> self.windows = windows <NEW_LINE> <DEDENT> if macos is not None: <NEW_LINE> <INDENT> self.macos = macos <NEW_LINE> <DEDENT> if linux is not None: <NEW_LINE> <INDENT> self.linux = linux <NEW_LINE> <DEDENT> if ios is not None: <NEW_LINE> <INDENT> self.ios = ios <NEW_LINE> <DEDENT> if android is not None: <NEW_LINE> <INDENT> self.android = android <NEW_LINE> <DEDENT> if other is not None: <NEW_LINE> <INDENT> self.other = other <NEW_LINE> <DEDENT> if total is not None: <NEW_LINE> <INDENT> self.total = total <NEW_LINE> <DEDENT> <DEDENT> windows = bb.Attribute("windows") <NEW_LINE> macos = bb.Attribute("macos") <NEW_LINE> linux = bb.Attribute("linux") <NEW_LINE> ios = bb.Attribute("ios") <NEW_LINE> android = bb.Attribute("android") <NEW_LINE> other = bb.Attribute("other") <NEW_LINE> total = bb.Attribute("total") <NEW_LINE> def _process_custom_annotations(self, annotation_type, field_path, processor): <NEW_LINE> <INDENT> super(DevicesActive, self)._process_custom_annotations(annotation_type, field_path, processor)
Each of the items is an array of values, one value per day. The value is the number of devices active within a time window, ending with that day. If there is no data for a day, then the value will be None. :ivar team.DevicesActive.windows: Array of number of linked windows (desktop) clients with activity. :ivar team.DevicesActive.macos: Array of number of linked mac (desktop) clients with activity. :ivar team.DevicesActive.linux: Array of number of linked linux (desktop) clients with activity. :ivar team.DevicesActive.ios: Array of number of linked ios devices with activity. :ivar team.DevicesActive.android: Array of number of linked android devices with activity. :ivar team.DevicesActive.other: Array of number of other linked devices (blackberry, windows phone, etc) with activity. :ivar team.DevicesActive.total: Array of total number of linked clients with activity.
6259903ed99f1b3c44d068ed
class DegreeVsVariable(base.PlotterBase): <NEW_LINE> <INDENT> def plot(self): <NEW_LINE> <INDENT> fig, ax = plt.subplots() <NEW_LINE> variable = self.graph.settings.get('variable', 'size') <NEW_LINE> labels = { 'degree': "Stopnja", 'in-degree': "Vhodna stopnja", 'out-degree': "Izhodna stopnja", } <NEW_LINE> values = { 'degree': {}, 'in-degree': {}, 'out-degree': {}, } <NEW_LINE> for run in self.runs: <NEW_LINE> <INDENT> graph = run.get_graph(self.graph.settings['graph']) <NEW_LINE> degrees = graph.degree().values() <NEW_LINE> values['degree'][run.orig.settings[variable]] = (numpy.average(degrees), numpy.std(degrees)) <NEW_LINE> if hasattr(graph, 'in_degree'): <NEW_LINE> <INDENT> in_degrees = graph.in_degree().values() <NEW_LINE> out_degrees = graph.out_degree().values() <NEW_LINE> values['in-degree'][run.orig.settings[variable]] = (numpy.average(in_degrees), numpy.std(in_degrees)) <NEW_LINE> values['out-degree'][run.orig.settings[variable]] = (numpy.average(out_degrees), numpy.std(out_degrees)) <NEW_LINE> <DEDENT> <DEDENT> for typ in values: <NEW_LINE> <INDENT> X = sorted(values[typ].keys()) <NEW_LINE> Y = [values[typ][x][0] for x in X] <NEW_LINE> Yerr = [values[typ][x][1] for x in X] <NEW_LINE> if X: <NEW_LINE> <INDENT> ax.errorbar(X, Y, Yerr, marker='x', label=labels[typ]) <NEW_LINE> <DEDENT> <DEDENT> fit_function = self.graph.settings.get('fit', None) <NEW_LINE> if fit_function is not None: <NEW_LINE> <INDENT> X = sorted(values['degree'].keys()) <NEW_LINE> Y = [values['degree'][x][0] for x in X] <NEW_LINE> popt, pcov = scipy.optimize.curve_fit(fit_function, X, Y) <NEW_LINE> Fx = numpy.linspace(min(X), max(X) + 2*(X[-1] - X[-2]), 100) <NEW_LINE> Fy = [fit_function(x, *popt) for x in Fx] <NEW_LINE> ax.plot(Fx, Fy, linestyle='--', color='black', label=self.graph.settings.get('fit_label', 'Fit')) <NEW_LINE> <DEDENT> ax.set_xlabel(u'Število vozlišč v osnovni topologiji') <NEW_LINE> ax.set_ylabel(u'Stopnja vozlišča') <NEW_LINE> ax.grid() <NEW_LINE> if self.graph.settings.get('scale'): <NEW_LINE> <INDENT> ax.set_xscale(self.graph.settings.get('scale')) <NEW_LINE> <DEDENT> legend = ax.legend(loc='upper left') <NEW_LINE> if self.settings.GRAPH_TRANSPARENCY: <NEW_LINE> <INDENT> legend.get_frame().set_alpha(0.8) <NEW_LINE> <DEDENT> fig.savefig(self.get_figure_filename())
Draws graph degree in relation to some variable.
6259903e96565a6dacd2d8b3
class NoSuchObjectError(OperationalError, ValueError): <NEW_LINE> <INDENT> pass
Raised when an operation is attempted on a non-existent object.
6259903e66673b3332c31649
class HMDA(object): <NEW_LINE> <INDENT> def __init__(self, institution_zip_file, institution_csv_file, loan_zip_file, loans_csv_file): <NEW_LINE> <INDENT> self.inst_fp = institution_zip_file <NEW_LINE> self.inst_file = institution_csv_file <NEW_LINE> self.loans_fp = loan_zip_file <NEW_LINE> self.loans_file = loans_csv_file <NEW_LINE> self.ins_data = None <NEW_LINE> self.ln_data = None <NEW_LINE> self.resp_ref = None <NEW_LINE> self.full_file = None <NEW_LINE> self.state_list = None <NEW_LINE> <DEDENT> def hmda_init(self): <NEW_LINE> <INDENT> self.ins_data, self.ln_data = FileBuilder(self.inst_fp, self.inst_file, self.loans_fp, self.loans_file).file_builder() <NEW_LINE> self.ins_data = zip_code_fix(self.ins_data, 'Respondent_ZIP_Code') <NEW_LINE> self.ins_data = zip_code_fix(self.ins_data, 'Parent_ZIP_Code') <NEW_LINE> self.ln_data = convert_to_num(self.ln_data, 'Applicant_Income_000') <NEW_LINE> self.ln_data = convert_to_num(self.ln_data, 'FFIEC_Median_Family_Income') <NEW_LINE> self.ln_data = convert_to_num(self.ln_data, 'Number_of_Owner_Occupied_Units') <NEW_LINE> self.ln_data = convert_to_num(self.ln_data, 'Tract_to_MSA_MD_Income_Pct') <NEW_LINE> self.resp_ref = lookup_create(['Respondent_ID', 'Respondent_Name_TS', 'As_of_Year', 'Respondent_City_TS', 'Respondent_State_TS', 'Respondent_ZIP_Code', 'Parent_Name_TS', 'Parent_City_TS', 'Parent_State_TS', 'Parent_ZIP_Code'], self.ins_data) <NEW_LINE> self.full_file = pd.merge(self.ln_data, self.resp_ref, how='inner', on=( 'Respondent_ID', 'As_of_Year')) <NEW_LINE> return self.full_file <NEW_LINE> <DEDENT> def hmda_to_json(self, data, dest_dir, states=None, conventional_conforming=False): <NEW_LINE> <INDENT> directory_check_create(dest_dir) <NEW_LINE> self.state_list = state_verify(states, data) <NEW_LINE> for state in self.state_list: <NEW_LINE> <INDENT> state_file = data.loc[data['State'] == state] <NEW_LINE> if conventional_conforming: <NEW_LINE> <INDENT> state_file = conforming_filter(state_file) <NEW_LINE> <DEDENT> state_output = state_file.reset_index(drop=True).to_json(orient='records') <NEW_LINE> directory_check_create(dest_dir + '/' + state) <NEW_LINE> cleanup_old(dest_dir + '/' + state + '/' + state + '.json') <NEW_LINE> state_write = open(dest_dir + '/' + state + '/' + state + '.json', 'w') <NEW_LINE> json.dump(state_output, state_write) <NEW_LINE> state_write.close() <NEW_LINE> <DEDENT> print('Files created in %s for the states: %r' % (dest_dir, self.state_list)) <NEW_LINE> <DEDENT> def run_plots(self, data, dest_dir, c_filter=False): <NEW_LINE> <INDENT> county_income_plot(data, dest_dir, c_filter) <NEW_LINE> market_size(data, dest_dir, c_filter) <NEW_LINE> total_market(data, dest_dir, c_filter)
Main Class for generating file / formatting / saving JSON data from zipped .csv files
6259903ebe383301e0254a69
class Post(core_models.TimeStampedModel): <NEW_LINE> <INDENT> title = models.CharField(max_length=140) <NEW_LINE> description = models.TextField() <NEW_LINE> photo = models.ImageField(blank=True, upload_to='post_photos/') <NEW_LINE> author = models.ForeignKey( "authentication.User", related_name="author", on_delete=models.CASCADE) <NEW_LINE> is_published = models.BooleanField(default=False) <NEW_LINE> authorized_by = models.ForeignKey( "authentication.User", on_delete=models.SET_NULL, null=True, blank=True, default=None) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['-updated_at'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title
Post Model Definition
6259903ea4f1c619b294f7b0
class NativeEventsManagementServiceStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.ListEvents = channel.unary_unary( '/dmi.NativeEventsManagementService/ListEvents', request_serializer=dmi_dot_hw__pb2.HardwareID.SerializeToString, response_deserializer=dmi_dot_hw__events__mgmt__service__pb2.ListEventsResponse.FromString, ) <NEW_LINE> self.UpdateEventsConfiguration = channel.unary_unary( '/dmi.NativeEventsManagementService/UpdateEventsConfiguration', request_serializer=dmi_dot_hw__events__mgmt__service__pb2.EventsConfigurationRequest.SerializeToString, response_deserializer=dmi_dot_hw__events__mgmt__service__pb2.EventsConfigurationResponse.FromString, ) <NEW_LINE> self.StreamEvents = channel.unary_stream( '/dmi.NativeEventsManagementService/StreamEvents', request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, response_deserializer=dmi_dot_hw__events__mgmt__service__pb2.Event.FromString, )
Missing associated documentation comment in .proto file.
6259903e30c21e258be99a5e
class MockMemcache(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.key = None <NEW_LINE> self.time = None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_stats(): <NEW_LINE> <INDENT> return [('127.0.0.1:11211 (1)', {})] <NEW_LINE> <DEDENT> def delete(self, key, time): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.time = time <NEW_LINE> return True
Mock of memcache
6259903eac7a0e7691f7373b
class OrderService(object): <NEW_LINE> <INDENT> def _create_order(self, paypal_payment_id, user_id, products, order_id, status): <NEW_LINE> <INDENT> order = Order({ 'paypal_payment_id': paypal_payment_id, 'user_id': user_id, 'created_at': datetime.now(), 'status': status }) <NEW_LINE> db.session.add(order) <NEW_LINE> db.session.commit() <NEW_LINE> if status != 'init': <NEW_LINE> <INDENT> order.parent_id = order_id <NEW_LINE> db.session.add(order) <NEW_LINE> db.session.commit() <NEW_LINE> return <NEW_LINE> <DEDENT> order.parent_id = order.id <NEW_LINE> db.session.add(order) <NEW_LINE> for item in products: <NEW_LINE> <INDENT> product_order = ProductOrder({ 'product_id': item['product'].id, 'quantity': item['quantity'], 'order_id': order.id }) <NEW_LINE> db.session.add(product_order) <NEW_LINE> <DEDENT> db.session.commit() <NEW_LINE> <DEDENT> def create_init_order(self, paypal_payment_id, user_id, products): <NEW_LINE> <INDENT> self._create_order(paypal_payment_id, user_id, products, None, 'init') <NEW_LINE> <DEDENT> def create_failed_order(self, paypal_payment_id, user_id, order_id): <NEW_LINE> <INDENT> self._create_order(paypal_payment_id, user_id, None, order_id, 'fail') <NEW_LINE> <DEDENT> def create_success_order(self, paypal_payment_id, user_id, order_id): <NEW_LINE> <INDENT> self._create_order(paypal_payment_id, user_id, None, order_id, 'success') <NEW_LINE> <DEDENT> def get_order_by_payment_id(self, payment_id): <NEW_LINE> <INDENT> return Order.query.filter_by(paypal_payment_id=payment_id).first()
Database wrapper for the orders
6259903e50485f2cf55dc1d5
class Solution: <NEW_LINE> <INDENT> def coinChange(self, coins, amount): <NEW_LINE> <INDENT> dp = [sys.maxsize for _ in range(amount + 1)] <NEW_LINE> dp[0] = 0 <NEW_LINE> for amo in range(1, amount + 1): <NEW_LINE> <INDENT> for coin in coins: <NEW_LINE> <INDENT> if amo - coin >= 0: <NEW_LINE> <INDENT> dp[amo] = min(dp[amo], dp[amo - coin] + 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dp[amount] if dp[amount] != sys.maxsize else -1
@param coins: a list of integer @param amount: a total amount of money amount @return: the fewest number of coins that you need to make up
6259903e29b78933be26a9ec
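A standalone sketch of the same bottom-up DP (the class above also assumes `import sys` for `sys.maxsize`):

import sys

def coin_change(coins, amount):
    # dp[a] = fewest coins summing to a; sys.maxsize marks "not reachable".
    dp = [sys.maxsize] * (amount + 1)
    dp[0] = 0
    for a in range(1, amount + 1):
        for coin in coins:
            if a - coin >= 0:
                dp[a] = min(dp[a], dp[a - coin] + 1)
    return dp[amount] if dp[amount] != sys.maxsize else -1

print(coin_change([1, 2, 5], 11))  # 3  (5 + 5 + 1)
print(coin_change([2], 3))         # -1 (an odd amount cannot be formed from 2s)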
class V4RoomMemberDemotedFromOwner(object): <NEW_LINE> <INDENT> openapi_types = { 'stream': 'V4Stream', 'affected_user': 'V4User' } <NEW_LINE> attribute_map = { 'stream': 'stream', 'affected_user': 'affectedUser' } <NEW_LINE> def __init__(self, stream=None, affected_user=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._stream = None <NEW_LINE> self._affected_user = None <NEW_LINE> self.discriminator = None <NEW_LINE> if stream is not None: <NEW_LINE> <INDENT> self.stream = stream <NEW_LINE> <DEDENT> if affected_user is not None: <NEW_LINE> <INDENT> self.affected_user = affected_user <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def stream(self): <NEW_LINE> <INDENT> return self._stream <NEW_LINE> <DEDENT> @stream.setter <NEW_LINE> def stream(self, stream): <NEW_LINE> <INDENT> self._stream = stream <NEW_LINE> <DEDENT> @property <NEW_LINE> def affected_user(self): <NEW_LINE> <INDENT> return self._affected_user <NEW_LINE> <DEDENT> @affected_user.setter <NEW_LINE> def affected_user(self, affected_user): <NEW_LINE> <INDENT> self._affected_user = affected_user <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V4RoomMemberDemotedFromOwner): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V4RoomMemberDemotedFromOwner): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259903e004d5f362081f90d
class SubscriptionAPI(Resource): <NEW_LINE> <INDENT> def get(self, sid): <NEW_LINE> <INDENT> subscription = get_object_or_404(Subscription, sid) <NEW_LINE> result = SubscriptionSchema().dump(subscription) <NEW_LINE> return jsonify(result.data)
Resource/routes for subscription endpoints
6259903e50485f2cf55dc1d6
class Part(object): <NEW_LINE> <INDENT> BOUNDARY = '---------------------------89322229629855' <NEW_LINE> CRLF = '\r\n' <NEW_LINE> CONTENT_TYPE = 'Content-Type' <NEW_LINE> CONTENT_DISPOSITION = 'Content-Disposition' <NEW_LINE> DEFAULT_CONTENT_TYPE = 'application/octet-stream' <NEW_LINE> def __init__(self, name, filename, body, headers): <NEW_LINE> <INDENT> self._headers = headers.copy() <NEW_LINE> self._name = name <NEW_LINE> self._filename = filename <NEW_LINE> self._body = body <NEW_LINE> if self._filename == None: <NEW_LINE> <INDENT> self._headers[Part.CONTENT_DISPOSITION] = ('form-data; name="%s"' % self._name) <NEW_LINE> self._headers.setdefault(Part.CONTENT_TYPE, Part.DEFAULT_CONTENT_TYPE) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._headers[Part.CONTENT_DISPOSITION] = ('form-data; name="%s"; filename="%s"' % (self._name, self._filename)) <NEW_LINE> self._headers.setdefault(Part.CONTENT_TYPE, mimetypes.guess_type(filename)[0] or Part.DEFAULT_CONTENT_TYPE) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> lines = [] <NEW_LINE> lines.append('--' + Part.BOUNDARY) <NEW_LINE> for (key, val) in self._headers.items(): <NEW_LINE> <INDENT> lines.append('%s: %s' % (key, val)) <NEW_LINE> <DEDENT> lines.append('') <NEW_LINE> lines.append(self._body) <NEW_LINE> return lines
Class holding a single part of the form. You should never need to use this class directly; instead, use the factory methods in Multipart: field and file.
6259903e26068e7796d4db99
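A short usage sketch, assuming the Part class above is available (it needs `import mimetypes` at module level); the closing boundary line is appended by hand here, since the Multipart factory mentioned in the docstring is not shown:

field = Part('comment', None, 'hello world', {})
upload = Part('attachment', 'notes.txt', 'file body here', {})

body_lines = []
for part in (field, upload):
    body_lines.extend(part.get())                  # boundary, headers, blank line, body
body_lines.append('--' + Part.BOUNDARY + '--')     # terminate the multipart body
print(Part.CRLF.join(body_lines))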
class OffsetBaseIndexScale(interface.namedtypedtuple, interface.symbol_t): <NEW_LINE> <INDENT> _fields = ('offset','base','index','scale') <NEW_LINE> _types = (six.integer_types, (types.NoneType,register_t), (types.NoneType,register_t), six.integer_types) <NEW_LINE> @property <NEW_LINE> def __symbols__(self): <NEW_LINE> <INDENT> _, b, i, _ = self <NEW_LINE> if b is not None: yield b <NEW_LINE> if i is not None: yield i
A tuple containing an intel operand (offset, base, index, scale). Within the tuple, `base` and `index` are registers.
6259903e21bff66bcd723ebc
class __EntryIterator(object): <NEW_LINE> <INDENT> ITEMS_MODE = 0 <NEW_LINE> KEYS_MODE = 1 <NEW_LINE> VALUES_MODE = 2 <NEW_LINE> def __init__(self, entries, mode): <NEW_LINE> <INDENT> self.__mode = mode <NEW_LINE> self.__entries = entries <NEW_LINE> self.__next_index = 0 <NEW_LINE> self.__skip_unused() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.__next_index < len(self.__entries): <NEW_LINE> <INDENT> entry = self.__entries[self.__next_index] <NEW_LINE> self.__next_index += 1 <NEW_LINE> self.__skip_unused() <NEW_LINE> if self.__mode == self.ITEMS_MODE: <NEW_LINE> <INDENT> return entry <NEW_LINE> <DEDENT> elif self.__mode == self.KEYS_MODE: <NEW_LINE> <INDENT> return entry[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return entry[1] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> <DEDENT> def __skip_unused(self): <NEW_LINE> <INDENT> found = False <NEW_LINE> for i in range(self.__next_index, len(self.__entries)): <NEW_LINE> <INDENT> if self.__entries[i] != None: <NEW_LINE> <INDENT> self.__next_index = i <NEW_LINE> found = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not found: <NEW_LINE> <INDENT> self.__next_index = len(self.__entries)
Iterator class for visiting all entries in the collection.
6259903e30dc7b76659a0a84
class NotificationEventRequest: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.I64, 'lastEvent', None, None, ), (2, TType.I32, 'maxEvents', None, None, ), ) <NEW_LINE> def __init__(self, lastEvent=None, maxEvents=None,): <NEW_LINE> <INDENT> self.lastEvent = lastEvent <NEW_LINE> self.maxEvents = maxEvents <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I64: <NEW_LINE> <INDENT> self.lastEvent = iprot.readI64() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.maxEvents = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('NotificationEventRequest') <NEW_LINE> if self.lastEvent is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('lastEvent', TType.I64, 1) <NEW_LINE> oprot.writeI64(self.lastEvent) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.maxEvents is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('maxEvents', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.maxEvents) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.lastEvent is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field lastEvent is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.lastEvent) <NEW_LINE> value = (value * 31) ^ hash(self.maxEvents) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - lastEvent - maxEvents
6259903e16aa5153ce401740
class DeviceCategory(object): <NEW_LINE> <INDENT> def __init__(self, code: str, description: str, max_devices: Optional[int]): <NEW_LINE> <INDENT> self._code = code <NEW_LINE> self._description = description <NEW_LINE> self._max_devices = max_devices <NEW_LINE> <DEDENT> @property <NEW_LINE> def code(self) -> str: <NEW_LINE> <INDENT> return self._code <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self) -> str: <NEW_LINE> <INDENT> return self._description <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_devices(self) -> Optional[int]: <NEW_LINE> <INDENT> return self._max_devices <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "<{}: code={}, description={}, max_devices={}>".format(self.__class__.__name__, self._code, self._description, self._max_devices) <NEW_LINE> <DEDENT> def as_dict(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> return serializable(self)
Represents a category of devices.
6259903e3c8af77a43b68865
class TruncatedZipfDist(DiscreteDist): <NEW_LINE> <INDENT> def __init__(self, alpha=1.0, n=1000, seed=None): <NEW_LINE> <INDENT> if alpha <= 0: <NEW_LINE> <INDENT> raise ValueError("alpha must be positive") <NEW_LINE> <DEDENT> if n < 0: <NEW_LINE> <INDENT> raise ValueError("n must be positive") <NEW_LINE> <DEDENT> pdf = np.arange(1.0, n + 1.0) ** -alpha <NEW_LINE> pdf /= np.sum(pdf) <NEW_LINE> self._alpha = alpha <NEW_LINE> super().__init__(pdf, seed) <NEW_LINE> <DEDENT> @property <NEW_LINE> def alpha(self): <NEW_LINE> <INDENT> return self._alpha
Implements a truncated Zipf distribution, i.e. a Zipf distribution over a finite population, which can therefore take any alpha > 0 (an infinite-support Zipf is only normalizable for alpha > 1).
6259903e23e79379d538d752
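The constructor boils down to normalising `k**-alpha` over k = 1..n; a numpy-only sketch of that pmf:

import numpy as np

def truncated_zipf_pmf(alpha=1.0, n=1000):
    # p(k) proportional to k**-alpha for k = 1..n, normalised to sum to 1.
    pmf = np.arange(1.0, n + 1.0) ** -alpha
    return pmf / pmf.sum()

pmf = truncated_zipf_pmf(alpha=0.8, n=5)
print(pmf.round(3), pmf.sum())   # five probabilities summing to 1.0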
class NavButton(Styled): <NEW_LINE> <INDENT> _style = "plain" <NEW_LINE> def __init__(self, title, dest, sr_path=True, nocname=False, aliases=None, target="", use_params=False, css_class='', data=None): <NEW_LINE> <INDENT> aliases = aliases or [] <NEW_LINE> aliases = set(_force_unicode(a.rstrip('/')) for a in aliases) <NEW_LINE> if dest: <NEW_LINE> <INDENT> aliases.add(_force_unicode(dest.rstrip('/'))) <NEW_LINE> <DEDENT> self.title = title <NEW_LINE> self.dest = dest <NEW_LINE> self.selected = False <NEW_LINE> self.sr_path = sr_path <NEW_LINE> self.nocname = nocname <NEW_LINE> self.aliases = aliases <NEW_LINE> self.target = target <NEW_LINE> self.use_params = use_params <NEW_LINE> self.data = data <NEW_LINE> Styled.__init__(self, self._style, css_class=css_class) <NEW_LINE> <DEDENT> def build(self, base_path=''): <NEW_LINE> <INDENT> base_path = ("%s/%s/" % (base_path, self.dest)).replace('//', '/') <NEW_LINE> self.bare_path = _force_unicode(base_path.replace('//', '/')).lower() <NEW_LINE> self.bare_path = self.bare_path.rstrip('/') <NEW_LINE> self.base_path = base_path <NEW_LINE> if self.use_params: <NEW_LINE> <INDENT> base_path += query_string(dict(request.GET)) <NEW_LINE> <DEDENT> self.path = base_path.replace('//', '/') <NEW_LINE> <DEDENT> def is_selected(self): <NEW_LINE> <INDENT> stripped_path = _force_unicode(request.path.rstrip('/').lower()) <NEW_LINE> if stripped_path == self.bare_path: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> site_path = c.site.user_path.lower() + self.bare_path <NEW_LINE> if self.sr_path and stripped_path == site_path: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.bare_path and stripped_path.startswith(self.bare_path): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if stripped_path in self.aliases: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def selected_title(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def cachable_attrs(self): <NEW_LINE> <INDENT> return [ ('selected', self.selected), ('title', self.title), ('path', self.path), ('sr_path', self.sr_path), ('nocname', self.nocname), ('target', self.target), ('css_class', self.css_class), ('_id', self._id), ('data', self.data), ]
Smallest unit of site navigation. A button once constructed must also have its build() method called with the current path to set self.path. This step is done automatically if the button is passed to a NavMenu instance upon its construction.
6259903e24f1403a926861f6
class debounce_handler(object): <NEW_LINE> <INDENT> DEBOUNCE_SECONDS = 0.3 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.lastEcho = time.time() <NEW_LINE> <DEDENT> def on(self, client_address): <NEW_LINE> <INDENT> if self.debounce(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.act(client_address, True) <NEW_LINE> <DEDENT> def off(self, client_address): <NEW_LINE> <INDENT> if self.debounce(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.act(client_address, False) <NEW_LINE> <DEDENT> def act(self, client_address, state): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def debounce(self): <NEW_LINE> <INDENT> if (time.time() - self.lastEcho) < self.DEBOUNCE_SECONDS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> self.lastEcho = time.time() <NEW_LINE> return False
Use this handler to keep multiple Amazon Echo devices from reacting to the same voice command.
6259903e0a366e3fb87ddc39
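A standalone sketch of the debounce behaviour (note that because `lastEcho` starts at construction time, a trigger arriving within the first 0.3 s is also swallowed):

import time

class Debounce:
    DEBOUNCE_SECONDS = 0.3

    def __init__(self):
        self.last = time.time()

    def trigger(self):
        # Ignore triggers that arrive within DEBOUNCE_SECONDS of the previous one.
        if time.time() - self.last < self.DEBOUNCE_SECONDS:
            return False   # swallowed
        self.last = time.time()
        return True        # accepted

d = Debounce()
print(d.trigger())   # False: still inside the window opened at construction
time.sleep(0.4)
print(d.trigger())   # True: the window has elapsed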
class FailedMos(BaseObject): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> BaseObject.__init__(self, "FailedMos", "failedMos") <NEW_LINE> if kwargs: <NEW_LINE> <INDENT> for n, v in six.iteritems(kwargs): <NEW_LINE> <INDENT> self.attr_set(n, v)
This is the FailedMos class.
6259903eac7a0e7691f7373d
class TestUsersEdit(BaseTestCase): <NEW_LINE> <INDENT> @request_context <NEW_LINE> def test_admin_can_edit_all_attributes(self): <NEW_LINE> <INDENT> UserPayload().blend().patch(self.user_1.id, user=self.admin)
Test Users edit
6259903e6e29344779b018a6
class TestQueryPreset(mediaBase.MediaBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> mediaBase.MediaBase.__init__(self) <NEW_LINE> self.pre = self.prefix + 'querypreset' <NEW_LINE> self.preset_name = self.pre <NEW_LINE> self.container = 'mp4' <NEW_LINE> self.client = media_client.MediaClient(media_config.config) <NEW_LINE> self.key = 'watermark.jpg' <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> resp = self.client.create_preset(self.preset_name, self.container, True) <NEW_LINE> nose.tools.assert_true(resp) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> result = self.client.list_presets() <NEW_LINE> for each_val in result.presets: <NEW_LINE> <INDENT> preset_name = each_val.preset_name <NEW_LINE> if(preset_name.startswith(self.pre)): <NEW_LINE> <INDENT> resp = self.client.delete_preset(preset_name) <NEW_LINE> nose.tools.assert_is_not_none(resp) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_query_preset_exist(self): <NEW_LINE> <INDENT> resp = self.client.get_preset(self.preset_name) <NEW_LINE> assert resp.state == 'ACTIVE' <NEW_LINE> assert resp.preset_name == self.preset_name <NEW_LINE> <DEDENT> def test_query_preset_deleted(self): <NEW_LINE> <INDENT> resp = self.client.delete_preset(self.preset_name) <NEW_LINE> nose.tools.assert_is_not_none(resp) <NEW_LINE> resp = self.client.get_preset(self.preset_name) <NEW_LINE> assert resp.state == 'INACTIVE' <NEW_LINE> assert resp.preset_name == self.preset_name <NEW_LINE> <DEDENT> def test_query_preset_not_exist(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resp = self.client.get_preset('not_exist_preset') <NEW_LINE> <DEDENT> except BceHttpClientError as e: <NEW_LINE> <INDENT> if isinstance(e.last_error, BceServerError): <NEW_LINE> <INDENT> assert e.last_error.message.startswith('The requested preset does not exist') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert True == False, 'not throw BceServerError' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_query_preset_param_none(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resp = self.client.get_preset(None) <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> assert e.message.startswith('arg "preset_name" should not be None') <NEW_LINE> <DEDENT> <DEDENT> def test_query_preset_param_empty(self): <NEW_LINE> <INDENT> with nose.tools.assert_raises_regexp(BceClientError, 'preset_name can\'t be empty string'): <NEW_LINE> <INDENT> resp = self.client.get_preset('')
test query preset
6259903e50485f2cf55dc1d7
class ICallforpaper(model.Schema): <NEW_LINE> <INDENT> title = schema.TextLine( title=_(safe_unicode('Call for paper title')), ) <NEW_LINE> description = schema.Text( title=_(safe_unicode('Call for paper summary')), required=False, ) <NEW_LINE> primary('details') <NEW_LINE> details = RichText( title=_(safe_unicode('Details')), description=_(safe_unicode('Details about the program')), required=True, ) <NEW_LINE> cfp_topics = schema.List(title=_(safe_unicode('Topics for the Call for Papers')), description=_( safe_unicode('Fill in the topics for conference talks and workshops. ' 'Use a new line for every value / topic.')), default=['Development', 'Documentation', 'Project-Administration'], value_type=schema.TextLine(), )
A call for papers for a conference. A call for papers can contain incoming talks.
6259903e07f4c71912bb0686
class DatetimeWidget(BaseDatetimeWidget): <NEW_LINE> <INDENT> pickDate = True <NEW_LINE> pickTime = True <NEW_LINE> klass = 'datetime-widget'
Widget for Datetime fields.
6259903e50485f2cf55dc1d8
class TestRoutesApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = swagger_client.api.routes_api.RoutesApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_route_as_gpx(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_route_as_tcx(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_route_by_id(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_routes_by_athlete_id(self): <NEW_LINE> <INDENT> pass
RoutesApi unit test stubs
6259903ed164cc61758221ca
class Invitation(models.Model): <NEW_LINE> <INDENT> exam = models.ForeignKey(Exam, on_delete=models.CASCADE, blank=True) <NEW_LINE> email = models.EmailField() <NEW_LINE> date_created = models.DateTimeField(auto_now_add=True) <NEW_LINE> date_started = models.DateTimeField(blank=True, null=True) <NEW_LINE> date_ended = models.DateTimeField(blank=True, null=True) <NEW_LINE> date_expired = models.DateTimeField(blank=True, null=True) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_in_progress = models.BooleanField(default=False) <NEW_LINE> is_passed = models.BooleanField(default=False) <NEW_LINE> is_evaluated = models.BooleanField(blank=True, default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ['exam', 'email'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"{self.email}: {self.is_active}"
Invite to exam by email or direct user
6259903e07d97122c4217ef3
class TestCompareXLSXFiles(ExcelComparisonTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.maxDiff = None <NEW_LINE> filename = 'selection01.xlsx' <NEW_LINE> test_dir = 'xlsxwriter/test/comparison/' <NEW_LINE> self.got_filename = test_dir + '_test_' + filename <NEW_LINE> self.exp_filename = test_dir + 'xlsx_files/' + filename <NEW_LINE> self.ignore_files = [] <NEW_LINE> self.ignore_elements = {} <NEW_LINE> <DEDENT> def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> worksheet.set_selection('B4:C5') <NEW_LINE> workbook.close() <NEW_LINE> self.assertExcelEqual()
Test file created by XlsxWriter against a file created by Excel.
6259903e73bcbd0ca4bcb4df
class DisbiGetTableData(View): <NEW_LINE> <INDENT> experiment_meta_model = None <NEW_LINE> def get(self, request, exp_id_str): <NEW_LINE> <INDENT> response = {} <NEW_LINE> response['status'] = None <NEW_LINE> response['data'] = {} <NEW_LINE> response['err_msg'] = None <NEW_LINE> exp_ids = get_ids(exp_id_str) <NEW_LINE> requested_exps = self.experiment_meta_model.objects.filter(pk__in=exp_ids) <NEW_LINE> result = DataResult(requested_exps, self.experiment_meta_model) <NEW_LINE> table_data = result.get_or_create_base_table(fetch_as='namedtuple') <NEW_LINE> response['data']['columns'] = table_data[0]._fields <NEW_LINE> response['data']['tableData'] = [tuple(row) for row in table_data] <NEW_LINE> return JsonResponse(response)
View for initially getting the data for the datatable.
6259903e96565a6dacd2d8b5
class RecordingStopping(base_classes.Baseevents): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> base_classes.Baseevents.__init__(self) <NEW_LINE> self.name = "RecordingStopping"
A request to stop recording has been issued.
6259903e507cdc57c63a5ff1
class CosineAnnealingExtendLR(torch.optim.lr_scheduler._LRScheduler): <NEW_LINE> <INDENT> def __init__(self, optimizer, T_cosine_max, eta_min=0, last_epoch=-1): <NEW_LINE> <INDENT> self.eta_min = eta_min <NEW_LINE> self.T_cosine_max = T_cosine_max <NEW_LINE> super(CosineAnnealingExtendLR, self).__init__(optimizer, last_epoch) <NEW_LINE> <DEDENT> def get_lr(self): <NEW_LINE> <INDENT> if self.last_epoch <= self.T_cosine_max: <NEW_LINE> <INDENT> return [self.eta_min + (base_lr - self.eta_min) * (1 + math.cos(math.pi * self.last_epoch / self.T_cosine_max)) / 2 for base_lr in self.base_lrs] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [self.eta_min for _ in self.base_lrs]
Set the learning rate of each parameter group using a cosine annealing schedule, where :math:`\eta_{max}` is set to the initial lr and :math:`T_{cur}` is the number of epochs since the last restart in SGDR: .. math:: \eta_t = \eta_{min} + \frac{1}{2}(\eta_{max} - \eta_{min})(1 + \cos(\frac{T_{cur}}{T_{max}}\pi)) When last_epoch=-1, sets initial lr as lr. It has been proposed in `SGDR: Stochastic Gradient Descent with Warm Restarts`_. Note that this only implements the cosine annealing part of SGDR, and not the restarts. Args: optimizer (Optimizer): Wrapped optimizer. T_cosine_max (int): Number of epochs over which the cosine annealing runs; after this the learning rate is held at eta_min. eta_min (float): Minimum learning rate. Default: 0. last_epoch (int): The index of last epoch. Default: -1. .. _SGDR\: Stochastic Gradient Descent with Warm Restarts: https://arxiv.org/abs/1608.03983
6259903e0a366e3fb87ddc3b
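A dependency-free sketch of the resulting schedule for a single parameter group: cosine decay until `T_cosine_max`, then the learning rate is pinned at `eta_min` (illustrative numbers, not tied to any particular optimizer):

import math

def lr_at(epoch, base_lr=0.1, eta_min=0.001, t_cosine_max=10):
    # Same formula as get_lr(): cosine decay for epoch <= t_cosine_max, flat afterwards.
    if epoch <= t_cosine_max:
        return eta_min + (base_lr - eta_min) * (1 + math.cos(math.pi * epoch / t_cosine_max)) / 2
    return eta_min

for e in (0, 5, 10, 15):
    print(e, round(lr_at(e), 4))   # 0.1, 0.0505, 0.001, 0.001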
class FacebookMessage: <NEW_LINE> <INDENT> notification_message_max_len = 15 <NEW_LINE> def __init__(self, sent_by, location, time, content): <NEW_LINE> <INDENT> self.sent_by = sent_by <NEW_LINE> self.location = location <NEW_LINE> self.time = time <NEW_LINE> self.content = content <NEW_LINE> <DEDENT> def notification(self): <NEW_LINE> <INDENT> res_str = "{}\n{}\n{}\n".format(self.time, self.sent_by, self.location) <NEW_LINE> res_str += self.content if len(self.content) <= FacebookMessage.notification_message_max_len else self.content[:FacebookMessage.notification_message_max_len] + "..." <NEW_LINE> return res_str
Facebook message class
6259903e63f4b57ef008669f
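A quick usage sketch, assuming the FacebookMessage class above (illustrative values):

msg = FacebookMessage(
    sent_by="Alice",
    location="Messenger",
    time="2021-05-01 10:00",
    content="This message is longer than fifteen characters",
)
print(msg.notification())
# time, sender and location on their own lines, then the content
# truncated to 15 characters followed by "..."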
class FixtureLib: <NEW_LINE> <INDENT> def assert_logger(self, logger): <NEW_LINE> <INDENT> if not isinstance(logger, logging.Logger): <NEW_LINE> <INDENT> raise TypeError("logger must be a Logger instance") <NEW_LINE> <DEDENT> <DEDENT> def close_loggers(self, handler): <NEW_LINE> <INDENT> return logging.flush.close_later(handler) <NEW_LINE> <DEDENT> def new_fixture_node_logger(self, fixture_class, job_num, node_name): <NEW_LINE> <INDENT> return logging.loggers.new_fixture_node_logger(fixture_class, job_num, node_name) <NEW_LINE> <DEDENT> def make_fixture(self, class_name, logger, job_num, *args, **kwargs): <NEW_LINE> <INDENT> return _builder.make_fixture(class_name, logger, job_num, *args, **kwargs) <NEW_LINE> <DEDENT> def mongod_program(self, logger, job_num, executable, process_kwargs, mongod_options): <NEW_LINE> <INDENT> return core.programs.mongod_program(logger, job_num, executable, process_kwargs, mongod_options) <NEW_LINE> <DEDENT> def mongos_program( self, logger, job_num, test_id=None, executable=None, process_kwargs=None, mongos_options=None): <NEW_LINE> <INDENT> return core.programs.mongos_program(logger, job_num, test_id, executable, process_kwargs, mongos_options) <NEW_LINE> <DEDENT> def generic_program(self, logger, args, job_num, test_id=None, process_kwargs=None, **kwargs): <NEW_LINE> <INDENT> return core.programs.generic_program(logger, args, job_num, test_id, process_kwargs, **kwargs) <NEW_LINE> <DEDENT> ServerFailure = errors.ServerFailure <NEW_LINE> def make_historic(self, obj): <NEW_LINE> <INDENT> return _make_historic(obj) <NEW_LINE> <DEDENT> def default_if_none(self, *values): <NEW_LINE> <INDENT> return utils.default_if_none(*values) <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return _FixtureConfig() <NEW_LINE> <DEDENT> def get_next_port(self, job_num): <NEW_LINE> <INDENT> return network.PortAllocator.next_fixture_port(job_num)
Class that exposes the resmokelib API that fixtures can use.
6259903e6e29344779b018a8
class TestValidTemplateMeta(MetaTestCaseClass): <NEW_LINE> <INDENT> def __new__(cls, name, bases, dct): <NEW_LINE> <INDENT> def test_method(site): <NEW_LINE> <INDENT> def test_template(self): <NEW_LINE> <INDENT> lang = site.lang <NEW_LINE> if lang not in keys: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> msg = i18n.twtranslate(lang, package, fallback=False) <NEW_LINE> if msg: <NEW_LINE> <INDENT> templates = extract_templates_and_params_regex_simple(msg) <NEW_LINE> self.assertIsInstance(templates, list) <NEW_LINE> self.assertGreater(len(templates), 0) <NEW_LINE> if site.code == 'simple': <NEW_LINE> <INDENT> raise unittest.SkipTest( "'simple' wiki has 'en' language code but " "missing template. Must be solved by the " "corresponding script.") <NEW_LINE> <DEDENT> title = templates[0][0] <NEW_LINE> page = pywikibot.Page(site, title, ns=10) <NEW_LINE> self.assertTrue(page.exists()) <NEW_LINE> <DEDENT> <DEDENT> return test_template <NEW_LINE> <DEDENT> if not i18n.messages_available(): <NEW_LINE> <INDENT> raise unittest.SkipTest("i18n messages package '%s' not available." % i18n._messages_package_name) <NEW_LINE> <DEDENT> site = pywikibot.Site(dct['code'], dct['family']) <NEW_LINE> codes = site.family.languages_by_size <NEW_LINE> del site <NEW_LINE> for package in PACKAGES: <NEW_LINE> <INDENT> keys = i18n.twget_keys(package) <NEW_LINE> for code in codes: <NEW_LINE> <INDENT> current_site = pywikibot.Site(code, dct['family']) <NEW_LINE> test_name = ("test_%s_%s" % (package, code)).replace('-', '_') <NEW_LINE> cls.add_method( dct, test_name, test_method(current_site), doc_suffix='{0} and language {1}'.format( package, code)) <NEW_LINE> <DEDENT> <DEDENT> return super(TestValidTemplateMeta, cls).__new__(cls, name, bases, dct)
Test meta class.
6259903e1f5feb6acb163e49
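The core trick in the metaclass above is generating one test method per item at class-creation time; a standalone sketch of that pattern with hypothetical language codes:

import unittest

class PerCodeMeta(type):
    codes = ("en", "de", "fr")   # hypothetical items to generate tests for

    def __new__(mcs, name, bases, dct):
        def make_test(code):
            def test(self):
                self.assertTrue(code.isalpha())
            return test
        # Inject one test method per code before the class object is built.
        for code in mcs.codes:
            dct["test_code_%s" % code] = make_test(code)
        return super().__new__(mcs, name, bases, dct)

class TestCodes(unittest.TestCase, metaclass=PerCodeMeta):
    pass

print(sorted(n for n in dir(TestCodes) if n.startswith("test_code_")))
# ['test_code_de', 'test_code_en', 'test_code_fr']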
class VirtualCorpus(Corpus): <NEW_LINE> <INDENT> _virtual_works = [] <NEW_LINE> for name in dir(virtual): <NEW_LINE> <INDENT> className = getattr(virtual, name) <NEW_LINE> if callable(className): <NEW_LINE> <INDENT> obj = className() <NEW_LINE> if isinstance(obj, virtual.VirtualWork): <NEW_LINE> <INDENT> if obj.corpusPath is not None: <NEW_LINE> <INDENT> _virtual_works.append(obj) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def _cacheName(self): <NEW_LINE> <INDENT> return 'virtual' <NEW_LINE> <DEDENT> def getPaths( self, fileExtensions=None, expandExtensions=True, ): <NEW_LINE> <INDENT> fileExtensions = self._translateExtensions( fileExtensions=fileExtensions, expandExtensions=expandExtensions, ) <NEW_LINE> paths = [] <NEW_LINE> for obj in self._virtual_works: <NEW_LINE> <INDENT> if obj.corpusPath is not None: <NEW_LINE> <INDENT> for fileExtension in fileExtensions: <NEW_LINE> <INDENT> results = obj.getUrlByExt(fileExtension) <NEW_LINE> for result in results: <NEW_LINE> <INDENT> if result not in paths: <NEW_LINE> <INDENT> paths.append(result) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return paths <NEW_LINE> <DEDENT> def getWorkList( self, workName, movementNumber=None, fileExtensions=None, ): <NEW_LINE> <INDENT> if not common.isListLike(fileExtensions): <NEW_LINE> <INDENT> fileExtensions = [fileExtensions] <NEW_LINE> <DEDENT> for obj in VirtualCorpus._virtual_works: <NEW_LINE> <INDENT> if obj.corpusPath is not None and workName.lower() in obj.corpusPath.lower(): <NEW_LINE> <INDENT> return obj.getUrlByExt(fileExtensions) <NEW_LINE> <DEDENT> <DEDENT> return [] <NEW_LINE> <DEDENT> def search(self, query, field=None, fileExtensions=None): <NEW_LINE> <INDENT> from music21 import metadata <NEW_LINE> return metadata.MetadataBundle.fromVirtualCorpus(self.name).search( query, field=field, fileExtensions=fileExtensions, ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return 'virtual'
A model of the *virtual* corpus. >>> virtualCorpus = corpus.VirtualCorpus()
6259903e097d151d1a2c22bc
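A brief usage sketch for the class above, assuming a music21 version that still exposes corpus.VirtualCorpus as in the record's own doctest; the work name 'bach' and the '.xml' extension are illustrative guesses rather than guaranteed inputs.

# Hedged usage sketch for VirtualCorpus; assumes a music21 installation
# that still provides corpus.VirtualCorpus (as the record's doctest does).
from music21 import corpus

virtual_corpus = corpus.VirtualCorpus()

# All remote URLs for virtual works, optionally filtered by extension;
# '.xml' is an illustrative guess at the expected extension format.
paths = virtual_corpus.getPaths(fileExtensions=['.xml'])
print(len(paths), 'virtual paths found')

# Look up a single work by a substring of its corpus path; 'bach' is a guess.
print(virtual_corpus.getWorkList('bach'))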
@inherit_doc <NEW_LINE> class VectorSizeHint( JavaTransformer, HasInputCol, HasHandleInvalid, JavaMLReadable, JavaMLWritable ): <NEW_LINE> <INDENT> size = Param( Params._dummy(), "size", "Size of vectors in column.", typeConverter=TypeConverters.toInt ) <NEW_LINE> handleInvalid = Param( Params._dummy(), "handleInvalid", "How to handle invalid vectors in inputCol. Invalid vectors include " "nulls and vectors with the wrong size. The options are `skip` (filter " "out rows with invalid vectors), `error` (throw an error) and " "`optimistic` (do not check the vector size, and keep all rows). " "`error` by default.", TypeConverters.toString, ) <NEW_LINE> @keyword_only <NEW_LINE> def __init__(self, *, inputCol=None, size=None, handleInvalid="error"): <NEW_LINE> <INDENT> super(VectorSizeHint, self).__init__() <NEW_LINE> self._java_obj = self._new_java_obj("org.apache.spark.ml.feature.VectorSizeHint", self.uid) <NEW_LINE> self._setDefault(handleInvalid="error") <NEW_LINE> self.setParams(**self._input_kwargs) <NEW_LINE> <DEDENT> @keyword_only <NEW_LINE> @since("2.3.0") <NEW_LINE> def setParams(self, *, inputCol=None, size=None, handleInvalid="error"): <NEW_LINE> <INDENT> kwargs = self._input_kwargs <NEW_LINE> return self._set(**kwargs) <NEW_LINE> <DEDENT> @since("2.3.0") <NEW_LINE> def getSize(self): <NEW_LINE> <INDENT> return self.getOrDefault(self.size) <NEW_LINE> <DEDENT> @since("2.3.0") <NEW_LINE> def setSize(self, value): <NEW_LINE> <INDENT> return self._set(size=value) <NEW_LINE> <DEDENT> def setInputCol(self, value): <NEW_LINE> <INDENT> return self._set(inputCol=value) <NEW_LINE> <DEDENT> def setHandleInvalid(self, value): <NEW_LINE> <INDENT> return self._set(handleInvalid=value)
A feature transformer that adds size information to the metadata of a vector column. VectorAssembler needs size information for its input columns and cannot be used on streaming dataframes without this metadata. .. versionadded:: 2.3.0 Notes ----- VectorSizeHint modifies `inputCol` to include size metadata and does not have an outputCol. Examples -------- >>> from pyspark.ml.linalg import Vectors >>> from pyspark.ml import Pipeline, PipelineModel >>> data = [(Vectors.dense([1., 2., 3.]), 4.)] >>> df = spark.createDataFrame(data, ["vector", "float"]) >>> >>> sizeHint = VectorSizeHint(inputCol="vector", size=3, handleInvalid="skip") >>> vecAssembler = VectorAssembler(inputCols=["vector", "float"], outputCol="assembled") >>> pipeline = Pipeline(stages=[sizeHint, vecAssembler]) >>> >>> pipelineModel = pipeline.fit(df) >>> pipelineModel.transform(df).head().assembled DenseVector([1.0, 2.0, 3.0, 4.0]) >>> vectorSizeHintPath = temp_path + "/vector-size-hint-pipeline" >>> pipelineModel.save(vectorSizeHintPath) >>> loadedPipeline = PipelineModel.load(vectorSizeHintPath) >>> loaded = loadedPipeline.transform(df).head().assembled >>> expected = pipelineModel.transform(df).head().assembled >>> loaded == expected True
6259903e6fece00bbacccc06
class Critter(object): <NEW_LINE> <INDENT> def __init__(self, name, hunger = 0, boredom = 0): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.hunger = hunger <NEW_LINE> self.boredom = boredom <NEW_LINE> <DEDENT> def __pass_time(self): <NEW_LINE> <INDENT> self.hunger += 1 <NEW_LINE> self.boredom += 1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> rep = "Critter object\n" <NEW_LINE> rep += "name: " + self.name + "\n" <NEW_LINE> return rep <NEW_LINE> <DEDENT> @property <NEW_LINE> def mood(self): <NEW_LINE> <INDENT> unhappiness = self.hunger + self.boredom <NEW_LINE> if unhappiness < 5: <NEW_LINE> <INDENT> m = "happy" <NEW_LINE> <DEDENT> elif 5 <= unhappiness <= 10: <NEW_LINE> <INDENT> m = "okay" <NEW_LINE> <DEDENT> elif 11 <= unhappiness <= 15: <NEW_LINE> <INDENT> m = "frustrated" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m = "mad" <NEW_LINE> <DEDENT> return m <NEW_LINE> <DEDENT> def talk(self): <NEW_LINE> <INDENT> print("I'm", self.name, "and I feel", self.mood, "now.\n") <NEW_LINE> self.__pass_time() <NEW_LINE> <DEDENT> def eat(self, food = 4): <NEW_LINE> <INDENT> print("Brruppp. Thank you.") <NEW_LINE> self.hunger -= food <NEW_LINE> if self.hunger < 0: <NEW_LINE> <INDENT> self.hunger = 0 <NEW_LINE> <DEDENT> self.__pass_time() <NEW_LINE> <DEDENT> def play(self, fun = 4): <NEW_LINE> <INDENT> print("Wheee!") <NEW_LINE> self.boredom -= fun <NEW_LINE> if self.boredom < 0: <NEW_LINE> <INDENT> self.boredom = 0 <NEW_LINE> <DEDENT> self.__pass_time() <NEW_LINE> <DEDENT> def value(self): <NEW_LINE> <INDENT> crit_name = input("What do you want to name your critter: ") <NEW_LINE> crit = Critter(crit_name) <NEW_LINE> print(crit)
A virtual pet.
6259903e07d97122c4217ef5
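A short driver for the Critter class above (not part of the original record), exercising the public methods and the read-only mood property; it assumes the hunger-clamping fix in eat() is in place.

# Illustrative driver for the Critter class defined above.
crit = Critter("Mittens")

crit.talk()        # hunger and boredom each rise by 1 afterwards
crit.eat(food=2)   # hunger drops and is clamped at 0, then time passes
crit.play(fun=6)   # boredom drops and is clamped at 0, then time passes

print("Current mood:", crit.mood)   # property: happy / okay / frustrated / mad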
class EndpointAuthorization(object): <NEW_LINE> <INDENT> deserialized_types = { 'object_type': 'str' } <NEW_LINE> attribute_map = { 'object_type': 'type' } <NEW_LINE> supports_multiple_types = False <NEW_LINE> discriminator_value_class_map = { 'AWS_IAM': 'ask_smapi_model.v0.development_events.subscriber.endpoint_aws_authorization.EndpointAwsAuthorization' } <NEW_LINE> json_discriminator_key = "type" <NEW_LINE> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def __init__(self, object_type=None): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> self.object_type = object_type <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_real_child_model(cls, data): <NEW_LINE> <INDENT> discriminator_value = data[cls.json_discriminator_key] <NEW_LINE> return cls.discriminator_value_class_map.get(discriminator_value) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, EndpointAuthorization): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Authorization information to be able to publish notification to specified endpoint. :param object_type: :type object_type: (optional) str .. note:: This is an abstract class. Use the following mapping, to figure out the model class to be instantiated, that sets ``type`` variable. | AWS_IAM: :py:class:`ask_smapi_model.v0.development_events.subscriber.endpoint_aws_authorization.EndpointAwsAuthorization`
6259903e21bff66bcd723ec0
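A sketch of how the discriminator mapping above is meant to resolve a concrete subclass during deserialization; the payload dict (including the 'arn' key) is hypothetical, and the snippet assumes the EndpointAuthorization class above is in scope.

# Hypothetical payload; only the "type" key matters for class resolution.
payload = {"type": "AWS_IAM", "arn": "arn:aws:iam::123456789012:role/example"}

# get_real_child_model reads payload[json_discriminator_key] ("type") and
# returns the class path registered in discriminator_value_class_map, here
# the EndpointAwsAuthorization subclass.
child_class_path = EndpointAuthorization.get_real_child_model(payload)
print(child_class_path)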
class AutoInitModelMixin(object): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> @declared_attr <NEW_LINE> def __tablename__(cls): <NEW_LINE> <INDENT> return cls.__name__.lower() <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> for attr in (a for a in dir(self) if not a.startswith('_')): <NEW_LINE> <INDENT> attr_obj = getattr(self, attr) <NEW_LINE> if isinstance(attr_obj, db.Column): <NEW_LINE> <INDENT> if attr in kwargs: <NEW_LINE> <INDENT> setattr(self, attr, kwargs[attr]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if hasattr(attr_obj, 'default'): <NEW_LINE> <INDENT> if callable(attr_obj.default): <NEW_LINE> <INDENT> setattr(self, attr, attr_obj.default()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, attr, attr_obj.default)
Mixin that populates a model's columns automatically (no need to define an __init__ method) and sets the declared default value, if any. Also sets the model's id and __tablename__ automatically.
6259903e30dc7b76659a0a88
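A hypothetical model using the mixin above: the Widget class and its columns are illustrative only, `db` is the same Flask-SQLAlchemy handle the mixin refers to, and the comments describe the mixin's intended default handling rather than guaranteed output.

# Hypothetical model built on AutoInitModelMixin; Widget and its columns
# are illustrative only.
class Widget(AutoInitModelMixin, db.Model):
    name = db.Column(db.String(50))
    quantity = db.Column(db.Integer, default=0)

# No __init__ needed: keyword arguments matching column names are applied;
# columns left out are meant to fall back to their declared defaults.
w = Widget(name="gear")
print(w.name, w.quantity)  # expected "gear 0" if the default handling applies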
class Users(UserMixin, db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> erp_user_id = db.Column(db.Integer, unique=True, nullable=False) <NEW_LINE> erp_user_name = db.Column(db.String(50), nullable=False) <NEW_LINE> tm_email_id = db.Column(db.String(50), unique=True, nullable=False) <NEW_LINE> tm_password = db.Column(db.String(80), nullable=False) <NEW_LINE> tm_user_role = db.Column(db.String(10), nullable=False, default="developer") <NEW_LINE> tm_latest_task = db.Column(custom_date) <NEW_LINE> def is_authenticated(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_active(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_anonymous(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return self.id <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.erp_user_name
This class models the Users table and provides the Flask-Login user methods.
6259903ed10714528d69efb7
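A hedged sketch of wiring the Users model into Flask-Login: the login_manager instance, the werkzeug hashing call and the create_user helper are standard Flask patterns assumed here, not part of the record.

# Assumes an existing Flask-Login LoginManager named login_manager and the
# same `db` session used by the model; create_user is a hypothetical helper.
from werkzeug.security import generate_password_hash


@login_manager.user_loader
def load_user(user_id):
    # Flask-Login passes back the value returned by get_id() as a string.
    return Users.query.get(int(user_id))


def create_user(erp_user_id, erp_user_name, email, raw_password):
    user = Users(
        erp_user_id=erp_user_id,
        erp_user_name=erp_user_name,
        tm_email_id=email,
        # Never store plain text; note the hash can exceed String(80), so the
        # tm_password column may need to be widened.
        tm_password=generate_password_hash(raw_password),
    )
    db.session.add(user)
    db.session.commit()
    return user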