Dataset columns: "Unnamed: 0" (int64, values 0-389k), "code" (string, lengths 26-79.6k), "docstring" (string, lengths 1-46.9k).
5,700
def validate_value(self, value):
    if self.readonly:
        raise ValidationError(
            self.record,
            "Cannot set readonly field {}".format(self.name))
    if value not in (None, self._unset):
        if self.supported_types and not isinstance(value, tuple(self.supported_types)):
            raise ValidationError(
                self.record,
                "Field {} expects one of {}, got {} instead".format(
                    self.name,
                    ", ".join([repr(t.__name__) for t in self.supported_types]),
                    type(value).__name__))
Validate value is an acceptable type during set_python operation
5,701
def _GetNumberOfSeconds(self, fat_date_time):
    day_of_month = (fat_date_time & 0x1f)
    month = ((fat_date_time >> 5) & 0x0f)
    year = (fat_date_time >> 9) & 0x7f
    days_per_month = self._GetDaysPerMonth(year, month)
    if day_of_month < 1 or day_of_month > days_per_month:
        raise ValueError('Day of month value out of bounds.')
    number_of_days = self._GetDayOfYear(1980 + year, month, day_of_month)
    number_of_days -= 1
    for past_year in range(0, year):
        number_of_days += self._GetNumberOfDaysInYear(past_year)
    fat_date_time >>= 16
    seconds = (fat_date_time & 0x1f) * 2
    minutes = (fat_date_time >> 5) & 0x3f
    hours = (fat_date_time >> 11) & 0x1f
    if hours not in range(0, 24):
        raise ValueError('Hours value out of bounds.')
    if minutes not in range(0, 60):
        raise ValueError('Minutes value out of bounds.')
    if seconds not in range(0, 60):
        raise ValueError('Seconds value out of bounds.')
    number_of_seconds = (((hours * 60) + minutes) * 60) + seconds
    number_of_seconds += number_of_days * definitions.SECONDS_PER_DAY
    return number_of_seconds
Retrieves the number of seconds from a FAT date time. Args: fat_date_time (int): FAT date time. Returns: int: number of seconds since January 1, 1980 00:00:00. Raises: ValueError: if the month, day of month, hours, minutes or seconds value is out of bounds.
5,702
def render_metadata(**kwargs):
    # HTML literals restored from the template shown in the docstring
    html = '<div class="metadata">'
    service_logo = kwargs.get('service_logo', None)
    if service_logo:
        html += '<img class="metadata-logo" src="{}">'.format(service_logo)
    service_name = kwargs.get('service_name', None)
    if service_name:
        html += '<p class="metadata-name">{}</p>'.format(service_name)
    timestamp = kwargs.get('timestamp', None)
    if timestamp:
        html += '<p class="metadata-timestamp">'
        timestamp_link = kwargs.get('timestamp_link', None)
        if timestamp_link:
            html += '<a href="{timestamp_link}">{timestamp}</a>'.format(
                timestamp_link=timestamp_link, timestamp=timestamp)
        else:
            html += timestamp
        html += '</p>'
    html += '</div>'
    return html  # assumed: the rendered fragment is returned (lost in the source)
Unstrict template block for rendering metadata: <div class="metadata"> <img class="metadata-logo" src="{service_logo}"> <p class="metadata-name">{service_name}</p> <p class="metadata-timestamp"> <a href="{timestamp_link}">{timestamp}</a> </p> </div>
5,703
def compute_consistency_score(returns_test, preds):
    returns_test_cum = cum_returns(returns_test, starting_value=1.)
    cum_preds = np.cumprod(preds + 1, 1)
    # percentileofscore kind assumed to be 'weak' (literal lost in the source)
    q = [sp.stats.percentileofscore(cum_preds[:, i],
                                    returns_test_cum.iloc[i],
                                    kind='weak')
         for i in range(len(returns_test_cum))]
    return 100 - np.abs(50 - np.mean(q)) / .5
Compute Bayesian consistency score. Parameters ---------- returns_test : pd.Series Observed cumulative returns. preds : numpy.array Multiple (simulated) cumulative returns. Returns ------- Consistency score Score from 100 (returns_test perfectly on the median line of the Bayesian cone spanned by preds) to 0 (returns_test completely outside of Bayesian cone.)
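A quick numeric illustration of the scoring rule (not from the original source): percentile ranks that sit on the median of the cone score 100, while ranks pinned at an extreme score 0.

    import numpy as np
    q = [50.0, 50.0, 50.0]              # observed returns on the cone's median
    100 - np.abs(50 - np.mean(q)) / .5  # -> 100.0
    q = [0.0, 0.0, 0.0]                 # observed returns below every simulation
    100 - np.abs(50 - np.mean(q)) / .5  # -> 0.0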
5,704
def start_end(data, num_start=250, num_end=100, full_output=False):
    if num_start < 0:
        num_start = 0
    if num_end < 0:
        num_end = 0
    if data.shape[0] < (num_start + num_end):
        raise ValueError('number of events to discard greater than '
                         'total number of events')
    mask = np.ones(shape=data.shape[0], dtype=bool)
    mask[:num_start] = False
    if num_end > 0:
        mask[-num_end:] = False
    gated_data = data[mask]
    if full_output:
        StartEndGateOutput = collections.namedtuple(
            'StartEndGateOutput', ['gated_data', 'mask'])
        return StartEndGateOutput(gated_data=gated_data, mask=mask)
    else:
        return gated_data
Gate out first and last events. Parameters ---------- data : FCSData or numpy array NxD flow cytometry data where N is the number of events and D is the number of parameters (aka channels). num_start, num_end : int, optional Number of events to gate out from beginning and end of `data`. Ignored if less than 0. full_output : bool, optional Flag specifying to return additional outputs. If true, the outputs are given as a namedtuple. Returns ------- gated_data : FCSData or numpy array Gated flow cytometry data of the same format as `data`. mask : numpy array of bool, only if ``full_output==True`` Boolean gate mask used to gate data such that ``gated_data = data[mask]``. Raises ------ ValueError If the number of events to discard is greater than the total number of events in `data`.
5,705
def assoc(self, sitecol, assoc_dist, mode):
    assert mode in ('strict', 'warn', 'filter'), mode
    dic = {}
    discarded = []
    for sid, lon, lat in zip(sitecol.sids, sitecol.lons, sitecol.lats):
        obj, distance = self.get_closest(lon, lat)
        if assoc_dist is None:
            dic[sid] = obj  # associate all
        elif distance <= assoc_dist:
            dic[sid] = obj  # associate within
        elif mode == 'warn':
            dic[sid] = obj  # associate anyway, but complain
            # message text reconstructed; the original literal was lost
            logging.warning(
                'The closest site (%s %s) is more than %d km away from '
                'site #%d (%s %s)', obj['lon'], obj['lat'],
                int(distance), sid, lon, lat)
        elif mode == 'filter':
            discarded.append(obj)
        elif mode == 'strict':
            raise SiteAssociationError(
                'There is nothing closer than %s km to site (%s %s)'
                % (assoc_dist, lon, lat))
    if not dic:
        raise SiteAssociationError(
            'No sites could be associated within %s km' % assoc_dist)
    return (sitecol.filtered(dic),
            numpy.array([dic[sid] for sid in sorted(dic)]),
            discarded)
:param sitecol: a (filtered) site collection :param assoc_dist: the maximum distance for association :param mode: 'strict', 'warn' or 'filter' :returns: filtered site collection, filtered objects, discarded
5,706
def put(self, robj, w=None, dw=None, pw=None, return_body=None,
        if_none_match=None, timeout=None):
    raise NotImplementedError
Stores an object.
5,707
def get_blueprint(service_brokers: Union[List[ServiceBroker], ServiceBroker],
                  broker_credentials: Union[None, List[BrokerCredentials], BrokerCredentials],
                  logger: logging.Logger) -> Blueprint:
    # blueprint name, route methods, and a few error/message literals below are
    # reconstructed (they were stripped in the source)
    openbroker = Blueprint('open_broker', __name__)
    service_brokers = ensure_list(service_brokers)

    logger.debug("Apply print_request filter for debugging")
    openbroker.before_request(print_request)

    if DISABLE_VERSION_CHECK:
        logger.warning(
            "Minimum API version is not checked, this can cause illegal contracts between service broker and platform!"
        )
    else:
        logger.debug("Apply check_version filter for version %s" % str(MIN_VERSION))
        openbroker.before_request(check_version)

    logger.debug("Apply check_originating_identity filter")
    openbroker.before_request(check_originating_identity)

    if broker_credentials is not None:
        broker_credentials = ensure_list(broker_credentials)
        logger.debug("Apply check_auth filter with {} credentials".format(len(broker_credentials)))
        openbroker.before_request(get_auth_filter(broker_credentials))

    def get_broker_by_id(service_id: str):
        for service in service_brokers:
            if service.service_id() == service_id:
                return service
        raise KeyError('Service {} not found'.format(service_id))

    def add_service_id_to_async_response(response, service_id: str):
        if response.is_async:
            if response.operation is None:
                response.operation = service_id
            else:
                response.operation = ' '.join((service_id, response.operation))

    def extract_authorization_username(request: Request):
        if request.authorization is not None:
            return request.authorization.username
        else:
            return None

    @openbroker.errorhandler(Exception)
    def error_handler(e):
        logger.exception(e)
        return to_json_response(ErrorResponse(
            description=str(e)
        )), HTTPStatus.INTERNAL_SERVER_ERROR

    @openbroker.errorhandler(NotImplementedError)
    def not_implemented_handler(e):
        logger.exception(e)
        return to_json_response(ErrorResponse(
            description=str(e)
        )), HTTPStatus.NOT_IMPLEMENTED

    @openbroker.route("/v2/catalog", methods=['GET'])
    def catalog():
        return to_json_response(CatalogResponse(list(s.catalog() for s in service_brokers)))

    @openbroker.route("/v2/service_instances/<instance_id>", methods=['PUT'])
    @requires_application_json
    def provision(instance_id):
        try:
            accepts_incomplete = 'true' == request.args.get("accepts_incomplete", 'false')
            provision_details = ProvisionDetails(**json.loads(request.data))
            provision_details.originating_identity = request.originating_identity
            provision_details.authorization_username = extract_authorization_username(request)
            broker = get_broker_by_id(provision_details.service_id)
            if not broker.check_plan_id(provision_details.plan_id):
                raise TypeError('plan_id not found in this service.')
        except (TypeError, KeyError, JSONDecodeError) as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(description=str(e))), HTTPStatus.BAD_REQUEST

        try:
            result = broker.provision(instance_id, provision_details, accepts_incomplete)
            add_service_id_to_async_response(result, broker.service_id())
        except errors.ErrInstanceAlreadyExists as e:
            logger.exception(e)
            return to_json_response(EmptyResponse()), HTTPStatus.CONFLICT
        except errors.ErrInvalidParameters as e:
            return to_json_response(ErrorResponse('InvalidParameters', str(e))), HTTPStatus.BAD_REQUEST
        except errors.ErrAsyncRequired as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(
                error="AsyncRequired",
                description="This service plan requires client support for asynchronous service operations."
            )), HTTPStatus.UNPROCESSABLE_ENTITY

        if result.state == ProvisionState.IS_ASYNC:
            return to_json_response(ProvisioningResponse(result.dashboard_url, result.operation)), HTTPStatus.ACCEPTED
        elif result.state == ProvisionState.IDENTICAL_ALREADY_EXISTS:
            return to_json_response(ProvisioningResponse(result.dashboard_url, result.operation)), HTTPStatus.OK
        elif result.state == ProvisionState.SUCCESSFUL_CREATED:
            return to_json_response(ProvisioningResponse(result.dashboard_url, result.operation)), HTTPStatus.CREATED
        else:
            raise errors.ServiceException('Unknown ProvisionState')

    @openbroker.route("/v2/service_instances/<instance_id>", methods=['PATCH'])
    @requires_application_json
    def update(instance_id):
        try:
            accepts_incomplete = 'true' == request.args.get("accepts_incomplete", 'false')
            update_details = UpdateDetails(**json.loads(request.data))
            update_details.originating_identity = request.originating_identity
            update_details.authorization_username = extract_authorization_username(request)
            broker = get_broker_by_id(update_details.service_id)
            if not broker.check_plan_id(update_details.plan_id):
                raise TypeError('plan_id not found in this service.')
        except (TypeError, KeyError, JSONDecodeError) as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(description=str(e))), HTTPStatus.BAD_REQUEST

        try:
            result = broker.update(instance_id, update_details, accepts_incomplete)
            add_service_id_to_async_response(result, broker.service_id())
        except errors.ErrInvalidParameters as e:
            return to_json_response(ErrorResponse('InvalidParameters', str(e))), HTTPStatus.BAD_REQUEST
        except errors.ErrAsyncRequired as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(
                error="AsyncRequired",
                description="This service plan requires client support for asynchronous service operations."
            )), HTTPStatus.UNPROCESSABLE_ENTITY

        if result.is_async:
            return to_json_response(UpdateResponse(result.operation, result.dashboard_url)), HTTPStatus.ACCEPTED
        else:
            return to_json_response(UpdateResponse(None, result.dashboard_url)), HTTPStatus.OK

    @openbroker.route("/v2/service_instances/<instance_id>/service_bindings/<binding_id>", methods=['PUT'])
    @requires_application_json
    def bind(instance_id, binding_id):
        try:
            binding_details = BindDetails(**json.loads(request.data))
            binding_details.originating_identity = request.originating_identity
            binding_details.authorization_username = extract_authorization_username(request)
            broker = get_broker_by_id(binding_details.service_id)
            if not broker.check_plan_id(binding_details.plan_id):
                raise TypeError('plan_id not found in this service.')
        except (TypeError, KeyError, JSONDecodeError) as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(description=str(e))), HTTPStatus.BAD_REQUEST

        try:
            result = broker.bind(instance_id, binding_id, binding_details)
        except errors.ErrBindingAlreadyExists as e:
            logger.exception(e)
            return to_json_response(EmptyResponse()), HTTPStatus.CONFLICT
        except errors.ErrAppGuidNotProvided as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(
                error="RequiresApp",
                description="This service supports generation of credentials through binding an application only."
            )), HTTPStatus.UNPROCESSABLE_ENTITY

        response = BindResponse(
            credentials=result.credentials,
            syslog_drain_url=result.syslog_drain_url,
            route_service_url=result.route_service_url,
            volume_mounts=result.volume_mounts
        )
        if result.state == BindState.SUCCESSFUL_BOUND:
            return to_json_response(response), HTTPStatus.CREATED
        elif result.state == BindState.IDENTICAL_ALREADY_EXISTS:
            return to_json_response(response), HTTPStatus.OK
        else:
            raise errors.ServiceException('Unknown BindState')

    @openbroker.route("/v2/service_instances/<instance_id>/service_bindings/<binding_id>", methods=['DELETE'])
    def unbind(instance_id, binding_id):
        try:
            plan_id = request.args["plan_id"]
            service_id = request.args["service_id"]
            unbind_details = UnbindDetails(plan_id, service_id)
            unbind_details.originating_identity = request.originating_identity
            unbind_details.authorization_username = extract_authorization_username(request)
            broker = get_broker_by_id(unbind_details.service_id)
            if not broker.check_plan_id(unbind_details.plan_id):
                raise TypeError('plan_id not found in this service.')
        except (TypeError, KeyError) as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(description=str(e))), HTTPStatus.BAD_REQUEST

        try:
            broker.unbind(instance_id, binding_id, unbind_details)
        except errors.ErrBindingDoesNotExist as e:
            logger.exception(e)
            return to_json_response(EmptyResponse()), HTTPStatus.GONE

        return to_json_response(EmptyResponse()), HTTPStatus.OK

    @openbroker.route("/v2/service_instances/<instance_id>", methods=['DELETE'])
    def deprovision(instance_id):
        try:
            plan_id = request.args["plan_id"]
            service_id = request.args["service_id"]
            accepts_incomplete = 'true' == request.args.get("accepts_incomplete", 'false')
            deprovision_details = DeprovisionDetails(plan_id, service_id)
            deprovision_details.originating_identity = request.originating_identity
            deprovision_details.authorization_username = extract_authorization_username(request)
            broker = get_broker_by_id(deprovision_details.service_id)
            if not broker.check_plan_id(deprovision_details.plan_id):
                raise TypeError('plan_id not found in this service.')
        except (TypeError, KeyError) as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(description=str(e))), HTTPStatus.BAD_REQUEST

        try:
            result = broker.deprovision(instance_id, deprovision_details, accepts_incomplete)
            add_service_id_to_async_response(result, broker.service_id())
        except errors.ErrInstanceDoesNotExist as e:
            logger.exception(e)
            return to_json_response(EmptyResponse()), HTTPStatus.GONE
        except errors.ErrAsyncRequired as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(
                error="AsyncRequired",
                description="This service plan requires client support for asynchronous service operations."
            )), HTTPStatus.UNPROCESSABLE_ENTITY

        if result.is_async:
            return to_json_response(DeprovisionResponse(result.operation)), HTTPStatus.ACCEPTED
        else:
            return to_json_response(EmptyResponse()), HTTPStatus.OK

    @openbroker.route("/v2/service_instances/<instance_id>/last_operation", methods=['GET'])
    def last_operation(instance_id):
        operation_data = request.args.get("operation", None)
        data = operation_data.split(' ', maxsplit=1)
        service_id = data[0]
        if len(data) == 2:
            operation_data = data[1]
        else:
            operation_data = None

        try:
            broker = get_broker_by_id(service_id)
        except KeyError as e:
            logger.exception(e)
            return to_json_response(ErrorResponse(description=str(e))), HTTPStatus.BAD_REQUEST

        result = broker.last_operation(instance_id, operation_data)
        return to_json_response(LastOperationResponse(result.state, result.description)), HTTPStatus.OK

    return openbroker
Returns the blueprint with service broker api. :param service_brokers: Services that this broker exposes :param broker_credentials: Optional Usernames and passwords that will be required to communicate with service broker :param logger: Used for api logs. This will not influence Flasks logging behavior. :return: Blueprint to register with Flask app instance
5,708
def findall(self, string, pos=0, endpos=sys.maxint):
    matchlist = []
    state = _State(string, pos, endpos, self.flags)
    while state.start <= state.end:
        state.reset()
        state.string_position = state.start
        if not state.search(self._code):
            break
        match = SRE_Match(self, state)
        if self.groups == 0 or self.groups == 1:
            item = match.group(self.groups)
        else:
            item = match.groups("")
        matchlist.append(item)
        if state.string_position == state.start:
            state.start += 1
        else:
            state.start = state.string_position
    return matchlist
Return a list of all non-overlapping matches of pattern in string.
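For reference, the group-count branching above mirrors the standard library's re.findall semantics; a quick illustration with stdlib re (not the pure-Python engine itself):

    import re
    re.findall(r"a.", "abac")      # no groups -> whole matches: ['ab', 'ac']
    re.findall(r"a(.)", "abac")    # one group -> that group: ['b', 'c']
    re.findall(r"(a)(.)", "abac")  # several groups -> tuples: [('a', 'b'), ('a', 'c')]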
5,709
def rapidfire(self, max_nlaunch=-1, max_loops=1, sleep_time=5):
    num_launched, do_exit, launched = 0, False, []
    for count in range(max_loops):
        if do_exit:
            break
        if count > 0:
            time.sleep(sleep_time)
        tasks = self.fetch_tasks_to_run()
        # ... task-launching logic is missing from the source ...
        do_exit = True
        break
    self.flow.pickle_dump()
    return num_launched
Keeps submitting `Tasks` until we are out of jobs or no job is ready to run. Args: max_nlaunch: Maximum number of launches. default: no limit. max_loops: Maximum number of loops sleep_time: seconds to sleep between rapidfire loop iterations Returns: The number of tasks launched.
5,710
def Fsphere(q, R):
    return 4 * np.pi / q ** 3 * (np.sin(q * R) - q * R * np.cos(q * R))
Scattering form-factor amplitude of a sphere normalized to F(q=0)=V Inputs: ------- ``q``: independent variable ``R``: sphere radius Formula: -------- ``4*pi/q^3 * (sin(qR) - qR*cos(qR))``
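A minimal sanity check of the stated normalization F(q -> 0) = V, assuming only numpy:

    import numpy as np
    R = 2.0
    q = np.array([1e-4, 0.5, 1.0])
    F = 4 * np.pi / q**3 * (np.sin(q * R) - q * R * np.cos(q * R))
    V = 4 / 3 * np.pi * R**3   # sphere volume, ~33.51
    print(F[0], V)             # F at tiny q approaches V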
5,711
def find_checker(func: CallableT) -> Optional[CallableT]:
    contract_checker = None
    for a_wrapper in _walk_decorator_stack(func):
        if hasattr(a_wrapper, "__preconditions__") or hasattr(a_wrapper, "__postconditions__"):
            contract_checker = a_wrapper
    return contract_checker
Iterate through the decorator stack till we find the contract checker.
5,712
def GetBoundingRectangles(self) -> list:
    floats = self.textRange.GetBoundingRectangles()
    rects = []
    for i in range(len(floats) // 4):
        rect = Rect(int(floats[i * 4]),
                    int(floats[i * 4 + 1]),
                    int(floats[i * 4]) + int(floats[i * 4 + 2]),
                    int(floats[i * 4 + 1]) + int(floats[i * 4 + 3]))
        rects.append(rect)
    return rects
Call IUIAutomationTextRange::GetBoundingRectangles. Return list, a list of `Rect` bounding rectangles, one for each fully or partially visible line of text in the text range. Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextrange-getboundingrectangles Example: for rect in textRange.GetBoundingRectangles(): print(rect.left, rect.top, rect.right, rect.bottom, rect.width(), rect.height(), rect.xcenter(), rect.ycenter())
5,713
def backend(entry):
    # URL pattern name reconstructed; the original literal was lost
    return '%s://%s%s' % (
        PROTOCOL, Site.objects.get_current().domain,
        reverse('zinnia:entry_shortlink', args=[base36(entry.pk)]))
Default URL shortener backend for Zinnia.
5,714
def check(self, instance):
    self.log.debug("Running instance: %s", instance)
    custom_tags = instance.get('tags', [])
    if not instance or self.PROC_NAME not in instance:
        raise GUnicornCheckError("instance must specify: %s" % self.PROC_NAME)
    proc_name = instance.get(self.PROC_NAME)
    master_procs = self._get_master_proc_by_name(proc_name, custom_tags)
    worker_procs = self._get_workers_from_procs(master_procs)
    working, idle = self._count_workers(worker_procs)
    msg = "%s working and %s idle workers for %s" % (working, idle, proc_name)
    status = AgentCheck.CRITICAL if working == 0 and idle == 0 else AgentCheck.OK
    tags = ['app:' + proc_name] + custom_tags
    self.service_check(self.SVC_NAME, status, tags=tags, message=msg)
    self.log.debug("instance %s procs - working:%s idle:%s" % (proc_name, working, idle))
    self.gauge("gunicorn.workers", working, tags + self.WORKING_TAGS)
    self.gauge("gunicorn.workers", idle, tags + self.IDLE_TAGS)
Collect metrics for the given gunicorn instance.
5,715
async def delete(self, query, *, dc=None):
    query_id = extract_attr(query, keys=["ID"])
    response = await self._api.delete("/v1/query", query_id, params={"dc": dc})
    return response.status == 200
Delete existing prepared query Parameters: query (ObjectID): Query ID dc (str): Specify datacenter that will be used. Defaults to the agent's local datacenter. Results: bool: ``True`` on success
5,716
def _iter_info(self, niter, level=logging.INFO):
    max_mis = self.iter_mis[niter - 1]
    # message text reconstructed; the original format string was lost
    msg = 'Iter {} -- max mismatch = {:.7g}'.format(niter, max_mis)
    logger.info(msg)
Log iteration number and mismatch Parameters ---------- level logging level Returns ------- None
5,717
def remove(self, state_element, recursive=True, force=False, destroy=True):
    if isinstance(state_element, State):
        return self.remove_state(state_element.state_id, recursive=recursive,
                                 force=force, destroy=destroy)
    elif isinstance(state_element, Transition):
        return self.remove_transition(state_element.transition_id, destroy=destroy)
    elif isinstance(state_element, DataFlow):
        return self.remove_data_flow(state_element.data_flow_id, destroy=destroy)
    elif isinstance(state_element, ScopedVariable):
        return self.remove_scoped_variable(state_element.data_port_id, destroy=destroy)
    else:
        super(ContainerState, self).remove(state_element, force=force, destroy=destroy)
Remove item from state :param StateElement state_element: State or state element to be removed :param bool recursive: Only applies to removal of state and decides whether the removal should be called recursively on all child states :param bool force: if the removal should be forced without checking constraints :param bool destroy: a flag that signals that the state element will be fully removed and disassembled
5,718
def xml_to_region(xmlstr):
    # element names restored from the XML sample in the docstring
    xmldoc = minidom.parseString(xmlstr)
    region = ServiceBusRegion()
    for desc in _MinidomXmlToObject.get_children_from_path(
            xmldoc, 'entry', 'content', 'RegionCodeDescription'):
        node_value = _MinidomXmlToObject.get_first_child_node_value(desc, 'Code')
        if node_value is not None:
            region.code = node_value
        node_value = _MinidomXmlToObject.get_first_child_node_value(desc, 'FullName')
        if node_value is not None:
            region.fullname = node_value
    return region
Converts xml response to service bus region The xml format for region: <entry> <id>uuid:157c311f-081f-4b4a-a0ba-a8f990ffd2a3;id=1756759</id> <title type="text"></title> <updated>2013-04-10T18:25:29Z</updated> <content type="application/xml"> <RegionCodeDescription xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect" xmlns:i="http://www.w3.org/2001/XMLSchema-instance"> <Code>East Asia</Code> <FullName>East Asia</FullName> </RegionCodeDescription> </content> </entry>
5,719
def f_set(self, *args, **kwargs):
    for idx, arg in enumerate(args):
        valstr = self._translate_key(idx)
        self.f_set_single(valstr, arg)
    for key, arg in kwargs.items():
        self.f_set_single(key, arg)
Sets annotations Items in args are added as `annotation` and `annotation_X` where 'X' is the position in args for following arguments.
5,720
def read_file(self, file_name, section=None):
    file_name, section = self.parse_file_name_and_section(file_name, section)
    if not os.path.isfile(file_name):
        raise SettingsFileNotFoundError(file_name)
    parser = self.make_parser()
    with open(file_name) as fp:
        parser.read_file(fp)
    settings = OrderedDict()
    if parser.has_section(section):
        section_dict = parser[section]
        self.section_found_while_reading = True
    else:
        section_dict = parser.defaults().copy()
    extends = section_dict.get('extends')
    if extends:
        extends = self.decode_value(extends)
        extends, extends_section = self.parse_file_name_and_section(
            extends, extender=file_name, extender_section=section)
        settings.update(self.read_file(extends, extends_section))
    settings.update(section_dict)
    if not self.section_found_while_reading:
        raise SettingsFileSectionNotFoundError(section)
    return settings
Read settings from specified ``section`` of config file.
5,721
def get_host_mac(name=None, allow_array=False, **api_opts):
    data = get_host(name=name, **api_opts)
    if data and 'ipv4addrs' in data:
        l = []
        for a in data['ipv4addrs']:
            if 'mac' in a:
                l.append(a['mac'])
        if allow_array:
            return l
        if l:
            return l[0]
    return None
Get mac address from host record. Use `allow_array` to return possible multiple values. CLI Example: .. code-block:: bash salt-call infoblox.get_host_mac host=localhost.domain.com
5,722
def format(self):
    name = self._primary.value[0]
    if self.surname:
        if name:
            name += ' '
        name += self.surname
    if self._primary.value[2]:
        if name:
            name += ' '
        name += self._primary.value[2]
    return name
Format name for output. :return: Formatted name representation.
5,723
def __convert_to_df(a, val_col=None, group_col=None, val_id=None, group_id=None):
    if not group_col:
        group_col = 'groups'
    if not val_col:
        val_col = 'vals'
    if isinstance(a, DataFrame):
        x = a.copy()
        if not {group_col, val_col}.issubset(a.columns):
            raise ValueError('Specify correct column names using '
                             '`group_col` and `val_col` args')
        return x, val_col, group_col
    elif isinstance(a, list) or (isinstance(a, np.ndarray) and not a.shape.count(2)):
        grps_len = map(len, a)
        grps = list(it.chain(*[[i + 1] * l for i, l in enumerate(grps_len)]))
        vals = list(it.chain(*a))
        return DataFrame({val_col: vals, group_col: grps}), val_col, group_col
    elif isinstance(a, np.ndarray):
        if not (all([val_id, group_id])):
            if np.argmax(a.shape):
                a = a.T
            ax = [np.unique(a[:, 0]).size, np.unique(a[:, 1]).size]
            if np.asscalar(np.diff(ax)):
                __val_col = np.argmax(ax)
                __group_col = np.argmin(ax)
            else:
                raise ValueError('Cannot infer input format; please specify '
                                 '`val_id` and `group_id` args')
            cols = {__val_col: val_col, __group_col: group_col}
        else:
            cols = {val_id: val_col, group_id: group_col}
        cols_vals = dict(sorted(cols.items())).values()
        return DataFrame(a, columns=cols_vals), val_col, group_col
Hidden helper method to create a DataFrame with input data for further processing. Parameters ---------- a : array_like or pandas DataFrame object An array, any object exposing the array interface or a pandas DataFrame. Array must be two-dimensional. Second dimension may vary, i.e. groups may have different lengths. val_col : str, optional Name of a DataFrame column that contains dependent variable values (test or response variable). Values should have a non-nominal scale. Must be specified if `a` is a pandas DataFrame object. group_col : str, optional Name of a DataFrame column that contains independent variable values (grouping or predictor variable). Values should have a nominal scale (categorical). Must be specified if `a` is a pandas DataFrame object. val_id : int, optional Index of a column that contains dependent variable values (test or response variable). Should be specified if a NumPy ndarray is used as an input. It will be inferred from data, if not specified. group_id : int, optional Index of a column that contains independent variable values (grouping or predictor variable). Should be specified if a NumPy ndarray is used as an input. It will be inferred from data, if not specified. Returns ------- x : pandas DataFrame DataFrame with input data, `val_col` column contains numerical values and `group_col` column contains categorical values. val_col : str Name of a DataFrame column that contains dependent variable values (test or response variable). group_col : str Name of a DataFrame column that contains independent variable values (grouping or predictor variable). Notes ----- Inferrence algorithm for determining `val_id` and `group_id` args is rather simple, so it is better to specify them explicitly to prevent errors.
5,724
def remove_prefix(self, id):
    try:
        p = Prefix.get(int(id))
        p.remove()
    except NipapError, e:  # Python 2 syntax, kept from the source
        return json.dumps({'error': 1, 'message': e.args,
                           'type': type(e).__name__})
    return json.dumps(p, cls=NipapJSONEncoder)
Remove a prefix.
5,725
def setPage(self, pageId, page):
    page.setParent(self)
    if self.property("useShadow") is not False:
        effect = QtGui.QGraphicsDropShadowEffect(page)
        effect.setColor(QtGui.QColor('black'))  # color literal assumed
        effect.setBlurRadius(50)
        effect.setOffset(0, 0)
        page.setGraphicsEffect(effect)
    self._pages[pageId] = page
    if self._startId == -1:
        self._startId = pageId
Sets the page and id for the given page vs. auto-constructing it. This will allow the developer to create a custom order for IDs. :param pageId | <int> page | <projexui.widgets.xoverlaywizard.XOverlayWizardPage>
5,726
def _processFailedSuccessors(self, jobGraph):
    if jobGraph.jobStoreID in self.toilState.servicesIssued:
        logger.debug("Telling job: %s to terminate its services due to successor failure",
                     jobGraph.jobStoreID)
        self.serviceManager.killServices(self.toilState.servicesIssued[jobGraph.jobStoreID],
                                         error=True)
    elif jobGraph.jobStoreID in self.toilState.successorCounts:
        logger.debug("Job %s with ID: %s with failed successors still has successor jobs running",
                     jobGraph, jobGraph.jobStoreID)
    elif jobGraph.checkpoint is not None and jobGraph.remainingRetryCount > 1:
        self.issueJob(JobNode.fromJobGraph(jobGraph))
    else:
        logger.debug("Job %s is being processed as completely failed", jobGraph.jobStoreID)
        self.processTotallyFailedJob(jobGraph)
Some of the jobs successors failed then either fail the job or restart it if it has retries left and is a checkpoint job
5,727
def save(self, dolist=0):
    quoted = not dolist
    fields = 7 * [""]
    fields[0] = self.name
    fields[1] = self.type
    fields[2] = self.mode
    fields[3] = self.toString(self.value, quoted=quoted)
    if self.choice is not None:
        schoice = list(map(self.toString, self.choice))
        schoice.insert(0, '')
        schoice.append('')
        fields[4] = repr('|'.join(schoice))
    elif self.min not in [None, INDEF]:
        fields[4] = self.toString(self.min, quoted=quoted)
    if self.max not in [None, INDEF]:
        fields[5] = self.toString(self.max, quoted=quoted)
    if self.prompt:
        if quoted:
            sprompt = repr(self.prompt)
        else:
            sprompt = self.prompt
        # escape literals reconstructed; the original arguments were lost
        sprompt = sprompt.replace('\n', r'\n')
        sprompt = sprompt.replace('\r', r'\r')
        fields[6] = sprompt
    # remove trailing empty fields
    for i in [6, 5, 4]:
        if fields[i] != "":
            break
        del fields[i]
    if dolist:
        return fields
    else:
        return ",".join(fields)
Return .par format string for this parameter If dolist is set, returns fields as a list of strings. Default is to return a single string appropriate for writing to a file.
5,728
def In(sigOrVal, iterable):
    res = None
    for i in iterable:
        i = toHVal(i)
        if res is None:
            res = sigOrVal._eq(i)
        else:
            res = res | sigOrVal._eq(i)
    assert res is not None, "Parameter iterable is empty"
    return res
Hdl convertible in operator, check if any of items in "iterable" equals "sigOrVal"
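In plain-Python terms the loop above is an OR-fold; a rough analogue (a sketch that ignores the HDL-specific toHVal/_eq wrappers):

    from functools import reduce
    import operator

    def in_(value, iterable):
        # one equality term per item, OR-ed together
        return reduce(operator.or_, (value == i for i in iterable))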
5,729
def set_to_current(self):
    cur = self.get_current_file()
    if cur is not None:
        self.set_selection(cur)
    else:
        self.init_selection()
Set the selection to the currently open one :returns: None :rtype: None :raises: None
5,730
def run(self):
    while True:
        try:
            # database/collection and query keys assumed: this is the standard
            # MongoDB oplog-tailing pattern (the original literals were lost)
            cursor = JSON_CLIENT.json_client['local']['oplog.rs'].find(
                {'ts': {'$gt': self.last_timestamp}})
        except TypeError:
            pass
        else:
            cursor.add_option(2)   # tailable
            cursor.add_option(8)   # oplog replay
            cursor.add_option(32)  # await data
            self._retry()
            for doc in cursor:
                self.last_timestamp = doc['ts']
                if doc['ns'] in self.receivers:
                    self._run_namespace(doc)
        time.sleep(1)
main control loop for thread
5,731
def evaluate_script(self):
    editor = self.get_current_editor()
    if not editor:
        return False
    LOGGER.debug("> Evaluating content.")
    if self.evaluate_code(foundations.strings.to_string(editor.toPlainText().toUtf8())):
        self.ui_refresh.emit()
        return True
Evaluates current **Script_Editor_tabWidget** Widget tab Model editor content into the interactive console. :return: Method success. :rtype: bool
5,732
def set_display_mode(self, zoom, layout='continuous'):
    if (zoom == 'fullpage' or zoom == 'fullwidth' or zoom == 'real'
            or zoom == 'default' or not isinstance(zoom, basestring)):
        self.zoom_mode = zoom
    else:
        self.error('Incorrect zoom display mode: ' + zoom)
    if (layout == 'single' or layout == 'continuous' or layout == 'two'
            or layout == 'default'):
        self.layout_mode = layout
    else:
        self.error('Incorrect layout display mode: ' + layout)
Set display mode in viewer The "zoom" argument may be 'fullpage', 'fullwidth', 'real', 'default', or a number, interpreted as a percentage.
5,733
def serialize(self, now=None):
    created = self.created if self.created is not None else now
    el = etree.Element(utils.lxmlns("mets") + self.subsection, ID=self.id_string)
    if created:
        el.set("CREATED", created)
    status = self.get_status()
    if status:
        el.set("STATUS", status)
    if self.contents:
        el.append(self.contents.serialize())
    return el
Serialize this SubSection and all children to lxml Element and return it. :param str now: Default value for CREATED if none set :return: dmdSec/techMD/rightsMD/sourceMD/digiprovMD Element with all children
5,734
def vcs_init(self):
    VCS(os.path.join(self.outdir, self.name), self.pkg_data)
Initialize VCS repository.
5,735
def reduce_resource_name_to_task(res_name):
    if res_name[:3].lower() != "res":
        return res_name
    res_name = res_name[3:].lower()
    while res_name[-1].isdigit():
        res_name = res_name[:-1]
    return res_name
Assuming that the convention of naming resources associated with tasks as res[TASK][number], reduces such resource names to just the name of the task. This ensures that multiple copies of the same resource are treated the same. Resource names of different formats will be left untouched.
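A quick illustration with hypothetical resource names:

    reduce_resource_name_to_task("resBuild01")  # -> 'build'
    reduce_resource_name_to_task("resBuild02")  # -> 'build' (second copy, same task)
    reduce_resource_name_to_task("worker3")     # -> 'worker3' (no 'res' prefix, untouched)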
5,736
def configure_slack_logger(
        self,
        slack_webhook=None,
        log_level='ERROR',  # default level assumed; literal lost in the source
        log_format=ReportingFormats.SLACK_PRINT.value,
        custom_args=''
):
    # config section/option names assumed from the surrounding pattern
    slack_webhook = self.config.get_option(
        'LOGGING', 'slack_webhook', None, slack_webhook)
    log_level = self.config.get_option(
        'LOGGING', 'slack_level', None, log_level)
    slack_handler = HackySlackHandler(slack_webhook)
    self._configure_common(
        'slack_', log_level, log_format, 'Slack', slack_handler,
        custom_args=custom_args)
Logger for sending messages to Slack; an easy way to alert humans of issues. Note: Will try to overwrite minimum log level to enable requested log_level Will warn and not attach slack logger if missing webhook key Learn more about webhooks: https://api.slack.com/docs/message-attachments Args: slack_webhook (str): slack bot webhook (full URL) log_level (str): desired log level for handle https://docs.python.org/3/library/logging.html#logging-levels log_format (str): format for logging messages https://docs.python.org/3/library/logging.html#logrecord-attributes custom_args (str): special ID to include in messages
5,737
def setsebool(boolean, value, persist=False):
    if persist:
        cmd = 'setsebool -P {0} {1}'.format(boolean, value)
    else:
        cmd = 'setsebool {0} {1}'.format(boolean, value)
    return not __salt__['cmd.retcode'](cmd, python_shell=False)
Set the value for a boolean CLI Example: .. code-block:: bash salt '*' selinux.setsebool virt_use_usb off
5,738
def _put(self, url, data={}):
    r = requests.put(self._api_prefix + url,
                     data=json.dumps(data),
                     headers=self.headers,
                     auth=self.auth,
                     allow_redirects=False)
    return self._action(r)
Wrapper around request.put() to use the API prefix. Returns a JSON response.
5,739
def create_pie_chart(self, snapshot, filename=''):
    try:
        from pylab import figure, title, pie, axes, savefig
        from pylab import sum as pylab_sum
    except ImportError:
        return self.nopylab_msg % ("pie_chart")
    # ... computation of `classlist` and `sizelist` from `snapshot` is
    # missing from the source ...
    title("Snapshot (%s) Memory Distribution" % (snapshot.desc))
    figure(figsize=(8, 8))
    axes([0.1, 0.1, 0.8, 0.8])
    pie(sizelist, labels=classlist)
    savefig(filename, dpi=50)
    return self.chart_tag % (self.relative_path(filename))
Create a pie chart that depicts the distribution of the allocated memory for a given `snapshot`. The chart is saved to `filename`.
5,740
def get_repository_form_for_create(self, repository_record_types=None):
    if not self._can('create'):
        raise PermissionDenied()
    else:
        return self._provider_session.get_repository_form_for_create(repository_record_types)
Gets the repository form for creating new repositories. A new form should be requested for each create transaction. arg: repository_record_types (osid.type.Type[]): array of repository record types return: (osid.repository.RepositoryForm) - the repository form raise: NullArgument - ``repository_record_types`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - unable to get form for requested record types *compliance: mandatory -- This method must be implemented.*
5,741
def find_bounding_indices(arr, values, axis, from_below=True):
    indices_shape = list(arr.shape)
    indices_shape[axis] = len(values)
    indices = np.empty(indices_shape, dtype=np.int)
    good = np.empty(indices_shape, dtype=np.bool)
    store_slice = [slice(None)] * arr.ndim
    for level_index, value in enumerate(values):
        # Find locations where the value crosses between consecutive entries
        switches = np.abs(np.diff((arr <= value).astype(np.int), axis=axis))
        good_search = np.any(switches, axis=axis)
        if from_below:
            index = switches.argmax(axis=axis) + 1
        else:
            arr_slice = [slice(None)] * arr.ndim
            arr_slice[axis] = slice(None, None, -1)
            index = arr.shape[axis] - 1 - switches[tuple(arr_slice)].argmax(axis=axis)
        index[~good_search] = 0
        store_slice[axis] = level_index
        indices[tuple(store_slice)] = index
        good[tuple(store_slice)] = good_search
    above = broadcast_indices(arr, indices, arr.ndim, axis)
    below = broadcast_indices(arr, indices - 1, arr.ndim, axis)
    return above, below, good
Find the indices surrounding the values within arr along axis. Returns a set of above, below, good. Above and below are lists of arrays of indices. These lists are formulated such that they can be used directly to index into a numpy array and get the expected results (no extra slices or ellipsis necessary). `good` is a boolean array indicating the "columns" that actually had values to bound the desired value(s). Parameters ---------- arr : array-like Array to search for values values: array-like One or more values to search for in `arr` axis : int The dimension of `arr` along which to search. from_below : bool, optional Whether to search from "below" (i.e. low indices to high indices). If `False`, the search will instead proceed from high indices to low indices. Defaults to `True`. Returns ------- above : list of arrays List of broadcasted indices to the location above the desired value below : list of arrays List of broadcasted indices to the location below the desired value good : array Boolean array indicating where the search found proper bounds for the desired value
5,742
def nla_put_u8(msg, attrtype, value):
    data = bytearray(value if isinstance(value, c_uint8) else c_uint8(value))
    return nla_put(msg, attrtype, SIZEOF_U8, data)
Add 8 bit integer attribute to Netlink message. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L563 Positional arguments: msg -- Netlink message (nl_msg class instance). attrtype -- attribute type (integer). value -- numeric value to store as payload (int() or c_uint8()). Returns: 0 on success or a negative error code.
5,743
def score_url(self, url):
    # literals assumed: prefer non-ftp schemes and PyPI-hosted URLs
    t = urlparse(url)
    return (t.scheme != 'ftp', 'pypi.python.org' in t.netloc,
            posixpath.basename(t.path))
Give a URL a score which can be used to choose preferred URLs for a given project release.
5,744
def cmd_rally_add(self, args):
    if len(args) < 1:
        alt = self.settings.rallyalt
    else:
        alt = float(args[0])
    if len(args) < 2:
        break_alt = self.settings.rally_breakalt
    else:
        break_alt = float(args[1])
    if len(args) < 3:
        flag = self.settings.rally_flags
    else:
        flag = int(args[2])
        if (flag != 0):
            flag = 2
    if not self.have_list:
        print("Please list rally points first")
        return
    if (self.rallyloader.rally_count() > 4):
        print("Only 5 rally points possible per flight plan.")
        return
    try:
        latlon = self.module('map').click_position  # module name assumed
    except Exception:
        print("No map available")
        return
    if latlon is None:
        print("No map click position available")
        return
    land_hdg = 0.0
    self.rallyloader.create_and_append_rally_point(
        latlon[0] * 1e7, latlon[1] * 1e7, alt, break_alt, land_hdg, flag)
    self.send_rally_points()
    print("Added Rally point at %s %f %f, autoland: %s" %
          (str(latlon), alt, break_alt, bool(flag & 2)))
handle rally add
5,745
def is_all_Ns(self, start=0, end=None):
    if end is not None:
        if start > end:
            raise Error('start cannot be greater than end')  # message assumed
        end += 1
    else:
        end = len(self)
    if len(self) == 0:
        return False
    else:
        return re.search(r'[^Nn]', self.seq[start:end]) is None
Returns true if the sequence is all Ns (upper or lower case)
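The core test is just a negated character-class search; for example:

    import re
    re.search(r'[^Nn]', 'NNnN') is None  # True: nothing but Ns
    re.search(r'[^Nn]', 'NNaN') is None  # False: an 'a' sneaks in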
5,746
def printPi(self):
    assert self.pi is not None, "Calculate pi before calling printPi()"
    assert len(self.mapping) > 0, ("printPi() can only be used in combination with "
                                   "the direct or indirect method. Use print(mc.pi) "
                                   "if your subclass is called mc.")
    for key, state in self.mapping.items():
        print(state, self.pi[key])
Prints all states and their steady state probabilities. Not recommended for large state spaces.
5,747
def add_member(self, member, dn=False):
    if dn:
        if self.check_member(member, dn=True):
            return
        mod = (ldap.MOD_ADD, 'member', member.encode())
    else:
        if self.check_member(member):
            return
        mod = (ldap.MOD_ADD, 'member', member.get_dn().encode())
    if self.__lib__.__batch_mods__:
        self.__lib__.enqueue_mod(self.__dn__, mod)
    elif not self.__lib__.__ro__:
        mod_attrs = [mod]
        self.__con__.modify_s(self.__dn__, mod_attrs)
    else:
        print("ADD VALUE member = {} FOR {}".format(mod[2], self.__dn__))
Add a member to the bound group Arguments: member -- the CSHMember object (or distinguished name) of the member Keyword arguments: dn -- whether or not member is a distinguished name
5,748
def restore_renamed_serializable_attributes(self):
    if hasattr(self, 'start_addr'):
        self.origin = self.start_addr
        log.debug(f"moving start_addr to origin: {self.start_addr}")
        delattr(self, 'start_addr')
Hook for the future if attributes have been renamed. The old attribute names will have been restored in the __dict__.update in __setstate__, so this routine should move attribute values to their new names.
5,749
def cmdline_params(self, surface_sample_file_name, cell_file_name):
    parameters = []
    parameters += [surface_sample_file_name]
    parameters += [cell_file_name]
    parameters += [self._OUTPUT_FILE_NAME]
    return map(str, parameters)
Synthesize command line parameters e.g. [ ['struct.vsa'], ['struct.cell']]
5,750
def submit(self):
    if self.api._req_lixian_add_task_bt(self):
        self.submitted = True
        return True
    return False
Submit this torrent and create a new task
5,751
def parse(input_string, prefix=''):
    tree = parser.parse(input_string)
    visitor = ChatlVisitor(prefix)
    visit_parse_tree(tree, visitor)
    return visitor.parsed
Parses the given DSL string and returns parsed results. Args: input_string (str): DSL string prefix (str): Optional prefix to add to every element name, useful to namespace things Returns: dict: Parsed content
5,752
def attribute(self, attribute_id, action='GET', params=None):
    if params is None:
        params = {}
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
    if action == 'GET':
        return self.tc_requests.get_attribute(
            self.api_type,
            self.api_sub_type,
            self.unique_id,
            attribute_id,
            owner=self.owner,
            params=params,
        )
    if action == 'DELETE':
        return self.tc_requests.delete_attribute(
            self.api_type, self.api_sub_type, self.unique_id, attribute_id,
            owner=self.owner
        )
    # error-argument literals assumed; the originals were lost
    self._tcex.handle_error(925, ['action', 'attribute', 'action', 'attribute', action])
    return None
Gets the attribute from a Group/Indicator or Victim Args: action: params: attribute_id: Returns: attribute json
5,753
def subsample_snps_map(seqchunk, nmask, maparr):
    rmask = np.zeros(seqchunk.shape[1], dtype=np.bool_)
    last_loc = -1
    for idx in xrange(maparr.shape[0]):  # Python 2 xrange, kept from the source
        if maparr[idx] != last_loc:
            if not nmask[idx]:
                rmask[idx] = True
            last_loc = maparr[idx]
    return rmask
removes ncolumns from snparray prior to matrix calculation, and subsamples 'linked' snps (those from the same RAD locus) such that for these four samples only 1 SNP per locus is kept. This information comes from the 'map' array (map file).
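A tiny worked example (the source is Python 2, so swap xrange for range on Python 3):

    import numpy as np
    maparr = np.array([0, 0, 1, 1, 1, 2])   # locus id per SNP column
    nmask = np.zeros(6, dtype=bool)         # no columns excluded
    seqchunk = np.zeros((4, 6))
    subsample_snps_map(seqchunk, nmask, maparr)
    # -> array([ True, False,  True, False, False,  True]): one SNP kept per locus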
5,754
def _handle_raw_book(self, dtype, data, ts):
    self.log.debug("_handle_raw_book: %s - %s - %s", dtype, data, ts)
    channel_id, *data = data
    channel_identifier = self.channel_directory[channel_id]
    entry = (data, ts)
    self.raw_books[channel_identifier].put(entry)
Updates the raw order books stored in self.raw_books[chan_id]. :param dtype: :param data: :param ts: :return:
5,755
def addIDs(self, asfield=False):
    ids = vtk.vtkIdFilter()
    ids.SetInputData(self.poly)
    ids.PointIdsOn()
    ids.CellIdsOn()
    if asfield:
        ids.FieldDataOn()
    else:
        ids.FieldDataOff()
    ids.Update()
    return self.updateMesh(ids.GetOutput())
Generate point and cell ids. :param bool asfield: flag to control whether to generate scalar or field data.
5,756
def is_module_installed(module_name, version=None, installed_version=None,
                        interpreter=None):
    if interpreter:
        if osp.isfile(interpreter) and ('python' in interpreter):
            checkver = inspect.getsource(check_version)
            get_modver = inspect.getsource(get_module_version)
            stable_ver = inspect.getsource(is_stable_version)
            ismod_inst = inspect.getsource(is_module_installed)
            f = tempfile.NamedTemporaryFile('wt', suffix='.py',
                                            dir=get_temp_dir(), delete=False)
            try:
                script = f.name
                f.write("# -*- coding: utf-8 -*-\n\n")
                f.write("from distutils.version import LooseVersion" + "\n")
                f.write("import re" + "\n\n")
                f.write(stable_ver + "\n")
                f.write(checkver + "\n")
                f.write(get_modver + "\n")
                f.write(ismod_inst + "\n")
                if version:
                    f.write("print(is_module_installed('%s', '%s'))"
                            % (module_name, version))
                else:
                    f.write("print(is_module_installed('%s'))" % module_name)
                f.flush()
                os.fsync(f)
                f.close()
                try:
                    proc = run_program(interpreter, [script])
                    output, _err = proc.communicate()
                except subprocess.CalledProcessError:
                    return True
                return eval(output.decode())
            finally:
                if not f.closed:
                    f.close()
                os.remove(script)
        else:
            # Not a recognized Python interpreter: assume the module is present
            return True
    else:
        if installed_version is None:
            try:
                actver = get_module_version(module_name)
            except:
                return False
        else:
            actver = installed_version
        if actver is None and version is not None:
            return False
        elif version is None:
            return True
        else:
            if ';' in version:
                output = True
                for ver in version.split(';'):
                    output = output and is_module_installed(module_name, ver)
                return output
            match = re.search(r'[0-9]', version)
            assert match is not None, "Invalid version number"
            symb = version[:match.start()]
            if not symb:
                symb = '='
            assert symb in ('>=', '>', '=', '<', '<='), \
                "Invalid version condition '%s'" % symb
            version = version[match.start():]
            return check_version(actver, version, symb)
Return True if module *module_name* is installed If version is not None, checking module version (module must have an attribute named '__version__') version may starts with =, >=, > or < to specify the exact requirement ; multiple conditions may be separated by ';' (e.g. '>=0.13;<1.0') interpreter: check if a module is installed with a given version in a determined interpreter
5,757
def __exists_row_not_too_old(self, row):
    if row is None:
        return False
    record_time = dateutil.parser.parse(row[2])
    now = datetime.datetime.now(dateutil.tz.gettz())
    age = (record_time - now).total_seconds()
    if age > self.max_age:
        return False
    return True
Check if the given row exists and is not too old
5,758
def cmap_from_text(filename, norm=False, transparency=False, hex=False):
    lines = [line.rstrip() for line in open(filename)]
    _colors = []
    _tot = len(lines)
    _index = 1
    for i in lines:
        if transparency:
            _colors.append(_text_to_rgb(i, norm=norm, cat=_index, tot=_tot, hex=hex))
        else:
            _colors.append(_text_to_rgb(i, norm=norm, hex=hex))
        _index = _index + 1
    return _make_cmap(_colors)
cmap_from_text takes as input a file that contains a colormap in text format composed by lines with 3 values in the range [0,255] or [00,FF] and returns a tuple of integers. If the parameters cat and tot are given, the function generates a transparency value for this color and returns a tuple of length 4. tot is the total number of colors in the colormap cat is the index of the current colour in the colormap if norm is set to True, the input values are normalized between 0 and 1.
5,759
def unpublish_view(self, request, object_id):
    # kwarg names assumed; the original key literals were lost
    kwargs = {'model_admin': self, 'object_id': object_id}
    view_class = self.unpublish_view_class
    return view_class.as_view(**kwargs)(request)
Instantiates a class-based view that redirects to Wagtail's 'unpublish' view for models that extend 'Page' (if the user has sufficient permissions). We do this via our own view so that we can reliably control redirection of the user back to the index_view once the action is completed. The view class used can be overridden by changing the 'unpublish_view_class' attribute.
5,760
def compute_diffusion_maps(lapl_type, diffusion_map, lambdas, diffusion_time):
    if lapl_type not in ['geometric', 'renormalized']:
        warnings.warn("for correct diffusion maps embedding use laplacian type "
                      "'geometric' or 'renormalized'.")
    vectors = diffusion_map.copy()
    psi = vectors / vectors[:, [0]]
    diffusion_times = diffusion_time
    if diffusion_time == 0:
        lambdas = np.abs(lambdas)
        diffusion_times = np.exp(1. - np.log(1 - lambdas[1:]) / np.log(lambdas[1:]))
        lambdas = lambdas / (1 - lambdas)
    else:
        lambdas = np.abs(lambdas)
        lambdas = lambdas ** float(diffusion_time)
    diffusion_map = psi * lambdas
    return diffusion_map
Credit to Satrajit Ghosh (http://satra.cogitatum.org/) for final steps
5,761
def captured_output(stream_name):
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, orig_stdout)
Return a context manager used by captured_stdout/stdin/stderr that temporarily replaces the sys stream *stream_name* with a StringIO. Taken from Lib/support/__init__.py in the CPython repo.
5,762
def _transform_in(self):
    return np.array([
        [self.left, self.bottom, 0, 1],
        [self.right, self.top, 0, 1]])
Return array of coordinates that can be mapped by Transform classes.
5,763
def get_tetrahedra_integration_weight(omegas, tetrahedra_omegas, function='I'):
    if isinstance(omegas, float):
        return phonoc.tetrahedra_integration_weight(
            omegas,
            np.array(tetrahedra_omegas, dtype='double', order='C'),
            function)
    else:
        integration_weights = np.zeros(len(omegas), dtype='double')
        phonoc.tetrahedra_integration_weight_at_omegas(
            integration_weights,
            np.array(omegas, dtype='double'),
            np.array(tetrahedra_omegas, dtype='double', order='C'),
            function)
        return integration_weights
Returns integration weights Parameters ---------- omegas : float or list of float values Energy(s) at which the integration weight(s) are computed. tetrahedra_omegas : ndarray of list of list Energies at vertices of 24 tetrahedra shape=(24, 4) dtype='double' function : str, 'I' or 'J' 'J' is for integration and 'I' is for its derivative.
5,764
def new_instance(settings):
    settings = set_default({}, settings)
    if not settings["class"]:
        Log.error("Expecting 'class' attribute with fully qualified class name")
    path = settings["class"].split(".")
    class_name = path[-1]
    path = ".".join(path[:-1])
    constructor = None
    try:
        temp = __import__(path, globals(), locals(), [class_name], 0)
        constructor = object.__getattribute__(temp, class_name)
    except Exception as e:
        Log.error("Can not find class {{class}}", {"class": path}, cause=e)
    settings['class'] = None
    try:
        return constructor(kwargs=settings)
    except Exception as e:
        pass
    try:
        return constructor(**settings)
    except Exception as e:
        Log.error("Can not create instance of {{name}}", name=".".join(path), cause=e)
MAKE A PYTHON INSTANCE `settings` HAS ALL THE `kwargs`, PLUS `class` ATTRIBUTE TO INDICATE THE CLASS TO CREATE
5,765
def LSTM(nO, nI):
    weights = LSTM_weights(nO, nI)
    gates = LSTM_gates(weights.ops)
    return Recurrent(RNN_step(weights, gates))
Create an LSTM layer. Args: number out, number in
5,766
def management(self):
    endpoint = self._instance.get_endpoint_for_service_type(
        "management",
        region_name=self._instance._region_name,
    )
    token = self._instance.auth.get_token(self._instance.session)
    self._management = tuskar_client.get_client(
        2, os_auth_token=token, tuskar_url=endpoint)
    return self._management
Returns an management service client
5,767
def from_json(cls, path, fatal=True, logger=None):
    result = cls()
    result.load(path, fatal=fatal, logger=logger)
    return result
:param str path: Path to json file :param bool|None fatal: Abort execution on failure if True :param callable|None logger: Logger to use :return: Deserialized object
5,768
def getSupportedProtocols(self):
    protocols = {}
    for td in self.TD:
        if td is not None:
            strprotocol = "T=%d" % (td & 0x0F)
            protocols[strprotocol] = True
    if not self.hasTD[0]:
        protocols['T=0'] = True
    return protocols
Returns a dictionary of supported protocols.
5,769
def _to_dict(self):
    _dict = {}
    if hasattr(self, 'transcript') and self.transcript is not None:
        _dict['transcript'] = self.transcript
    if hasattr(self, 'confidence') and self.confidence is not None:
        _dict['confidence'] = self.confidence
    if hasattr(self, 'timestamps') and self.timestamps is not None:
        _dict['timestamps'] = self.timestamps
    if hasattr(self, 'word_confidence') and self.word_confidence is not None:
        _dict['word_confidence'] = self.word_confidence
    return _dict
Return a json dictionary representing this model.
5,770
def lang(self):
    return str(self.graph.value(self.asNode(), DC.language))
Languages this text is in :return: List of available languages
5,771
def gen_postinits(self, cls: ClassDefinition) -> str:
    post_inits = []
    if not cls.abstract:
        pkeys = self.primary_keys_for(cls)
        for pkey in pkeys:
            post_inits.append(self.gen_postinit(cls, pkey))
    for slotname in cls.slots:
        slot = self.schema.slots[slotname]
        if not (slot.primary_key or slot.identifier):
            post_inits.append(self.gen_postinit(cls, slotname))
    post_inits_line = '\n\t\t'.join([p for p in post_inits if p])
    # f-string body reconstructed; the exact whitespace of the original literal is unknown
    return (f'\n\tdef __post_init__(self):\n\t\t{post_inits_line}' + '\n') if post_inits_line else ''
Generate all the typing and existence checks post initialize
5,772
def makeLys(segID, N, CA, C, O, geo):
    CA_CB_length = geo.CA_CB_length
    C_CA_CB_angle = geo.C_CA_CB_angle
    N_C_CA_CB_diangle = geo.N_C_CA_CB_diangle
    CB_CG_length = geo.CB_CG_length
    CA_CB_CG_angle = geo.CA_CB_CG_angle
    N_CA_CB_CG_diangle = geo.N_CA_CB_CG_diangle
    CG_CD_length = geo.CG_CD_length
    CB_CG_CD_angle = geo.CB_CG_CD_angle
    CA_CB_CG_CD_diangle = geo.CA_CB_CG_CD_diangle
    CD_CE_length = geo.CD_CE_length
    CG_CD_CE_angle = geo.CG_CD_CE_angle
    CB_CG_CD_CE_diangle = geo.CB_CG_CD_CE_diangle
    CE_NZ_length = geo.CE_NZ_length
    CD_CE_NZ_angle = geo.CD_CE_NZ_angle
    CG_CD_CE_NZ_diangle = geo.CG_CD_CE_NZ_diangle
    carbon_b = calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
    CB = Atom("CB", carbon_b, 0.0, 1.0, " ", " CB", 0, "C")
    carbon_g = calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
    CG = Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
    carbon_d = calculateCoordinates(CA, CB, CG, CG_CD_length, CB_CG_CD_angle, CA_CB_CG_CD_diangle)
    CD = Atom("CD", carbon_d, 0.0, 1.0, " ", " CD", 0, "C")
    carbon_e = calculateCoordinates(CB, CG, CD, CD_CE_length, CG_CD_CE_angle, CB_CG_CD_CE_diangle)
    CE = Atom("CE", carbon_e, 0.0, 1.0, " ", " CE", 0, "C")
    nitrogen_z = calculateCoordinates(CG, CD, CE, CE_NZ_length, CD_CE_NZ_angle, CG_CD_CE_NZ_diangle)
    NZ = Atom("NZ", nitrogen_z, 0.0, 1.0, " ", " NZ", 0, "N")
    res = Residue((' ', segID, ' '), "LYS", '')  # hetfield/segid literals assumed
    res.add(N)
    res.add(CA)
    res.add(C)
    res.add(O)
    res.add(CB)
    res.add(CG)
    res.add(CD)
    res.add(CE)
    res.add(NZ)
    return res
Creates a Lysine residue
5,773
def get_mv_impedance(grid):
    omega = 2 * math.pi * 50
    mv_grid = grid.grid_district.lv_load_area.mv_grid_district.mv_grid
    edges = mv_grid.find_path(grid._station, mv_grid._station, type='edges')
    # branch/type keys assumed: resistance 'R' (ohm/km) and inductance 'L' (mH/km)
    r_mv_grid = sum([e[2]['branch'].type['R'] * e[2]['branch'].length / 1e3
                     for e in edges])
    x_mv_grid = sum([e[2]['branch'].type['L'] / 1e3 * omega *
                     e[2]['branch'].length / 1e3
                     for e in edges])
    return [r_mv_grid, x_mv_grid]
Determine MV grid impedance (resistance and reactance separately) Parameters ---------- grid : LVGridDing0 Returns ------- :any:`list` List containing resistance and reactance of MV grid
5,774
def retire(did):
    try:
        if dao.get(did) is None:
            return 'This asset DID is not in OceanDB', 404
        else:
            dao.delete(did)
            return 'Succesfully deleted', 200
    except Exception as err:
        return f'Some error: {err}', 500
Retire metadata of an asset --- tags: - ddo parameters: - name: did in: path description: DID of the asset. required: true type: string responses: 200: description: successfully deleted 404: description: This asset DID is not in OceanDB 500: description: Error
5,775
def update_from_stripe_data(self, stripe_coupon, exclude_fields=None, commit=True):
    fields_to_update = self.STRIPE_FIELDS - set(exclude_fields or [])
    update_data = {key: stripe_coupon[key] for key in fields_to_update}
    for field in ["created", "redeem_by"]:
        if update_data.get(field):
            update_data[field] = timestamp_to_timezone_aware_date(update_data[field])
    if update_data.get("amount_off"):
        update_data["amount_off"] = Decimal(update_data["amount_off"]) / 100
    for key, value in six.iteritems(update_data):
        setattr(self, key, value)
    if commit:
        return StripeCoupon.objects.filter(pk=self.pk).update(**update_data)
Update StripeCoupon object with data from stripe.Coupon without calling stripe.Coupon.retrieve. To only update the object, set the commit param to False. Returns the number of rows altered or None if commit is False.
5,776
def postorder(self):
    node_stack = []
    result = []
    node = self
    while True:
        while node is not None:
            if node.right is not None:
                node_stack.append(node.right)
            node_stack.append(node)
            node = node.left
        node = node_stack.pop()
        if (node.right is not None and len(node_stack) > 0
                and node_stack[-1] is node.right):
            node_stack.pop()
            node_stack.append(node)
            node = node.right
        else:
            result.append(node)
            node = None
        if len(node_stack) == 0:
            break
    return result
Return the nodes in the binary tree using post-order_ traversal. A post-order_ traversal visits left subtree, right subtree, then root. .. _post-order: https://en.wikipedia.org/wiki/Tree_traversal :return: List of nodes. :rtype: [binarytree.Node] **Example**: .. doctest:: >>> from binarytree import Node >>> >>> root = Node(1) >>> root.left = Node(2) >>> root.right = Node(3) >>> root.left.left = Node(4) >>> root.left.right = Node(5) >>> >>> print(root) <BLANKLINE> __1 / \\ 2 3 / \\ 4 5 <BLANKLINE> >>> root.postorder [Node(4), Node(5), Node(2), Node(3), Node(1)]
5,777
def __select_nearest_ws(jsondata, latitude, longitude):
    log.debug("__select_nearest_ws: latitude: %s, longitude: %s",
              latitude, longitude)
    dist = 0
    dist2 = 0
    loc_data = None
    try:
        ws_json = jsondata[__ACTUAL]
        ws_json = ws_json[__STATIONMEASUREMENTS]
    except (KeyError, TypeError):
        log.warning("Missing section in Buienradar xmldata (%s)."
                    "Can happen 00:00-01:00 CE(S)T",
                    __STATIONMEASUREMENTS)
        return None
    for wstation in ws_json:
        dist2 = __get_ws_distance(wstation, latitude, longitude)
        if dist2 is not None:
            if ((loc_data is None) or (dist2 < dist)):
                dist = dist2
                loc_data = wstation
    if loc_data is None:
        log.warning("No weatherstation selected; aborting...")
        return None
    else:
        try:
            log.debug("Selected weatherstation: code=%s, "
                      "name=%s, lat=%s, lon=%s.",
                      loc_data[__STATIONID],
                      loc_data[__STATIONNAME],
                      loc_data[__LAT],
                      loc_data[__LON])
        except KeyError:
            log.debug("Selected weatherstation")
        return loc_data
Select the nearest weatherstation.
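A minimal sketch of the expected input shape (an assumption based on the Buienradar JSON feed; the module constants presumably map to the keys shown):

# Hypothetical data; __ACTUAL -> 'actual', __STATIONMEASUREMENTS ->
# 'stationmeasurements', and __STATIONID/__STATIONNAME/__LAT/__LON likewise.
jsondata = {
    'actual': {
        'stationmeasurements': [
            {'stationid': 6260, 'stationname': 'Meetstation De Bilt',
             'lat': 52.10, 'lon': 5.18},
            {'stationid': 6391, 'stationname': 'Meetstation Arcen',
             'lat': 51.50, 'lon': 6.20},
        ]
    }
}
station = __select_nearest_ws(jsondata, 52.09, 5.12)  # -> the De Bilt entry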
5,778
def rt_subscription_running(self): return ( self._tibber_control.sub_manager is not None and self._tibber_control.sub_manager.is_running and self._subscription_id is not None )
Is real time subscription running.
5,779
def get_config(self):
    # Keys mirror the constructor arguments so that from_config() can
    # rebuild the initializer.
    return {
        'initializers': [
            tf.compat.v2.initializers.serialize(
                tf.keras.initializers.get(init))
            for init in self.initializers
        ],
        'sizes': self.sizes,
        'validate_args': self.validate_args,
    }
Returns initializer configuration as a JSON-serializable dict.
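A hedged round-trip sketch: get_config() is the serialization half of the Keras initializer contract and pairs with a from_config() classmethod. The BlockwiseInitializer name is an assumption about the enclosing class.

init = BlockwiseInitializer(
    initializers=['glorot_uniform', 'zeros'], sizes=[64, 10])
config = init.get_config()                           # JSON-serializable dict
restored = BlockwiseInitializer.from_config(config)  # equivalent initializer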
5,780
def modify_classes():
    import copy
    from django.conf import settings
    from django.contrib.admin.sites import site
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule

    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        # Follows the django.contrib.admin.autodiscover() pattern
        # (reconstructed from it here): attempt the import, restore the
        # admin registry on failure, and re-raise only when the app really
        # has a class_modifiers module.
        try:
            before_import_registry = copy.copy(site._registry)
            import_module('%s.class_modifiers' % app)
        except:
            site._registry = before_import_registry
            if module_has_submodule(mod, 'class_modifiers'):
                raise
Auto-discover INSTALLED_APPS class_modifiers.py modules and fail silently when not present. This forces an import on them to modify any classes they may want.
5,781
def _sync_to_group(self, device):
    # tmsh command reconstructed from the f5-sdk convention for config-sync.
    config_sync_cmd = 'config-sync to-group %s' % self.name
    device.tm.cm.exec_cmd('run', utilCmdArgs=config_sync_cmd)
Sync the device to the cluster group :param device: bigip object -- device to sync to group
5,782
def associate(op, args): args = dissociate(op, args) if len(args) == 0: return _op_identity[op] elif len(args) == 1: return args[0] else: return Expr(op, *args)
Given an associative op, return an expression with the same meaning as Expr(op, *args), but flattened -- that is, with nested instances of the same op promoted to the top level. >>> associate('&', [(A&B),(B|C),(B&C)]) (A & B & (B | C) & B & C) >>> associate('|', [A|(B|(C|(A&B)))]) (A | B | C | (A & B))
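associate() leans on dissociate(), which flattens nested applications of the same op into one argument list. A minimal sketch of that helper in the AIMA style, assuming every element exposes .op and .args like Expr:

def dissociate(op, args):
    # Collect arguments depth-first, splicing in the arguments of any
    # sub-expression built from the same operator.
    result = []
    def collect(subargs):
        for arg in subargs:
            if arg.op == op:
                collect(arg.args)
            else:
                result.append(arg)
    collect(args)
    return result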
5,783
def version():
    cmd = ['znc', '--version']
    out = __salt__['cmd.run'](cmd, python_shell=False).splitlines()
    # First line looks like 'ZNC 1.7.1 - https://znc.in'; keep the part
    # before the dash.
    ret = out[0].split(' - ')
    return ret[0]
Return server version from znc --version CLI Example: .. code-block:: bash salt '*' znc.version
5,784
def bowtie_general_stats_table(self):
    # Header keys and labels reconstructed from the MultiQC bowtie1 module.
    headers = OrderedDict()
    headers['reads_aligned_percentage'] = {
        'title': '% Aligned',
        'description': '% reads with at least one reported alignment',
        'max': 100,
        'min': 0,
        'suffix': '%',
        'scale': 'YlGn'
    }
    headers['reads_aligned'] = {
        'title': '{} Aligned'.format(config.read_count_prefix),
        'description': 'reads with at least one reported alignment ({})'.format(
            config.read_count_desc),
        'min': 0,
        'scale': 'PuRd',
        'modify': lambda x: x * config.read_count_multiplier,
        'shared_key': 'read_count'
    }
    self.general_stats_addcols(self.bowtie_data, headers)
Take the parsed stats from the Bowtie report and add it to the basic stats table at the top of the report
5,785
def show_yticklabels_for_all(self, row_column_list=None): if row_column_list is None: for subplot in self.subplots: subplot.show_yticklabels() else: for row, column in row_column_list: self.show_yticklabels(row, column)
Show the y-axis tick labels for all specified subplots. :param row_column_list: a list containing (row, column) tuples to specify the subplots, or None to indicate *all* subplots. :type row_column_list: list or None
5,786
def _get_pkey(self): keypath = self.config.server["pkey"] with open(os.path.expanduser(keypath)) as f: pkey = paramiko.RSAKey.from_private_key(f) return pkey
Gets an RSAKey object for the private key file so that we can copy files without logging in with user/password.
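A hedged sketch of how the key object is typically consumed for the passwordless copies mentioned above, assuming this runs inside the same class; the host and user names are hypothetical:

import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect('server.example.com', username='deploy', pkey=self._get_pkey())
sftp = client.open_sftp()
sftp.put('local.txt', '/remote/path/local.txt')  # no password prompt needed
sftp.close()
client.close()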
5,787
def tableexists(tablename):
    result = True
    try:
        t = table(tablename, ack=False)
    except Exception:
        result = False
    return result
Test if a table exists.
5,788
def map(self, coords): for tr in reversed(self.transforms): coords = tr.map(coords) return coords
Map coordinates Parameters ---------- coords : array-like Coordinates to map. Returns ------- coords : ndarray Coordinates.
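A tiny illustration of the reversed-order composition: with transforms = [A, B], map() applies B first, then A, i.e. A(B(coords)). The toy transform classes below are hypothetical stand-ins for vispy-style transforms exposing a map() method:

class Scale:
    def __init__(self, s): self.s = s
    def map(self, coords): return [c * self.s for c in coords]

class Offset:
    def __init__(self, d): self.d = d
    def map(self, coords): return [c + self.d for c in coords]

transforms = [Offset(1), Scale(2)]   # outermost transform first
coords = [1.0, 2.0]
for tr in reversed(transforms):      # Scale runs first, then Offset
    coords = tr.map(coords)
print(coords)                        # [3.0, 5.0]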
5,789
def _depth(g): def _explore(v): if v.depth < 0: v.depth = ((1 + max([-1] + [_explore(annotated_graph[u]) for u in v.parents])) if v.parents else 0) return v.depth annotated_graph = {k: _Node(k, v) for k, v in g.items()} for v in annotated_graph.values(): _explore(v) return annotated_graph
Computes the number of edges on longest path from node to root.
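A worked example, assuming g maps each node to the list of its parents (as the _Node helper defined alongside suggests): the root has depth 0, and a node two edges from the root gets depth 2 whichever path is taken.

g = {'a': [], 'b': ['a'], 'c': ['a'], 'd': ['b', 'c']}
annotated = _depth(g)
print({k: v.depth for k, v in annotated.items()})
# {'a': 0, 'b': 1, 'c': 1, 'd': 2}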
5,790
def _init_mgr(self, mgr, axes=None, dtype=None, copy=False): for a, axe in axes.items(): if axe is not None: mgr = mgr.reindex_axis(axe, axis=self._get_block_manager_axis(a), copy=False) if copy: mgr = mgr.copy() if dtype is not None: if len(mgr.blocks) > 1 or mgr.blocks[0].values.dtype != dtype: mgr = mgr.astype(dtype=dtype) return mgr
passed a manager and an axes dict
5,791
def items(self, founditems=[]): l = [] for e in self.data: if e not in founditems: l.append(e) if isinstance(e, AbstractElement): l += e.items(l) return l
Returns a depth-first flat list of *all* items below this element (not limited to AbstractElement)
5,792
def cursor(self):
    cursor = self.mdr.cursor()
    with self.transaction():
        try:
            yield cursor
            if cursor.rowcount != -1:
                self.last_row_count = cursor.rowcount
            self.last_row_id = getattr(cursor, 'lastrowid', None)
        except:
            self.last_row_count = None
            self.last_row_id = None
            _safe_close(cursor)
            raise
Get a cursor for the current connection. For internal use only.
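The yield suggests this is wrapped with @contextlib.contextmanager, so calling code would use it as below; `db` is a hypothetical instance of the enclosing class:

with db.cursor() as cur:
    cur.execute("UPDATE users SET active = 1 WHERE id = %s", (42,))
print(db.last_row_count)  # row count captured when the block exited cleanly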
5,793
def folder_cls_from_folder_name(cls, folder_name, locale): for folder_cls in cls.WELLKNOWN_FOLDERS + NON_DELETEABLE_FOLDERS: if folder_name.lower() in folder_cls.localized_names(locale): return folder_cls raise KeyError()
Returns the folder class that matches a localized folder name. locale is a string, e.g. 'da_DK'
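A hedged usage sketch in the exchangelib style; the base class name and the Danish folder name are assumptions:

try:
    folder_cls = Folder.folder_cls_from_folder_name('indbakke', 'da_DK')
except KeyError:
    folder_cls = None  # not a well-known folder name in this locale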
5,794
def save_xml(self, doc, element):
    super(TargetPort, self).save_xml(doc, element)
    # Attribute names reconstructed from the rtsprofile schema: the xsi:type
    # marks the extension type and 'portName' carries self.port_name.
    element.setAttributeNS(XSI_NS, XSI_NS_S + 'type', 'rtsExt:targetPort_ext')
    element.setAttributeNS(RTS_NS, RTS_NS_S + 'portName', self.port_name)
Save this target port into an xml.dom.Element object.
5,795
def get_writer(self):
    if self._writer is None:
        suffix = tasks.get_suffix(tasks.TRACK_TASK)
        try:
            base_name = re.search(r"(?P<base_name>.*?)\.\d*{}".format(suffix),
                                  self.filename).group('base_name')
        except AttributeError:
            # No match: fall back to the filename without its extension.
            base_name = os.path.splitext(self.filename)[0]
        mpc_filename_pattern = self.output_context.get_full_path(
            "{}.?{}".format(base_name, suffix))
        mpc_file_count = len(glob(mpc_filename_pattern))
        mpc_filename = "{}.{}{}".format(base_name, mpc_file_count, suffix)
        self._writer = self._create_writer(mpc_filename)
    return self._writer
Get a writer. This method also makes the output filename be the same as the .track file but with .mpc. (Currently only works on local filesystem) :rtype MPCWriter
5,796
def run(self): try: self._connect() self._register() while True: try: body = self.command_queue.get(block=True, timeout=1 * SECOND) except queue.Empty: body = None if body is not None: result = self._send(body) if result: self.command_queue.task_done() else: self._disconnect() self._connect() self._register() if self._stop_event.is_set(): logger.debug("CoreAgentSocket thread stopping.") break except Exception: logger.debug("CoreAgentSocket thread exception.") finally: self._started_event.clear() self._stop_event.clear() self._stopped_event.set() logger.debug("CoreAgentSocket thread stopped.")
Called by the threading system
5,797
def recursive_unicode(obj): if isinstance(obj, dict): return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items()) elif isinstance(obj, list): return list(recursive_unicode(i) for i in obj) elif isinstance(obj, tuple): return tuple(recursive_unicode(i) for i in obj) elif isinstance(obj, bytes_type): return to_unicode(obj) else: return obj
Walks a simple data structure, converting byte strings to unicode. Supports lists, tuples, and dictionaries.
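Example: byte strings anywhere in the structure come back as unicode while containers keep their types (assuming to_unicode decodes UTF-8):

data = {b'name': [b'alice', (b'x', 1)], 'count': 2}
print(recursive_unicode(data))
# {'name': ['alice', ('x', 1)], 'count': 2}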
5,798
def GenerateNewFileName(self): if self.showInfo.showName is not None and self.showInfo.seasonNum is not None and \ self.showInfo.episodeNum is not None and self.showInfo.episodeName is not None: ext = os.path.splitext(self.fileInfo.origPath)[1] newFileName = "{0}.S{1}E{2}".format(self.showInfo.showName, self.showInfo.seasonNum, \ self.showInfo.episodeNum) for episodeNum in self.showInfo.multiPartEpisodeNumbers: newFileName = newFileName + "_{0}".format(episodeNum) newFileName = newFileName + ".{0}{1}".format(self.showInfo.episodeName, ext) newFileName = util.StripSpecialCharacters(newFileName) return newFileName
Create new file name from show name, season number, episode number
and episode name in the format ShowName.S<NUM>E<NUM>.EpisodeName.

Returns
----------
string
    New file name in the format ShowName.S<NUM>E<NUM>.EpisodeName.
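A worked example of the naming scheme (all values hypothetical): with showName='Firefly', seasonNum='01', episodeNum='05', episodeName='Safe' and an original '.mkv' file, the method yields 'Firefly.S01E05.Safe.mkv'; with multiPartEpisodeNumbers=['06'] it yields 'Firefly.S01E05_06.Safe.mkv'.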
5,799
def capabilities(self, keyword=None): args = keyword code, message = self.command("CAPABILITIES", args) if code != 101: raise NNTPReplyError(code, message) return [x.strip() for x in self.info_gen(code, message)]
CAPABILITIES command. Determines the capabilities of the server. Although RFC3977 states that this is a required command for servers to implement not all servers do, so expect that NNTPPermanentError may be raised when this command is issued. See <http://tools.ietf.org/html/rfc3977#section-5.2> Args: keyword: Passed directly to the server, however, this is unused by the server according to RFC3977. Returns: A list of capabilities supported by the server. The VERSION capability is the first capability in the list.