Dataset viewer column summary: row index (int64, 0-389k); code (string, lengths 26-79.6k); docstring (string, lengths 1-46.9k).
17,200
def bind(self, family, type, proto=0): self.socket = sockets.Socket(family, type, proto) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.setblocking(0) self.socket.bind(self.bind_addr)
Create (or recreate) the actual socket object.
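Aside: a minimal standalone sketch of the same idiom (reuse address, switch to non-blocking, then bind), using only the standard socket module; the function name and address are hypothetical placeholders.
import socket

def make_listening_socket(bind_addr=("127.0.0.1", 8080)):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)  # allow quick rebinding after a restart
    s.setblocking(False)  # non-blocking, as in the method above
    s.bind(bind_addr)
    return s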
17,201
def ValidateLanguageCode(lang, column_name=None, problems=None): if util.IsEmpty(lang): return True bcp47_obj = parser.ParseLanguage(str(lang.lower())) if not bcp47_obj.wellformed: if problems: problems.InvalidValue(column_name, lang, 'language code "%s" is not well-formed' % lang, type=problems_class.TYPE_ERROR) return False if not bcp47_obj.valid: if problems: problems.InvalidValue(column_name, lang, 'language code "%s" is well-formed but not valid (%s)' % (lang, bcp47_obj), type=problems_class.TYPE_WARNING) return False return True
Validates a non-required language code value using the pybcp47 module: - if invalid adds InvalidValue error (if problems accumulator is provided) - distinguishes between 'not well-formed' and 'not valid' and adds error reasons accordingly - an empty language code is regarded as valid! Otherwise we might end up with many duplicate errors because of the required field checks. - returns true if the language is valid, false if not well-formed or invalid.
17,202
def visit_ListComp(self, node: ast.ListComp) -> Any: result = self._execute_comprehension(node=node) for generator in node.generators: self.visit(generator.iter) self.recomputed_values[node] = result return result
Compile the list comprehension as a function and call it.
17,203
def remover(self, id_perms): if not is_valid_int_param(id_perms): raise InvalidParameterError( u) url = + str(id_perms) + code, xml = self.submit(None, , url) return self.response(code, xml)
Remove an Administrative Permission by its identifier. :param id_perms: Identifier of the Administrative Permission. Integer value and greater than zero. :return: None :raise InvalidParameterError: The identifier of the Administrative Permission is null or invalid. :raise PermissaoAdministrativaNaoExisteError: Administrative Permission not registered. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response.
17,204
def wait(self): now = _monotonic() if now < self._ref: delay = max(0, self._ref - now) self.sleep_func(delay) self._update_ref()
Blocks until the rate is met
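A self-contained sketch of the same throttling idea, assuming a hypothetical RateLimiter that tracks the next allowed call time.
import time

class RateLimiter:
    def __init__(self, period, sleep_func=time.sleep):
        self.period = period          # minimum seconds between calls
        self.sleep_func = sleep_func
        self._ref = time.monotonic()  # next time a call is allowed

    def wait(self):
        now = time.monotonic()
        if now < self._ref:
            self.sleep_func(max(0, self._ref - now))
        self._ref = max(now, self._ref) + self.period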
17,205
def update_file(url, filename): resp = urlopen(url) if resp.code != 200: raise Exception('Could not fetch {}'.format(url)) with open(_get_package_path(filename), 'w') as fp: for l in resp: if not l.startswith(b'#'): fp.write(l.decode()) print('Updated {}'.format(filename))
Update the content of a single file.
17,206
def CreateMuskingumKfacFile(in_drainage_line, river_id, length_id, slope_id, celerity, formula_type, in_connectivity_file, out_kfac_file, length_units="km", slope_percentage=False, file_geodatabase=None): r ogr_drainage_line_shapefile_lyr, ogr_drainage_line_shapefile = \ open_shapefile(in_drainage_line, file_geodatabase) number_of_features = ogr_drainage_line_shapefile_lyr.GetFeatureCount() river_id_list = np.zeros(number_of_features, dtype=np.int32) length_list = \ np.zeros(number_of_features, dtype=np.float32) slope_list = np.zeros(number_of_features, dtype=np.float32) for feature_idx, drainage_line_feature in \ enumerate(ogr_drainage_line_shapefile_lyr): river_id_list[feature_idx] = drainage_line_feature.GetField(river_id) length = drainage_line_feature.GetField(length_id) if length is not None: length_list[feature_idx] = length slope = drainage_line_feature.GetField(slope_id) if slope is not None: slope_list[feature_idx] = slope del ogr_drainage_line_shapefile if slope_percentage: slope_list /= 100.0 if length_units == "m": length_list /= 1000.0 elif length_units != "km": raise Exception("Invalid length units supplied. " "Supported units are m and km.") connectivity_table = np.loadtxt(in_connectivity_file, delimiter=",", ndmin=2, dtype=int) length_slope_array = [] kfac2_array = [] if formula_type == 1: log("River Length/Celerity") elif formula_type == 2: log("Eta*River Length/Sqrt(River Slope)") elif formula_type == 3: log("Eta*River Length/Sqrt(River Slope) [0.05, 0.95]") else: raise Exception("Invalid formula type. Valid range: 1-3 ...") with open_csv(out_kfac_file, ) as kfacfile: kfac_writer = csv_writer(kfacfile) for row in connectivity_table: stream_id = int(float(row[0])) stream_id_index = river_id_list == stream_id stream_length = length_list[stream_id_index] * 1000.0 if formula_type >= 2: stream_slope = slope_list[stream_id_index] if stream_slope <= 0: next_down_id = int(float(row[1])) next_down_slope = 0 try: next_down_index = \ np.where(river_id_list == next_down_id)[0][0] next_down_slope = slope_list[next_down_index] except IndexError: pass next_up_id = int(float(row[3])) next_up_slope = 0 try: next_up_index = \ np.where(river_id_list == next_up_id)[0][0] next_up_slope = slope_list[next_up_index] except IndexError: pass stream_slope = (next_down_slope + next_up_slope) / 2.0 if stream_slope <= 0: stream_slope = 0.001 length_slope_array.append(stream_length / stream_slope**0.5) kfac2_array.append(stream_length / celerity) else: kfac = stream_length / celerity kfac_writer.writerow(kfac) if formula_type >= 2: if formula_type == 3: log("Filtering Data by 5th and 95th Percentiles ...") length_slope_array = np.array(length_slope_array) percentile_5 = np.percentile(length_slope_array, 5) percentile_95 = np.percentile(length_slope_array, 95) length_slope_array[length_slope_array < percentile_5] = \ percentile_5 length_slope_array[length_slope_array > percentile_95] = \ percentile_95 eta = np.mean(kfac2_array) / np.mean(length_slope_array) log("Kfac2_Avg {0}".format(np.mean(kfac2_array))) log("Length_Slope Avg {0}".format(np.mean(length_slope_array))) log("Eta {0}".format(eta)) log("Writing Data ...") for len_slope in length_slope_array: kfac_writer.writerow(eta*len_slope)
r""" Creates the Kfac file for calibration. The improved methods using slope to generate values for Kfac were used here: Tavakoly, A. A., A. D. Snow, C. H. David, M. L. Follum, D. R. Maidment, and Z.-L. Yang, (2016) "Continental-Scale River Flow Modeling of the Mississippi River Basin Using High-Resolution NHDPlus Dataset", Journal of the American Water Resources Association (JAWRA) 1-22. DOI: 10.1111/1752-1688.12456 Formula Type Options: 1. :math:`Kfac_n = \frac{RiverLength_n}{Celerity_n}` 2. :math:`Kfac_n = \eta*\frac{RiverLength_n}{\sqrt{RiverSlope_n}}` 3. :math:`Kfac_n = \eta*\frac{RiverLength_n}{\sqrt{RiverSlope_n}}\left[0.05, 0.95\right]` Where: :math:`a = \frac{\sum_{n=1}^{r} \frac{RiverLength_n}{Celerity_n}}{r}` :math:`b = \frac{\sum_{n=1}^{r} \frac{RiverLength_n}{\sqrt{RiverSlope_n}}}{r}` :math:`\eta = \frac{a}{b}` r = Number of river segments. Parameters ---------- in_drainage_line: str Path to the stream network (i.e. Drainage Line) shapefile. river_id: str The name of the field with the river ID (Ex. 'HydroID', 'COMID', or 'LINKNO'). length_id: str The field name containging the length of the river segment (Ex. 'LENGTHKM' or 'Length'). slope_id: str The field name containging the slope of the river segment (Ex. 'Avg_Slope' or 'Slope'). celerity: float The flow wave celerity for the watershed in meters per second. 1 km/hr or 1000.0/3600.0 m/s is a reasonable value if unknown. formula_type: int An integer representing the formula type to use when calculating kfac. in_connectivity_file: str The path to the RAPID connectivity file. out_kfac_file: str The path to the output kfac file. length_units: str, optional The units for the length_id field. Supported types are "m" for meters and "km" for kilometers. slope_percentage: bool, optional If True, it assumes the slope given is in percentage and will divide by 100. Default is False. file_geodatabase: str, optional Path to the file geodatabase. If you use this option, in_drainage_line is the name of the stream network feature class (WARNING: Not always stable with GDAL). Example:: from RAPIDpy.gis.muskingum import CreateMuskingumKfacFile CreateMuskingumKfacFile( in_drainage_line='/path/to/drainageline.shp', river_id='LINKNO', length_id='Length', slope_id='Slope', celerity=1000.0/3600.0, formula_type=3, in_connectivity_file='/path/to/rapid_connect.csv', out_kfac_file='/path/to/kfac.csv', length_units="m", )
17,207
def plotConvergenceByObject(results, objectRange, featureRange, numTrials, linestyle=): convergence = numpy.zeros((max(featureRange), max(objectRange) + 1)) for r in results: if r["numFeatures"] in featureRange: convergence[r["numFeatures"] - 1, r["numObjects"]] += r["convergencePoint"] convergence /= numTrials legendList = [] colorList = [, , , , , , ] for i in range(len(featureRange)): f = featureRange[i] print "features={} objectRange={} convergence={}".format( f,objectRange, convergence[f-1,objectRange]) legendList.append(.format(f)) plt.plot(objectRange, convergence[f-1, objectRange], color=colorList[i], linestyle=linestyle) plt.legend(legendList, loc="lower right", prop={:10}) plt.xlabel("Number of objects in training set") plt.xticks(range(0,max(objectRange)+1,10)) plt.yticks(range(0,int(convergence.max())+2)) plt.ylabel("Average number of touches") plt.title("Number of touches to recognize one object (single column)")
Plots the convergence graph: iterations vs number of objects. Each curve shows the convergence for a given number of unique features.
17,208
def evaluate(self, batchsize): sum_loss, sum_accuracy = 0, 0 for i in range(0, self.testsize, batchsize): x = Variable(self.x_test[i: i + batchsize]) y = Variable(self.y_test[i: i + batchsize]) loss = self.model(x, y) sum_loss += loss.data * batchsize sum_accuracy += self.model.accuracy.data * batchsize return sum_loss / self.testsize, sum_accuracy / self.testsize
Evaluate how well the classifier is doing. Return mean loss and mean accuracy
17,209
def ttfautohint(in_file, out_file, args=None, **kwargs): arg_list = ["ttfautohint"] file_args = [in_file, out_file] if args is not None: if kwargs: raise TypeError("Should not provide both cmd args and kwargs.") rv = subprocess.call(arg_list + args.split() + file_args) if rv != 0: raise TTFAError(rv) return boolean_options = ( "debug", "composites", "dehint", "help", "ignore_restrictions", "detailed_info", "no_info", "adjust_subglyphs", "symbol", "ttfa_table", "verbose", "version", "windows_compatibility", ) other_options = ( "default_script", "fallback_script", "family_suffix", "hinting_limit", "fallback_stem_width", "hinting_range_min", "control_file", "hinting_range_max", "strong_stem_width", "increase_x_height", "x_height_snapping_exceptions", ) for option in boolean_options: if kwargs.pop(option, False): arg_list.append("--" + option.replace("_", "-")) for option in other_options: arg = kwargs.pop(option, None) if arg is not None: arg_list.append("--{}={}".format(option.replace("_", "-"), arg)) if kwargs: raise TypeError("Unexpected argument(s): " + ", ".join(kwargs.keys())) rv = subprocess.call(arg_list + file_args) if rv != 0: raise TTFAError(rv)
Thin wrapper around the ttfautohint command line tool. Can take in command line arguments directly as a string, or spelled out as Python keyword arguments.
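A hedged usage sketch for the wrapper above; the font paths are placeholders, and either a raw argument string or keyword options may be given, but not both.
# keyword form
ttfautohint("MyFont.ttf", "MyFont-hinted.ttf", hinting_range_min=8, hinting_range_max=50, no_info=True)
# equivalent raw-string form
ttfautohint("MyFont.ttf", "MyFont-hinted.ttf", args="--hinting-range-min=8 --hinting-range-max=50 --no-info")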
17,210
def get_query_string(environ): qs = wsgi_get_bytes(environ.get("QUERY_STRING", "")) return try_coerce_native(url_quote(qs, safe=":&%=+$!*'(),"))
Returns the `QUERY_STRING` from the WSGI environment. This also takes care about the WSGI decoding dance on Python 3 environments as a native string. The string returned will be restricted to ASCII characters. .. versionadded:: 0.9 :param environ: the WSGI environment object to get the query string from.
17,211
def block(self, tofile="block.dat"): with self.client.connect(*self.bestip): data = self.client.get_and_parse_block_info(tofile) return self.client.to_df(data)
Get stock sector (board) information. :param tofile: :return: pd.DataFrame or None
17,212
def do_lisp(self, subcmd, opts, folder=""): client = MdClient(self.maildir, filesystem=self.filesystem) client.lisp( foldername=folder, stream=self.stdout, reverse=getattr(opts, "reverse", False), since=float(getattr(opts, "since", -1)) )
${cmd_name}: list messages in the specified folder in JSON format ${cmd_usage}
17,213
def createDataport(self, auth, desc, defer=False): return self._call('create', auth, ['dataport', desc], defer)
Create a dataport resource. "format" and "retention" are required { "format": "float" | "integer" | "string", "meta": string = "", "name": string = "", "preprocess": list = [], "public": boolean = false, "retention": { "count": number | "infinity", "duration": number | "infinity" }, "subscribe": <ResourceID> | null = null }
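A usage sketch based on the schema in the docstring; client and auth are hypothetical stand-ins for an instance of this API class and its authentication key.
desc = {
    "format": "float",
    "name": "temperature",
    "retention": {"count": "infinity", "duration": "infinity"},
}
rid = client.createDataport(auth, desc)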
17,214
def ReleaseObject(self, identifier): if identifier not in self._values: raise KeyError('Missing cached object for identifier: {0:s}'.format( identifier)) cache_value = self._values[identifier] if not cache_value: raise RuntimeError('Missing cache value for identifier: {0:s}'.format( identifier)) cache_value.DecrementReferenceCount()
Releases a cached object based on the identifier. This method decrements the cache value reference count. Args: identifier (str): VFS object identifier. Raises: KeyError: if the VFS object is not found in the cache. RuntimeError: if the cache value is missing.
17,215
def apply(self, im): from scipy.ndimage.interpolation import shift return shift(im, [-x for x in self.delta], mode='nearest')
Apply an n-dimensional displacement by shifting an image or volume. Parameters ---------- im : ndarray The image or volume to shift
17,216
def masked(name, runtime=False, root=None): _check_for_unit_changes(name) root_dir = _root('/run' if runtime else '/etc', root) link_path = os.path.join(root_dir, 'systemd', 'system', _canonical_unit_name(name)) try: return os.readlink(link_path) == '/dev/null' except OSError as exc: if exc.errno == errno.ENOENT: log.trace('Path %s does not exist. This means the service %s is not masked', link_path, name) elif exc.errno == errno.EINVAL: log.error('Failed to check mask status for service %s: %s is not a symlink', name, link_path) return False
.. versionadded:: 2015.8.0 .. versionchanged:: 2015.8.5 The return data for this function has changed. If the service is masked, the return value will now be the output of the ``systemctl is-enabled`` command (so that a persistent mask can be distinguished from a runtime mask). If the service is not masked, then ``False`` will be returned. .. versionchanged:: 2017.7.0 This function now returns a boolean telling the user whether a mask specified by the new ``runtime`` argument is set. If ``runtime`` is ``False``, this function will return ``True`` if an indefinite mask is set for the named service (otherwise ``False`` will be returned). If ``runtime`` is ``False``, this function will return ``True`` if a runtime mask is set, otherwise ``False``. Check whether or not a service is masked runtime : False Set to ``True`` to check for a runtime mask .. versionadded:: 2017.7.0 In previous versions, this function would simply return the output of ``systemctl is-enabled`` when the service was found to be masked. However, since it is possible to both have both indefinite and runtime masks on a service simultaneously, this function now only checks for runtime masks if this argument is set to ``True``. Otherwise, it will check for an indefinite mask. root Enable/disable/mask unit files in the specified root directory CLI Examples: .. code-block:: bash salt '*' service.masked foo salt '*' service.masked foo runtime=True
17,217
def list_users(self, limit=None, marker=None): return self._user_manager.list(limit=limit, marker=marker)
Returns a list of the names of all users for this instance.
17,218
def build_payment_parameters(amount: Money, client_ref: str) -> PaymentParameters: merchant_id = web_merchant_id amount, currency = money_to_amount_and_currency(amount) refno = client_ref sign = sign_web(merchant_id, amount, currency, refno) parameters = PaymentParameters( merchant_id=merchant_id, amount=amount, currency=currency, refno=refno, sign=sign, use_alias=False, ) logger.info('Built payment parameters', parameters=parameters) return parameters
Builds the parameters needed to present the user with a datatrans payment form. :param amount: The amount and currency we want the user to pay :param client_ref: A unique reference for this payment :return: The parameters needed to display the datatrans form
17,219
def gaussian(df, width=0.3, downshift=-1.8, prefix=None): df = df.copy() imputed = df.isnull() if prefix: mycols = [i for i, c in enumerate(df.columns) if str(c).startswith(prefix)] else: mycols = range(df.shape[1]) if not isinstance(width, (list, tuple)): width = [width] * df.shape[1] if not isinstance(downshift, (list, tuple)): downshift = [downshift] * df.shape[1] for i in mycols: data = df.iloc[:, i] mask = data.isnull().values mean = data.mean(axis=0) stddev = data.std(axis=0) m = mean + downshift[i]*stddev s = stddev*width[i] values = np.random.normal(loc=m, scale=s, size=df.shape[0]) df.iloc[mask, i] = values[mask] return df, imputed
Impute missing values by drawing from a normal distribution :param df: :param width: Scale factor for the imputed distribution relative to the standard deviation of measured values. Can be a single number or list of one per column. :param downshift: Shift the imputed values down, in units of std. dev. Can be a single number or list of one per column :param prefix: The column prefix for imputed columns :return:
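The core idea, down-shifted Gaussian imputation for left-censored missing values, can be sketched for a single column with scalar width and downshift (independent of the helper above).
import numpy as np
import pandas as pd

def impute_column(col, width=0.3, downshift=-1.8, seed=0):
    rng = np.random.default_rng(seed)
    col = col.copy()
    mask = col.isnull()
    m = col.mean() + downshift * col.std()   # shifted mean
    s = col.std() * width                    # narrowed spread
    col[mask] = rng.normal(loc=m, scale=s, size=mask.sum())
    return col

print(impute_column(pd.Series([1.0, 2.0, np.nan, 1.5, np.nan])))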
17,220
def get_stock_quote(self, code_list): code_list = unique_and_normalize_list(code_list) if not code_list: error_str = ERROR_STR_PREFIX + "the type of code_list param is wrong" return RET_ERROR, error_str query_processor = self._get_sync_query_processor( StockQuoteQuery.pack_req, StockQuoteQuery.unpack_rsp, ) kargs = { "stock_list": code_list, "conn_id": self.get_sync_conn_id() } ret_code, msg, quote_list = query_processor(**kargs) if ret_code == RET_ERROR: return ret_code, msg col_list = ['code', 'data_date', 'data_time', 'last_price', 'open_price', 'high_price', 'low_price', 'prev_close_price', 'volume', 'turnover', 'turnover_rate', 'amplitude', 'suspension', 'listing_date', 'price_spread', 'dark_status', 'strike_price', 'contract_size', 'open_interest', 'implied_volatility', 'premium', 'delta', 'gamma', 'vega', 'theta', 'rho'] quote_frame_table = pd.DataFrame(quote_list, columns=col_list) return RET_OK, quote_frame_table
Get real-time quote data for subscribed stocks (subject to subscription requirements). For asynchronous push, see StockQuoteHandlerBase :param code_list: list of stock codes; every code in code_list must already be successfully subscribed before this call can be executed :return: (ret, data) ret == RET_OK returns a pd.DataFrame with the columns below ret != RET_OK returns an error string ===================== =========== ============================================================== Field Type Description ===================== =========== ============================================================== code str stock code data_date str date data_time str time (US stocks default to US Eastern time; HK and A-share stocks default to Beijing time) last_price float latest price open_price float today's opening price high_price float highest price low_price float lowest price prev_close_price float previous close price volume int traded volume turnover float traded value turnover_rate float turnover rate amplitude int amplitude suspension bool whether trading is suspended (True means suspended) listing_date str listing date (yyyy-MM-dd) price_spread float current price spread, i.e. the quote difference between adjacent bid or ask levels in the order book dark_status str grey-market (dark) trading status, see DarkStatus strike_price float strike price contract_size int contract size open_interest int open interest (number of open contracts) implied_volatility float implied volatility premium float premium delta float greek Delta gamma float greek Gamma vega float greek Vega theta float greek Theta rho float greek Rho ===================== =========== ==============================================================
17,221
def _partition_data(datavol, roivol, roivalue, maskvol=None, zeroe=True): if maskvol is not None: indices = (roivol == roivalue) * (maskvol > 0) else: indices = roivol == roivalue if datavol.ndim == 4: ts = datavol[indices, :] else: ts = datavol[indices] if zeroe: if datavol.ndim == 4: ts = ts[ts.sum(axis=1) != 0, :] return ts
Extracts the values in `datavol` that are in the ROI with value `roivalue` in `roivol`. The ROI can be masked by `maskvol`. Parameters ---------- datavol: numpy.ndarray 4D timeseries volume or a 3D volume to be partitioned roivol: numpy.ndarray 3D ROIs volume roivalue: int or float A value from roivol that represents the ROI to be used for extraction. maskvol: numpy.ndarray 3D mask volume zeroe: bool If true will remove the null timeseries voxels. Only applied to timeseries (4D) data. Returns ------- values: np.array An array of the values in the indicated ROI. A 2D matrix if `datavol` is 4D or a 1D vector if `datavol` is 3D.
17,222
def generate(self, model_len=None, model_width=None): if model_len is None: model_len = Constant.MODEL_LEN if model_width is None: model_width = Constant.MODEL_WIDTH pooling_len = int(model_len / 4) graph = Graph(self.input_shape, False) temp_input_channel = self.input_shape[-1] output_node_id = 0 stride = 1 for i in range(model_len): output_node_id = graph.add_layer(StubReLU(), output_node_id) output_node_id = graph.add_layer( self.batch_norm(graph.node_list[output_node_id].shape[-1]), output_node_id ) output_node_id = graph.add_layer( self.conv(temp_input_channel, model_width, kernel_size=3, stride=stride), output_node_id, ) temp_input_channel = model_width if pooling_len == 0 or ((i + 1) % pooling_len == 0 and i != model_len - 1): output_node_id = graph.add_layer(self.pooling(), output_node_id) output_node_id = graph.add_layer(self.global_avg_pooling(), output_node_id) output_node_id = graph.add_layer( self.dropout(Constant.CONV_DROPOUT_RATE), output_node_id ) output_node_id = graph.add_layer( StubDense(graph.node_list[output_node_id].shape[0], model_width), output_node_id, ) output_node_id = graph.add_layer(StubReLU(), output_node_id) graph.add_layer(StubDense(model_width, self.n_output_node), output_node_id) return graph
Generates a CNN. Args: model_len: An integer. Number of convolutional layers. model_width: An integer. Number of filters for the convolutional layers. Returns: An instance of the class Graph. Represents the neural architecture graph of the generated model.
17,223
def _load_params(params, logger=logging): if isinstance(params, str): cur_path = os.path.dirname(os.path.realpath(__file__)) param_file_path = os.path.join(cur_path, params) logger.info('Loading params from file %s' % param_file_path) save_dict = nd_load(param_file_path) arg_params = {} aux_params = {} for k, v in save_dict.items(): tp, name = k.split(':', 1) if tp == 'arg': arg_params[name] = v if tp == 'aux': aux_params[name] = v return arg_params, aux_params elif isinstance(params, (tuple, list)) and len(params) == 2: return params[0], params[1] else: raise ValueError('params must be a path to a .params file or a pair of (arg_params, aux_params)')
Given a str as a path to the .params file or a pair of params, returns two dictionaries representing arg_params and aux_params.
17,224
def namedb_query_execute( cur, query, values, abort=True): return db_query_execute(cur, query, values, abort=abort)
Execute a query. If it fails, abort. Retry with timeouts on lock DO NOT CALL THIS DIRECTLY.
17,225
def reference(self, refobj, taskfileinfo): with common.preserve_namespace(":"): jbfile = JB_File(taskfileinfo) filepath = jbfile.get_fullpath() ns_suggestion = reftrack.get_namespace(taskfileinfo) newnodes = cmds.file(filepath, reference=True, namespace=ns_suggestion, returnNewNodes=True) for refnode in cmds.ls(newnodes, type='reference'): if not cmds.referenceQuery(refnode, isNodeReferenced=True): node = refnode break ns = cmds.referenceQuery(node, namespace=True) content = cmds.namespaceInfo(ns, listOnlyDependencyNodes=True, dagPath=True) scenenode = self.get_scenenode(content) self.get_refobjinter().connect_reftrack_scenenode(refobj, scenenode) reccontent = cmds.namespaceInfo(ns, listOnlyDependencyNodes=True, dagPath=True, recurse=True) dagcontent = cmds.ls(reccontent, ap=True, assemblies=True) if not dagcontent: return node grpname = reftrack.get_groupname(taskfileinfo) reftrack.group_content(dagcontent, ns, grpname, "jb_asset") return node
Reference the given taskfileinfo into the scene and return the created reference node The created reference node will be used on :meth:`RefobjInterface.set_reference` to set the reference on a reftrack node. Do not call :meth:`RefobjInterface.set_reference` yourself. This will also create a group node and group all dagnodes under a appropriate node. :param refobj: the reftrack node that will be linked to the reference :type refobj: str :param taskfileinfo: The taskfileinfo that holds the information for what to reference :type taskfileinfo: :class:`jukeboxcore.filesys.TaskFileInfo` :returns: the reference node that was created and should set on the appropriate reftrack node :rtype: str :raises: None
17,226
def export_osm_file(self): osm = create_elem('osm', {'generator': self.generator, 'version': self.version}) osm.extend(obj.toosm() for obj in self) return etree.ElementTree(osm)
Generate OpenStreetMap element tree from ``Osm``.
17,227
def read_committed_file(gitref, filename): repo = Repo() commitobj = repo.commit(gitref) blob = commitobj.tree[_delta_dir() + filename] return blob.data_stream.read()
Retrieve the content of a file in an old commit and return it. Keyword Arguments: :gitref: (str) -- full reference of the git commit :filename: (str) -- name (full path) of the file Returns: str -- content of the file
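A hedged usage sketch, assuming GitPython is available and the call is made inside a git working copy; the revision and filename are placeholders.
content = read_committed_file("HEAD~1", "README.md")
print(content.decode("utf-8", errors="replace")[:200])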
17,228
def calc_fc_size(img_height, img_width): height, width = img_height, img_width for _ in range(5): height, width = _get_conv_outsize( (height, width), 4, 2, 1) conv_out_layers = 512 return conv_out_layers, height, width
Calculates shape of data after encoding. Parameters ---------- img_height : int Height of input image. img_width : int Width of input image. Returns ------- encoded_shape : tuple(int) Gives back 3-tuple with new dims.
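The helper above repeatedly applies the usual convolution output-size rule, floor((size + 2*pad - kernel) / stride) + 1, with kernel 4, stride 2, pad 1; a small standalone check.
def conv_outsize(size, k=4, s=2, p=1):
    return (size + 2 * p - k) // s + 1

h, w = 64, 64
for _ in range(5):
    h, w = conv_outsize(h), conv_outsize(w)
print(h, w)  # 2 2 -- five stride-2 convolutions halve each dimension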
17,229
def set_path(self, data, path, value): self.say('Setting value ' + str(value) + ' at path ' + str(path) + ' in ' + str(data)) if isinstance(path, str): path = path.split('.') if len(path) > 1: self.set_path(data.setdefault(path[0], {}), path[1:], value) else: data[path[0]] = value return data
Sets the given key in the given dict object to the given value. If the given path is nested, child dicts are created as appropriate. Accepts either a dot-delimited path or an array of path elements as the `path` variable.
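The same nested-set behaviour as a standalone sketch, assuming dot-delimited string paths.
def set_path(data, path, value):
    if isinstance(path, str):
        path = path.split(".")
    if len(path) > 1:
        set_path(data.setdefault(path[0], {}), path[1:], value)
    else:
        data[path[0]] = value
    return data

print(set_path({}, "a.b.c", 1))  # {'a': {'b': {'c': 1}}}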
17,230
def get_instance(cls, state): if cls.instance is None: cls.instance = UserStorageHandler(state) return cls.instance
:rtype: UserStorageHandler
17,231
def set_scale_alpha_from_selection(self): selection = self.treeview_layers.get_selection() list_store, selected_iter = selection.get_selected() if selected_iter is None: self.adjustment_alpha.set_value(100) self.scale_alpha.set_sensitive(False) return else: surface_name, alpha = list_store[selected_iter] self.adjustment_alpha.set_value(alpha * 100) self.scale_alpha.set_sensitive(True)
Set scale marker to alpha for selected layer.
17,232
def get(self, field, value=None): self.value = value val = self.input(field) if field == 'name': while True: if val != '': break print("Name cannot be empty.") val = self.input(field) elif field == 'priority': if val == '': return None while True: if val in Get.PRIORITIES.values(): break c, val = val, Get.PRIORITIES.get(val) if val: break print("Unrecognized priority number or name [{}].".format(c)) val = self.input(field) val = int(val) return val
Gets user input for given field and checks if it is valid. If input is invalid, it will ask the user to enter it again. Defaults values to empty or :value:. It does not check validity of parent index. It can only be tested further down the road, so for now accept anything. :field: Field name. :value: Default value to use for field. :returns: User input.
17,233
def _writeFASTA(self, i, image): if isinstance(self._titlesAlignments.readsAlignments.reads, FastqReads): format_ = 'fastq' else: format_ = 'fasta' filename = '%s/%d.%s' % (self._outputDir, i, format_) titleAlignments = self._titlesAlignments[image['title']] with open(filename, 'w') as fp: for titleAlignment in titleAlignments: fp.write(titleAlignment.read.toString(format_)) return format_
Write a FASTA file containing the set of reads that hit a sequence. @param i: The number of the image in self._images. @param image: A member of self._images. @return: A C{str}, either 'fasta' or 'fastq' indicating the format of the reads in C{self._titlesAlignments}.
17,234
def write_totals(self, file_path=, date=str(datetime.date.today()), organization=, members=0, teams=0): total_exists = os.path.isfile(file_path) with open(file_path, ) as out_total: if not total_exists: out_total.write( + + + + ) self.delete_last_line(date=date, file_path=file_path) out_total.close() with open(file_path, ) as file_read: row_count = sum(1 for row in file_read) - 1 file_read.close() with open(file_path, ) as out_total: out_total.write(date + + organization + + str(self.total_repos) + + str(members) + + str(teams) + + str(len(self.unique_contributors)) + + str(self.total_contributors) + + str(self.total_forks) + + str(self.total_stars) + + str(self.total_pull_reqs) + + str(self.total_open_issues) + + str(self.total_readmes) + + str(self.total_licenses) + + str(self.total_pull_reqs_open) + + str(self.total_pull_reqs_closed) + + str(self.total_commits) + + str(row_count) + + str(self.total_closed_issues) + + str(self.total_issues) + ) out_total.close()
Updates the total.csv file with current data.
17,235
def tiles(self) -> np.array: return self._tiles.T if self._order == "F" else self._tiles
An array of this console's tile data. This acts as a combination of the `ch`, `fg`, and `bg` attributes. Colors include an alpha channel but how alpha works is currently undefined. Example:: >>> con = tcod.console.Console(10, 2, order="F") >>> con.tiles[0, 0] = ( ... ord("X"), ... (*tcod.white, 255), ... (*tcod.black, 255), ... ) >>> con.tiles[0, 0] (88, [255, 255, 255, 255], [ 0, 0, 0, 255]) .. versionadded:: 10.0
17,236
def Convert(self, metadata, checkresult, token=None): if checkresult.HasField("anomaly"): for anomaly in checkresult.anomaly: exported_anomaly = ExportedAnomaly( type=anomaly.type, severity=anomaly.severity, confidence=anomaly.confidence) if anomaly.symptom: exported_anomaly.symptom = anomaly.symptom if anomaly.explanation: exported_anomaly.explanation = anomaly.explanation if anomaly.generated_by: exported_anomaly.generated_by = anomaly.generated_by if anomaly.anomaly_reference_id: exported_anomaly.anomaly_reference_id = "\n".join( anomaly.anomaly_reference_id) if anomaly.finding: exported_anomaly.finding = "\n".join(anomaly.finding) yield ExportedCheckResult( metadata=metadata, check_id=checkresult.check_id, anomaly=exported_anomaly) else: yield ExportedCheckResult( metadata=metadata, check_id=checkresult.check_id)
Converts a single CheckResult. Args: metadata: ExportedMetadata to be used for conversion. checkresult: CheckResult to be converted. token: Security token. Yields: Resulting ExportedCheckResult. Empty list is a valid result and means that conversion wasn't possible.
17,237
def select_waveform_generator(approximant): if approximant in waveform.fd_approximants(): return FDomainCBCGenerator elif approximant in waveform.td_approximants(): return TDomainCBCGenerator elif approximant in ringdown.ringdown_fd_approximants: if approximant == : return FDomainMassSpinRingdownGenerator elif approximant == : return FDomainFreqTauRingdownGenerator elif approximant in ringdown.ringdown_td_approximants: if approximant == : return TDomainMassSpinRingdownGenerator elif approximant == : return TDomainFreqTauRingdownGenerator else: raise ValueError("%s is not a valid approximant." % approximant)
Returns the single-IFO generator for the approximant. Parameters ---------- approximant : str Name of waveform approximant. Valid names can be found using ``pycbc.waveform`` methods. Returns ------- generator : (PyCBC generator instance) A waveform generator object. Examples -------- Get a list of available approximants: >>> from pycbc import waveform >>> waveform.fd_approximants() >>> waveform.td_approximants() >>> from pycbc.waveform import ringdown >>> ringdown.ringdown_fd_approximants.keys() Get generator object: >>> from pycbc.waveform.generator import select_waveform_generator >>> select_waveform_generator(waveform.fd_approximants()[0])
17,238
def lat(self): try: for domname, dom in self.domains.items(): try: thislat = dom.axes['lat'].points except: pass return thislat except: raise ValueError("Can't resolve a lat axis.")
Latitude of grid centers (degrees North) :getter: Returns the points of axis ``'lat'`` if available in the process's domains. :type: array :raises: :exc:`ValueError` if no ``'lat'`` axis can be found.
17,239
def show(self, temp_file_name = , **kwargs): assert type(temp_file_name) is str self.save(temp_file_name, **kwargs) return HTML( % (640, 300, temp_file_name))
## Arguments: - 'args' and 'kwargs' will be passed to 'self.save()'
17,240
def angle(self, deg=False): if self.dtype.str[1] != 'c': warnings.warn('angle() is intended for complex-valued timeseries', RuntimeWarning, 1) da = distob.vectorize(np.angle)(self, deg) return _dts_from_da(da, self.tspan, self.labels)
Return the angle of a complex Timeseries Args: deg (bool, optional): Return angle in degrees if True, radians if False (default). Returns: angle (Timeseries): The counterclockwise angle from the positive real axis on the complex plane, with dtype as numpy.float64.
17,241
def get_proficiency_form_for_create(self, objective_id, resource_id, proficiency_record_types): from dlkit.abstract_osid.id.primitives import Id as ABCId from dlkit.abstract_osid.type.primitives import Type as ABCType if not isinstance(objective_id, ABCId): raise errors.InvalidArgument() if not isinstance(resource_id, ABCId): raise errors.InvalidArgument() for arg in proficiency_record_types: if not isinstance(arg, ABCType): raise errors.InvalidArgument() if proficiency_record_types == []: obj_form = objects.ProficiencyForm( objective_bank_id=self._catalog_id, objective_id=objective_id, resource_id=resource_id, catalog_id=self._catalog_id, runtime=self._runtime, proxy=self._proxy) else: obj_form = objects.ProficiencyForm( objective_bank_id=self._catalog_id, record_types=proficiency_record_types, objective_id=objective_id, resource_id=resource_id, catalog_id=self._catalog_id, runtime=self._runtime, proxy=self._proxy) obj_form._for_update = False self._forms[obj_form.get_id().get_identifier()] = not CREATED return obj_form
Gets the proficiency form for creating new proficiencies. A new form should be requested for each create transaction. arg: objective_id (osid.id.Id): the ``Id`` of the ``Objective`` arg: resource_id (osid.id.Id): the ``Id`` of the ``Resource`` arg: proficiency_record_types (osid.type.Type[]): array of proficiency record types return: (osid.learning.ProficiencyForm) - the proficiency form raise: NotFound - ``objective_id`` or ``resource_id`` is not found raise: NullArgument - ``objective_id, resource_id,`` or ``proficieny_record_types`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - unable to get form for requested record types *compliance: mandatory -- This method must be implemented.*
17,242
def AddFXrefRead(self, method, classobj, field): if field not in self._fields: self._fields[field] = FieldClassAnalysis(field) self._fields[field].AddXrefRead(classobj, method)
Add a Field Read to this class :param method: :param classobj: :param field: :return:
17,243
def get_book_progress(self, asin): kbp = self._get_api_call(, % asin) return KindleCloudReaderAPI._kbp_to_progress(kbp)
Returns the progress data available for a book. NOTE: A summary of the two progress formats can be found in the docstring for `ReadingProgress`. Args: asin: The asin of the book to be queried. Returns: A `ReadingProgress` instance corresponding to the book associated with `asin`.
17,244
def collapse(cls, holomap, ranges=None, mode=): if cls.definitions == []: return holomap clone = holomap.clone(shared_data=False) data = zip(ranges[1], holomap.data.values()) if ranges else holomap.data.items() for key, overlay in data: clone[key] = cls.collapse_element(overlay, ranges, mode) return clone
Given a map of Overlays, apply all applicable compositors.
17,245
def claim_invitations(user): invitation_user_id = % ( models.User.EMAIL_INVITATION, user.email_address) invitation_user = models.User.query.get(invitation_user_id) if invitation_user: invited_build_list = list(invitation_user.builds) if not invited_build_list: return db.session.add(user) logging.debug(, len(invited_build_list), invitation_user_id, user) for build in invited_build_list: build.owners.remove(invitation_user) if not build.is_owned_by(user.id): build.owners.append(user) logging.debug(, build.id) save_admin_log(build, invite_accepted=True) else: logging.debug( , user.id, build.id) db.session.add(build) db.session.delete(invitation_user) db.session.commit() db.session.add(current_user)
Claims any pending invitations for the given user's email address.
17,246
def get_ascii(self, show_internal=True, compact=False, attributes=None): (lines, mid) = self._asciiArt(show_internal=show_internal, compact=compact, attributes=attributes) return '\n'.join(lines)
Returns a string containing an ascii drawing of the tree. Parameters: ----------- show_internal: include internal edge names. compact: use exactly one line per tip. attributes: A list of node attributes to show in the ASCII representation.
17,247
def log2_lut(v): res = np.zeros(v.shape, dtype=np.int32) tt = v >> 16 tt_zero = (tt == 0) tt_not_zero = ~tt_zero t_h = tt >> 8 t_zero_h = (t_h == 0) & tt_not_zero t_not_zero_h = ~t_zero_h & tt_not_zero res[t_zero_h] = LogTable256[tt[t_zero_h]] + 16 res[t_not_zero_h] = LogTable256[t_h[t_not_zero_h]] + 24 t_l = v >> 8 t_zero_l = (t_l == 0) & tt_zero t_not_zero_l = ~t_zero_l & tt_zero res[t_zero_l] = LogTable256[v[t_zero_l]] res[t_not_zero_l] = LogTable256[t_l[t_not_zero_l]] + 8 return res
See `this algo <https://graphics.stanford.edu/~seander/bithacks.html#IntegerLogLookup>`__ for computing the log2 of a 32 bit integer using a look up table Parameters ---------- v : int 32 bit integer Returns -------
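The LogTable256 lookup table assumed by the function above can be built as in the referenced bit-hacks page; a sketch.
import numpy as np

LogTable256 = np.zeros(256, dtype=np.int32)
LogTable256[0] = -1           # log2(0) is undefined; sentinel
for i in range(2, 256):
    LogTable256[i] = 1 + LogTable256[i // 2]

print(LogTable256[255], LogTable256[16])  # 7 4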
17,248
def main(): parser = argparse.ArgumentParser() parser.add_argument( , default="md5", help="Digest to use", choices=sorted( getattr(hashlib, , None) or hashlib.algorithms_available)) parser.add_argument( , default="http://example.org", help="URL to load") parser.add_argument( , , type=argparse.FileType(), metavar=, default=, help="Where to write the retrieved conentent") opts = parser.parse_args() request = requestlib.Request(opts.url) reader = requestlib.urlopen(request) stream = written_hash_proxy( opts.output.buffer if hasattr(opts.output, ) else opts.output, name=opts.digest) for chunk in reader: stream.write(chunk) stream.flush() print("{} of {} is {}".format( proxy.state(stream).digest.name, opts.url, proxy.state(stream).digest.hexdigest()))
Main function of this example.
17,249
def open_ioc(fn): parsed_xml = xmlutils.read_xml_no_ns(fn) if not parsed_xml: raise IOCParseError('Error parsing XML.') root = parsed_xml.getroot() metadata_node = root.find('metadata') top_level_indicator = get_top_level_indicator_node(root) parameters_node = root.find('parameters') if parameters_node is None: parameters_node = ioc_et.make_parameters_node() root.append(parameters_node) return root, metadata_node, top_level_indicator, parameters_node
Opens an IOC file, or XML string. Returns the root element, top level indicator element, and parameters element. If the IOC or string fails to parse, an IOCParseError is raised. This is a helper function used by __init__. :param fn: This is a path to a file to open, or a string containing XML representing an IOC. :return: a tuple containing three elementTree Element objects The first element, the root, contains the entire IOC itself. The second element, the top level OR indicator, allows the user to add additional IndicatorItem or Indicator nodes to the IOC easily. The third element, the parameters node, allows the user to quickly parse the parameters.
17,250
def poll(self): start_time = time.time() for agent in self.agents: for collect in agent.reader: (time.time() - start_time) * 1000) collected_data_length = len(self.__collected_data) if not self.first_data_received and self.__collected_data: self.first_data_received = True logger.info("Monitoring received first data.") else: self.send_collected_data() return collected_data_length
Poll agents for data
17,251
def convert_to_python(self, xmlrpc=None): if xmlrpc: return xmlrpc.get(self.name, self.default) elif self.default: return self.default else: return None
Extracts a value for the field from an XML-RPC response.
17,252
def rsdl(self): diff = self.Xf - self.Yfprv return sl.rfl2norm2(diff, self.X.shape, axis=self.cri.axisN)
Compute fixed point residual in Fourier domain.
17,253
def names2dnsrepr(x): if type(x) is str: if x and x[-1] == '\x00': return x.encode() x = [x.encode()] elif type(x) is bytes: if x and x[-1] == 0: return x x = [x] res = [] for n in x: if type(n) is str: n = n.encode() termin = b"\x00" if n.count(b'.') == 0: termin += bytes([0]) n = b"".join(map(lambda y: chr(len(y)).encode()+y, n.split(b"."))) + termin res.append(n) return b"".join(res)
Take as input a list of DNS names or a single DNS name and encode it in DNS format (with possible compression) If a string that is already a DNS name in DNS format is passed, it is returned unmodified. Result is a string. !!! At the moment, compression is not implemented !!!
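A minimal standalone sketch of the length-prefixed label encoding the function produces (no compression).
def dns_encode(name: str) -> bytes:
    out = b"".join(bytes([len(label)]) + label.encode() for label in name.split("."))
    return out + b"\x00"

print(dns_encode("www.example.com"))  # b'\x03www\x07example\x03com\x00'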
17,254
def config_md5(self, source_config): file_contents = source_config + "\n" file_contents = file_contents.encode("UTF-8") return hashlib.md5(file_contents).hexdigest()
Compute MD5 hash of file.
17,255
def _get_kind(self, limit): histo = self.fetch("custom_kind") if histo: histo = [i.split("%_") for i in str(histo).split()] histo = [(int(val, 10), ext) for val, ext in histo] else: histo = traits.get_filetypes(self.fetch("files"), path=operator.attrgetter("path"), size=operator.attrgetter("size")) histo_str = .join(("%d%%_%s" % i).replace(, ) for i in histo) self._make_it_so("setting kind cache %r on" % (histo_str,), ["custom.set"], "kind", histo_str) self._fields["custom_kind"] = histo_str return set(ext for val, ext in histo if ext and val >= limit)
Get a set of dominant file types. The files must contribute at least C{limit}% to the item's total size.
17,256
def tracker_index(): stats = server.stats if stats and stats.snapshots: stats.annotate() timeseries = [] for cls in stats.tracked_classes: series = [] for snapshot in stats.snapshots: series.append(snapshot.classes.get(cls, {}).get(, 0)) timeseries.append((cls, series)) series = [s.overhead for s in stats.snapshots] timeseries.append(("Profiling overhead", series)) if stats.snapshots[0].system_total.data_segment: series = [s.system_total.data_segment - s.tracked_total - s.overhead for s in stats.snapshots] timeseries.append(("Data segment", series)) series = [s.system_total.code_segment for s in stats.snapshots] timeseries.append(("Code segment", series)) series = [s.system_total.stack_segment for s in stats.snapshots] timeseries.append(("Stack segment", series)) series = [s.system_total.shared_segment for s in stats.snapshots] timeseries.append(("Shared memory", series)) else: series = [s.total - s.tracked_total - s.overhead for s in stats.snapshots] timeseries.append(("Other", series)) return dict(snapshots=stats.snapshots, timeseries=timeseries) else: return dict(snapshots=[])
Get tracker overview.
17,257
def configure_gateway( cls, launch_jvm: bool = True, gateway: Union[GatewayParameters, Dict[str, Any]] = None, callback_server: Union[CallbackServerParameters, Dict[str, Any]] = False, javaopts: Iterable[str] = (), classpath: Iterable[str] = ): assert check_argument_types() classpath = classpath if isinstance(classpath, str) else os.pathsep.join(classpath) javaopts = list(javaopts) for match in package_re.finditer(classpath): pkgname = match.group(1) module = import_module(pkgname) module_dir = os.path.dirname(module.__file__) classpath = classpath.replace(match.group(0), module_dir) if gateway is None: gateway = {} if isinstance(gateway, dict): gateway.setdefault(, True) gateway.setdefault(, True) gateway = GatewayParameters(**gateway) if isinstance(callback_server, dict): callback_server = CallbackServerParameters(**callback_server) elif callback_server is True: callback_server = CallbackServerParameters() return launch_jvm, gateway, callback_server, classpath, javaopts
Configure a Py4J gateway. :param launch_jvm: ``True`` to spawn a Java Virtual Machine in a subprocess and connect to it, ``False`` to connect to an existing Py4J enabled JVM :param gateway: either a :class:`~py4j.java_gateway.GatewayParameters` object or a dictionary of keyword arguments for it :param callback_server: callback server parameters or a boolean indicating if a callback server is wanted :param javaopts: options passed to Java itself :param classpath: path or iterable of paths to pass to the JVM launcher as the class path
17,258
def avl_join_dir_recursive(t1, t2, node, direction): other_side = 1 - direction if _DEBUG_JOIN_DIR: print( % (direction,)) ascii_tree(t1, ) ascii_tree(t2, ) if direction == 0: large, small = t2, t1 elif direction == 1: large, small = t1, t2 else: assert False spine = large[direction] rest = large[other_side] hsmall = height(small) hspine = height(spine) hrest = height(rest) if _DEBUG_JOIN_DIR: ascii_tree(spine, ) ascii_tree(rest, ) ascii_tree(small, ) if hspine <= hsmall + 1: t_ = avl_new_top(small, spine, node, direction) if _DEBUG_JOIN_DIR: print() ascii_tree(t_, ) if height(t_) <= hrest + 1: if _DEBUG_JOIN_DIR: print() return avl_new_top(t_, rest, large, direction) else: if _DEBUG_JOIN_DIR: print() t_rotate = avl_rotate_single(t_, direction) if _DEBUG_JOIN_DIR: ascii_tree(t_rotate, ) EulerTourTree(root=t_rotate)._assert_nodes() t_merge = avl_new_top(rest, t_rotate, large, other_side) if _DEBUG_JOIN_DIR: ascii_tree(t_merge, ) EulerTourTree(root=t_merge)._assert_nodes() new_root = avl_rotate_single(t_merge, other_side) if _DEBUG_JOIN_DIR: ascii_tree(new_root, ) EulerTourTree(root=new_root)._assert_nodes() return new_root else: if _DEBUG_JOIN_DIR: print() if direction == 0: t_ = avl_join_dir_recursive(small, spine, node, direction) elif direction == 1: t_ = avl_join_dir_recursive(spine, t2, node, direction) else: raise AssertionError() t__ = avl_new_top(t_, rest, large, direction) if height(t_) <= hrest + 1: if _DEBUG_JOIN_DIR: print() return t__ else: if _DEBUG_JOIN_DIR: print() return avl_rotate_single(t__, other_side) assert False,
Recursive version of join_left and join_right TODO: make this iterative using a stack
17,259
def popen(fn, *args, **kwargs) -> subprocess.Popen: args = popen_encode(fn, *args, **kwargs) logging.getLogger(__name__).debug('Starting process: %s', args) p = subprocess.Popen(args) return p
Please ensure you're not killing the process before it has started properly. :param fn: :param args: :param kwargs: :return:
17,260
def get_tunnel_info_input_filter_type_filter_by_dip_dest_ip(self, **kwargs): config = ET.Element("config") get_tunnel_info = ET.Element("get_tunnel_info") config = get_tunnel_info input = ET.SubElement(get_tunnel_info, "input") filter_type = ET.SubElement(input, "filter-type") filter_by_dip = ET.SubElement(filter_type, "filter-by-dip") dest_ip = ET.SubElement(filter_by_dip, "dest-ip") dest_ip.text = kwargs.pop('dest_ip') callback = kwargs.pop('callback', self._callback) return callback(config)
Auto Generated Code
17,261
def delete(self): return self._client._delete( self.__class__.base_url( self.sys['space'].id, self.sys['id'], environment_id=self._environment_id ) )
Deletes the resource.
17,262
def bsinPoints(pb, pe): v = pe - pb assert v.y == 0, "begin and end points must have same y coordinate" f = abs(v) * 0.5 / math.pi cp1 = 5.34295228e-01 cp2 = 1.01474288e+00 y_ampl = (0, f) y_cp1 = (0, f * cp1) y_cp2 = (0, f * cp2) p0 = pb p4 = pe p1 = pb + v * 0.25 - y_ampl p2 = pb + v * 0.5 p3 = pb + v * 0.75 + y_ampl k1 = pb + v * (1./12.) - y_cp1 k2 = pb + v * (2./12.) - y_cp2 k3 = pb + v * (4./12.) - y_cp2 k4 = pb + v * (5./12.) - y_cp1 k5 = pb + v * (7./12.) + y_cp1 k6 = pb + v * (8./12.) + y_cp2 k7 = pb + v * (10./12.) + y_cp2 k8 = pb + v * (11./12.) + y_cp1 return p0, k1, k2, p1, k3, k4, p2, k5, k6, p3, k7, k8, p4
Return Bezier control points, when pb and pe stand for a full period from (0,0) to (2*pi, 0), respectively, in the user's coordinate system. The returned points can be used to draw up to four Bezier curves for the complete phase of the sine function graph (0 to 360 degrees).
17,263
def _import_submodules( __all__, __path__, __name__, include=None, exclude=None, include_private_modules=False, require__all__=True, recursive=True): mod = sys.modules[__name__] if exclude is None: exclude = [] for (_, submodname, ispkg) in pkgutil.iter_modules(path=__path__): if submodname.startswith() and not include_private_modules: continue submod = importlib.import_module( + submodname, __name__) if submod.__name__ in exclude: continue if include is not None: if submod.__name__ not in include: continue if not hasattr(submod, ): setattr(submod, , []) if recursive and ispkg: _import_submodules( submod.__all__, submod.__path__, submod.__name__) setattr(mod, submodname, submod) for obj_name in submod.__all__: obj = getattr(submod, obj_name) if hasattr(mod, obj_name): existing_obj = getattr(mod, obj_name) if existing_obj is obj: continue else: raise ImportError( "{mod}.{attr} points to {submod1}.{attr}. " "Cannot set to {submod2}.{attr}".format( mod=mod.__name__, attr=obj_name, submod1=existing_obj.__module__, submod2=obj.__module__)) setattr(mod, obj_name, obj) __all__.append(obj_name) __all__.sort()
Import all available submodules, all objects defined in the `__all__` lists of those submodules, and extend `__all__` with the imported objects. Args: __all__ (list): The list of public objects in the "root" module __path__ (str): The path where the ``__init__.py`` file for the "root" module is located in the file system (every module has a global `__path__` variable which should be passed here) __name__ (str): The full name of the "root" module. Again, every module has a global `__name__` variable. include (list or None): If not None, list of full module names to be included. That is, every module not in the `include` list is ignored exclude (list or None): List of full module names to be excluded from the (recursive) input include_private_modules (bool): Whether to include modules whose name starts with an underscore recursive (bool): Whether to recursively act on submodules of the "root" module. This will make sub-submodules available both in the submodule, and in the "root" module
17,264
def process_query(self): self.query = wt(self.query) self.processed_query = [] for word in self.query: if word not in self.stop_words and word not in self.punctuation: self.processed_query.append(self.stemmer.stem(word))
Q.process_query() -- processes the user query, by tokenizing and stemming words.
17,265
def destination(self, value): if value is not None and (not isinstance(value, tuple) or len(value) != 2): raise AttributeError self._destination = value
Set the destination of the message. :type value: tuple :param value: (ip, port) :raise AttributeError: if value is not an (ip, port) tuple.
17,266
def get_order(self, order_id): if order_id in self.blotter.orders: return self.blotter.orders[order_id].to_api_obj()
Lookup an order based on the order id returned from one of the order functions. Parameters ---------- order_id : str The unique identifier for the order. Returns ------- order : Order The order object.
17,267
def put(self, url, data=None, verify=False, headers=None, proxies=None, timeout=60, **kwargs): self.log.debug("Put a request to %s with data: %s", url, data) response = requests.put(url, data=data, verify=verify, headers=headers, proxies=proxies, timeout=timeout, **kwargs) if response.status_code not in [200, 201]: self.log.error('PUT request to %s failed: %s', url, response.content) response.raise_for_status() return response
Sends a PUT request. Refactor from requests module :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple. :type timeout: float or tuple :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response
17,268
def list_flavors(self, limit=None, marker=None): return self._flavor_manager.list(limit=limit, marker=marker)
Returns a list of all available Flavors.
17,269
def response(uri, method, res, token=, keyword=, content=, raw_flag=False): if method == or (method == and not token): data = res.read() data_utf8 = data.decode() if token: datas = json.loads(data_utf8) else: token = json.loads(data_utf8)[] return token if keyword == : record = search_record(datas, )[0] del record[] record[] = int(record[]) c = JSONConverter(content[]) new_record = c.get_soa(record, content) return record, new_record elif keyword: records = search_record(datas, keyword) datas.update({"records": records}) if uri.split()[3] == : if len(uri.split()) == 5: utils.pretty_print(datas) else: for data in datas: utils.pretty_print(datas) else: if raw_flag: return datas else: if len(uri.split()) > 1: domain = uri.split()[1] else: domain = utils.pretty_print(datas, keyword, domain) else: data = res.read() print(data)
Response handler for a tonicdns_client request Arguments: uri: TonicDNS API URI method: TonicDNS API request method res: Response from the request to the TonicDNS API token: TonicDNS API token keyword: Processing keyword content: JSON data raw_flag: if True, return the raw response data; if False, pretty-print it
17,270
def restart_listener(self, topics): if self.listener is not None: if self.listener.running: self.stop() self.__init__(topics=topics)
Restart listener after configuration update.
17,271
def is_valid_preview(preview): if not preview: return False if mimetype(preview) not in [ExportMimeType.PNG, ExportMimeType.PDF]: return False return True
Verifies that the preview is a valid filetype
17,272
def delete_records(self, domain, name, record_type=None): records = self.get_records(domain) if records is None: return False return True
Deletes records by name. You can also add a record type, which will only delete records with the specified type/name combo. If no record type is specified, ALL records that have a matching name will be deleted. This is haphazard functionality. I DO NOT recommend using this in Production code, as your entire DNS record set could be deleted, depending on the fickleness of GoDaddy. Unfortunately, they do not expose a proper "delete record" call, so there isn't much one can do here... :param domain: the domain to delete records from :param name: the name of records to remove :param record_type: the type of records to remove :return: True if no exceptions occurred
17,273
def occurrences_after(self, after=None): from schedule.models import Occurrence if after is None: after = timezone.now() occ_replacer = OccurrenceReplacer( Occurrence.objects.filter(event__in=self.events)) generators = [event._occurrences_after_generator(after) for event in self.events] occurrences = [] for generator in generators: try: heapq.heappush(occurrences, (next(generator), generator)) except StopIteration: pass while occurrences: generator = occurrences[0][1] try: next_occurrence = heapq.heapreplace(occurrences, (next(generator), generator))[0] except StopIteration: next_occurrence = heapq.heappop(occurrences)[0] yield occ_replacer.get_occurrence(next_occurrence)
It is often useful to know what the next occurrence is given a list of events. This function produces a generator that yields the most recent occurrence after the date ``after`` from any of the events in ``self.events``
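The merging trick used above, keeping a heap of (next_value, generator) pairs, works for any set of individually sorted streams; a standalone sketch (values are assumed distinct so tuples never compare generators).
import heapq

def merge(*iterables):
    heap = []
    for it in map(iter, iterables):
        try:
            heapq.heappush(heap, (next(it), it))
        except StopIteration:
            pass
    while heap:
        value, it = heap[0]
        try:
            heapq.heapreplace(heap, (next(it), it))
        except StopIteration:
            heapq.heappop(heap)
        yield value

print(list(merge([1, 4, 7], [2, 5], [3, 6, 8])))  # [1, 2, 3, 4, 5, 6, 7, 8]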
17,274
def timer_expired(self): if self._http_conn.sock is not None: self._shutdown = True self._http_conn.sock.shutdown(socket.SHUT_RDWR) else: self._timer.cancel() self._timer = threading.Timer(self._retrytime, HTTPTimeout.timer_expired, [self]) self._timer.start()
This method is invoked in context of the timer thread, so we cannot directly throw exceptions (we can, but they would be in the wrong thread), so instead we shut down the socket of the connection. When the timeout happens in early phases of the connection setup, there is no socket object on the HTTP connection yet, in that case we retry after the retry duration, indefinitely. So we do not guarantee in all cases that the overall operation times out after the specified timeout.
17,275
def make_order_string(cls, order_specification): registry = get_current_registry() visitor_cls = registry.getUtility(IOrderSpecificationVisitor, name=EXPRESSION_KINDS.CQL) visitor = visitor_cls() order_specification.accept(visitor) return str(visitor.expression)
Converts the given order specification to a CQL order expression.
17,276
def write_contents(self, table, reader): f = self.FileObjFaker(table, reader.read(table), self.process_row, self.verbose) self.copy_from(f, '"%s"' % table.name, ['"%s"' % c['name'] for c in table.columns])
Write the contents of `table` :Parameters: - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write. - `reader`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader` object that allows reading from the data source. Returns None
17,277
def support_autoupload_param_hostip(self, **kwargs): config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") autoupload_param = ET.SubElement(support, "autoupload-param") hostip = ET.SubElement(autoupload_param, "hostip") hostip.text = kwargs.pop('hostip') callback = kwargs.pop('callback', self._callback) return callback(config)
Auto Generated Code
17,278
def get_template_by_name(name, **kwargs):
    try:
        # the joinedload_all() argument was stripped from this record
        tmpl_i = db.DBSession.query(Template).filter(
            Template.name == name).options(joinedload_all()).one()
        return tmpl_i
    except NoResultFound:
        log.info("%s is not a valid identifier for a template", name)
        # the original error message was stripped; this is a plausible reconstruction
        raise HydraError('Template %s not found' % name)
Get a specific resource template, by name.
17,279
def request(self, path, method='GET', headers=None, **kwargs):
    headers = {} if headers is None else headers.copy()
    headers["User-Agent"] = "Bugsy"
    kwargs['headers'] = headers
    # the URL format string was stripped; joining the base URL and path with
    # '/' is the likely original
    url = '%s/%s' % (self.bugzilla_url, path)
    return self._handle_errors(self.session.request(method, url, **kwargs))
Perform a HTTP request. Given a relative Bugzilla URL path, an optional request method, and arguments suitable for requests.Request(), perform a HTTP request.
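A short usage sketch, assuming a Bugsy client object exposing this method; the credentials, endpoints, and bug number are illustrative:

bugzilla = Bugsy(username='user', password='secret')     # illustrative credentials
bug_json = bugzilla.request('bug/12345')                  # GET is assumed as the default method
bugzilla.request('bug/12345/comment', method='POST',
                 json={'comment': 'Fixed in the latest build.'})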
17,280
def gpg_interactive_input(self, sub_keys_number):
    deselect_sub_key = "key 0\n"
    _input = self._main_key_command()
    for sub_key_number in range(1, sub_keys_number + 1):
        _input += self._sub_key_command(sub_key_number) + deselect_sub_key
    return "%ssave\n" % _input
Processes a series of inputs normally supplied on --edit-key but passed through stdin. This ensures that no other --edit-key command is actually passing through.
17,281
def importobj(modpath, attrname):
    # the fromlist literal was stripped from this record; ['__doc__'] is the
    # usual idiom so that __import__ returns the submodule rather than the
    # top-level package
    module = __import__(modpath, None, None, ['__doc__'])
    if not attrname:
        return module
    retval = module
    names = attrname.split(".")
    for x in names:
        retval = getattr(retval, x)
    return retval
imports a module, then resolves the attrname on it
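For example, resolving a dotted attribute path at runtime (the module and attribute names are illustrative):

join = importobj('os.path', 'join')        # equivalent to: from os.path import join
also_join = importobj('os', 'path.join')   # a dotted attrname is resolved step by step
print(join('tmp', 'file.txt'))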
17,282
def _get_connection(self):
    if getattr(self, '_connection', None):
        logger.debug('connection already exists, reusing it')
    else:
        dsn = self._dsn
        # NOTE: the string literals in this dsn-normalisation branch (and the
        # original debug messages) were stripped from this record; the empty
        # strings below are placeholders, not the original values
        if dsn == '':
            dsn = ''
        else:
            dsn = dsn.replace('', '')
        logger.debug('connecting to sqlite db {} (dsn {})'.format(dsn, self._dsn))
        self._connection = apsw.Connection(dsn)
    return self._connection
Returns connection to the sqlite db. Returns: connection to the sqlite db that stores mpr data.
17,283
def __wrap(self, method_name):
    return lambda *args, **kwargs: Facade(
        self.__cache(method_name, *args, **kwargs),
        list(self.__exclusion_list))
This method actually does the wrapping. When it's given a method to copy it returns that method with facilities to log the call so it can be repeated. :param str method_name: The name of the method precisely as it's called on the object to wrap. :rtype lambda function:
17,284
def check_input(prolog_file):
    if prolog_file is None:
        return
    for pred in illegal_predicates:
        if type(pred) == tuple:
            print_name = pred[1]
            pred = pred[0]
        else:
            print_name = pred
        # NOTE: the regex delimiters and the error message were stripped from
        # this record; word boundaries and a generic message are stand-ins
        if re.search(r'\b' + pred + r'\b', prolog_file):
            raise Exception('Illegal predicate used: %s' % print_name)
Check for illegal predicates (like reading/writing, opening sockets, etc).
17,285
def _learnPhase1(self, activeColumns, readOnly=False):
    # the 't' / 't-1' keys are recovered from the docstring below
    self.lrnActiveState['t'].fill(0)
    numUnpredictedColumns = 0
    for c in activeColumns:
        predictingCells = numpy.where(self.lrnPredictedState['t-1'][c] == 1)[0]
        numPredictedCells = len(predictingCells)
        assert numPredictedCells <= 1
        # NOTE: the loop body that selects the learning cell, builds segUpdate,
        # and increments numUnpredictedColumns was lost in extraction; only the
        # tail of the loop survives in this record
        segUpdate.sequenceSegment = True
        self._adaptSegment(segUpdate)
    numBottomUpColumns = len(activeColumns)
    if numUnpredictedColumns < numBottomUpColumns / 2:
        return True
    else:
        return False
Compute the learning active state given the predicted state and the bottom-up input. :param activeColumns: list of active bottom-ups :param readOnly: True if being called from backtracking logic. This tells us not to increment any segment duty cycles or queue up any updates. :returns: True if the current input was sufficiently predicted, OR if we started over on startCells. False indicates that the current input was NOT predicted well enough to consider it as "inSequence" This looks at: - @ref lrnActiveState['t-1'] - @ref lrnPredictedState['t-1'] This modifies: - @ref lrnActiveState['t'] - @ref lrnActiveState['t-1']
17,286
def purge_old_user_tasks():
    limit = now() - settings.USER_TASKS_MAX_AGE
    UserTaskStatus.objects.filter(created__lt=limit).delete()
Delete any UserTaskStatus and UserTaskArtifact records older than ``settings.USER_TASKS_MAX_AGE``. Intended to be run as a scheduled task.
17,287
def copy_ifcfg_file(source_interface, dest_interface): log = logging.getLogger(mod_logger + ) if not isinstance(source_interface, basestring): msg = log.error(msg) raise TypeError(msg) if not isinstance(dest_interface, basestring): msg = log.error(msg) raise TypeError(msg) network_script = source_file = network_script + source_interface dest_file = network_script + dest_interface command = [, , source_file, dest_file] try: result = run_command(command) code = result[] except CommandError: _, ex, trace = sys.exc_info() msg = .format( s=source_interface, d=dest_interface, e=str(ex)) raise OSError, msg, trace log.info(.format(c=code)) if code != 0: msg = .format(s=source, d=dest_file) log.error(msg) raise OSError(msg) try: sed(file_path=dest_file, pattern=, replace_str=.format(i=dest_interface)) except CommandError: _, ex, trace = sys.exc_info() msg = .format( d=dest_file, e=str(ex)) log.error(msg) raise CommandError, msg, trace log.info(.format(d=dest_file)) log.info() time.sleep(10) retry_time = 10 max_retries = 10 for i in range(1, max_retries+2): if i > max_retries: msg = .format(m=max_retries) log.error(msg) raise OSError(msg) log.info(.format(i=i, m=max_retries)) try: service_network_restart() except CommandError: _, ex, trace = sys.exc_info() log.warn( .format(i=i, m=max_retries, t=retry_time, e=str(ex))) time.sleep(retry_time) else: log.info() break log.info(.format(d=dest_interface))
Copies an existing ifcfg network script to another :param source_interface: String (e.g. 1) :param dest_interface: String (e.g. 0:0) :return: None :raises TypeError, OSError
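A hedged usage example following the docstring's own argument examples; the interface suffixes are illustrative and the resulting ifcfg filenames depend on the script-path prefix used inside the function:

# clone the network script for interface suffix '1' into a new alias '0:0'
# and let the function restart networking afterwards
copy_ifcfg_file('1', '0:0')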
17,288
def fig_intro(params, ana_params, T=[800, 1000], fraction=0.05, rasterized=False): ana_params.set_PLOS_2column_fig_style(ratio=0.5) networkSim = CachedNetwork(**params.networkSimParams) if analysis_params.bw: networkSim.colors = phlp.get_colors(len(networkSim.X)) fig = plt.figure() gs = gridspec.GridSpec(3, 4) fig.subplots_adjust(left=0.05, right=0.95, wspace=0.5, hspace=0.) ax0_1 = fig.add_subplot(gs[:, 0], frameon=False) ax0_1.set_title(, va=) network_sketch(ax0_1, yscaling=1.3) ax0_1.xaxis.set_ticks([]) ax0_1.yaxis.set_ticks([]) phlp.annotate_subplot(ax0_1, ncols=4, nrows=1, letter=, linear_offset=0.065) ax1 = fig.add_subplot(gs[:, 1], frameon=True) phlp.remove_axis_junk(ax1) phlp.annotate_subplot(ax1, ncols=4, nrows=1, letter=, linear_offset=0.065) x, y = networkSim.get_xy(T, fraction=fraction) networkSim.plot_raster(ax1, T, x, y, markersize=0.2, marker=, alpha=1.,legend=False, pop_names=True, rasterized=rasterized) ax1.set_ylabel() ax1.xaxis.set_major_locator(plt.MaxNLocator(4)) ax1.set_title(, va=) a = ax1.axis() ax1.vlines(x[][0], a[2], a[3], , lw=0.25) ax2 = fig.add_subplot(gs[:, 2], frameon=False) ax2.xaxis.set_ticks([]) ax2.yaxis.set_ticks([]) plot_population(ax2, params, isometricangle=np.pi/24, plot_somas=False, plot_morphos=True, num_unitsE=1, num_unitsI=1, clip_dendrites=True, main_pops=True, title=, rasterized=rasterized) ax2.set_title(, va=, fontweight=) phlp.annotate_subplot(ax2, ncols=4, nrows=1, letter=, linear_offset=0.065) ax3 = fig.add_subplot(gs[:, 3], frameon=True) phlp.remove_axis_junk(ax3) plot_signal_sum(ax3, params, fname=os.path.join(params.savefolder, ), unit=, vlimround=0.8, T=T, ylim=[ax2.axis()[2], ax2.axis()[3]], rasterized=False) ax3.set_title(, va=) ax3.xaxis.set_major_locator(plt.MaxNLocator(4)) phlp.annotate_subplot(ax3, ncols=4, nrows=1, letter=, linear_offset=0.065) a = ax3.axis() ax3.vlines(x[][0], a[2], a[3], , lw=0.25) ax = plt.gca() ax.annotate("", xy=(0.27, 0.5), xytext=(.24, 0.5), xycoords="figure fraction", arrowprops=dict(facecolor=, arrowstyle=), ) ax.annotate("", xy=(0.52, 0.5), xytext=(.49, 0.5), xycoords="figure fraction", arrowprops=dict(facecolor=, arrowstyle=), ) ax.annotate("", xy=(0.78, 0.5), xytext=(.75, 0.5), xycoords="figure fraction", arrowprops=dict(facecolor=, arrowstyle=), ) return fig
set up plot for introduction
17,289
def log_queries(recipe):
    # the log format string was stripped from this record; the message below is
    # a plausible reconstruction, while q['time'] follows the docstring ("only time")
    logger.debug(
        'queries for recipe %s took %s seconds',
        recipe.slug,
        sum([float(q['time']) for q in connection.queries]))
Logs recipe instance SQL queries (actually, only time).
17,290
def move_part_instance(part_instance, target_parent, part_model, name=None,
                       include_children=True):
    if not name:
        name = part_instance.name
    moved_model = get_mapping_dictionary()[part_model.id]
    if moved_model.multiplicity == Multiplicity.ONE:
        moved_instance = moved_model.instances(parent_id=target_parent.id)[0]
        map_property_instances(part_instance, moved_instance)
        moved_instance = update_part_with_properties(
            part_instance, moved_instance, name=str(name))
    elif moved_model.multiplicity == Multiplicity.ONE_MANY:
        if target_parent.id not in get_edited_one_many():
            moved_instance = moved_model.instances(
                parent_id=target_parent.id)[0]
            map_property_instances(part_instance, moved_instance)
            moved_instance = update_part_with_properties(
                part_instance, moved_instance, name=str(name))
            get_edited_one_many().append(target_parent.id)
        else:
            moved_instance = target_parent.add(name=part_instance.name,
                                               model=moved_model,
                                               suppress_kevents=True)
            map_property_instances(part_instance, moved_instance)
            moved_instance = update_part_with_properties(
                part_instance, moved_instance, name=str(name))
    else:
        moved_instance = target_parent.add(name=name, model=moved_model,
                                           suppress_kevents=True)
        map_property_instances(part_instance, moved_instance)
        moved_instance = update_part_with_properties(
            part_instance, moved_instance, name=str(name))
    if include_children:
        for sub_instance in part_instance._cached_children:
            move_part_instance(part_instance=sub_instance,
                               target_parent=moved_instance,
                               part_model=sub_instance.model(),
                               name=sub_instance.name,
                               include_children=True)
    return moved_instance
Move the `Part` instance to target parent and updates the properties based on the original part instance. .. versionadded:: 2.3 :param part_instance: `Part` object to be moved :type part_instance: :class:`Part` :param part_model: `Part` object representing the model of part_instance :type part_model: :class: `Part` :param target_parent: `Part` object under which the desired `Part` is moved :type target_parent: :class:`Part` :param name: how the moved top-level `Part` should be called :type name: basestring :param include_children: True to move also the descendants of `Part`. If False, the children will be lost. :type include_children: bool :return: moved :class: `Part` instance
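A sketch of a call, assuming a pykechain Client and that the model mapping used by get_mapping_dictionary() has already been populated by the surrounding move/copy routine; part names are illustrative:

instance = client.part(name='Bike')                       # instance to move (illustrative)
new_parent = client.part(name='New parent instance')
moved = move_part_instance(part_instance=instance,
                           target_parent=new_parent,
                           part_model=instance.model(),
                           name='Bike (moved)',
                           include_children=True)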
17,291
def replace_free_shipping_promotion_by_id(cls, free_shipping_promotion_id,
                                           free_shipping_promotion, **kwargs):
    # the stripped dict keys are reconstructed from the usual swagger-codegen
    # pattern and from the docstring's async=True example
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._replace_free_shipping_promotion_by_id_with_http_info(
            free_shipping_promotion_id, free_shipping_promotion, **kwargs)
    else:
        (data) = cls._replace_free_shipping_promotion_by_id_with_http_info(
            free_shipping_promotion_id, free_shipping_promotion, **kwargs)
        return data
Replace FreeShippingPromotion Replace all attributes of FreeShippingPromotion This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_free_shipping_promotion_by_id(free_shipping_promotion_id, free_shipping_promotion, async=True) >>> result = thread.get() :param async bool :param str free_shipping_promotion_id: ID of freeShippingPromotion to replace (required) :param FreeShippingPromotion free_shipping_promotion: Attributes of freeShippingPromotion to replace (required) :return: FreeShippingPromotion If the method is called asynchronously, returns the request thread.
17,292
def FindSolFile(shot=0, t=0, Dt=None, Mesh=, Deg=2, Deriv=, Sep=True, Pos=True, OutPath=): assert None in [t,Dt] and not (t is None and Dt is None), "Arg t or Dt must be None, but not both !" LF = [ff for ff in os.listdir(OutPath) if in ff] LF = [ff for ff in LF if all([ss in ff for ss in [+str(shot)+, +Mesh++str(Deg), +Deriv++str(Sep)++str(Pos)]])] if len(LF)==0: print("No matching Sol2D file in ", OutPath) out = None LDTstr = [ff[ff.index()+3:ff.index()] for ff in LF] LDTstr = [(ss[:7],ss[8:]) for ss in LDTstr] if t is None: LF = [LF[ii] for ii in range(0,len(LF)) if LDTstr[ii][0]++LDTstr[ii][1]==.format(Dt[0],Dt[1])] elif Dt is None: LF = [LF[ii] for ii in range(0,len(LF)) if t>=float(LDTstr[ii][0]) and t<=float(LDTstr[ii][1])] if len(LF)==0: print("No matching Sol2D file in ", OutPath) out = None elif len(LF)>1: print("Several matching Sol2D files in ", OutPath) print(LF) out = None else: out = LF[0] return out
Identify the good Sol2D saved file in a given folder (OutPath), based on key ToFu criteria. When trying to load a Sol2D object (i.e.: solution of a tomographic inversion), it may be handy to provide the key parameters (shot, time, mesh name, degree of basis functions, regularisation functional) instead of copy-pasting the full file name. This function identifies, within the relevant repository (OutPath), the files matching the provided criteria. This function only works if the automatically generated default SaveName was preserved for the Sol2D objects. Parameters ---------- shot : int A shot number t : None / int / float A time value that must be contained in the time interval of the Sol2D file, must be provided if Dt is None Dt : None / iterable A time interval that the Sol2D file has to match, must be provided if t is None Mesh : str The name of the mesh that was used to compute the inversion Deg : int The degree of the b-splines (LBF2D object) that were used to discretize the solution Deriv : str The flag indicating the regularization functional that was used for the inversion Sep : bool The flag value that was used for indicating whether the boundary constraint at the separatrix should be considered Pos : bool The flag value that was used for indicating whether the positivity constraint was considered Outpath : str The absolute path of the repository where to look Returns ------- out : None / str The matching file name, if any
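A short usage sketch; all argument values below are illustrative, not taken from a real repository:

fname = FindSolFile(shot=42000, t=36.5, Mesh='MyMesh', Deg=1,
                    Deriv='D1N2', Sep=True, Pos=True,
                    OutPath='/path/to/Outputs/')
if fname is not None:
    print('Matching Sol2D file:', fname)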
17,293
def invert_inventory(inventory):
    inverted = dict()
    for binding, items in inventory.iteritems():
        for item in items:
            if isinstance(item, dict):
                item = item.keys()[0]
            item = str(item)
            if item in inverted:
                echo("Warning: Duplicate item found, "
                     "for \"%s: %s\"" % (binding, item))
                continue
            inverted[item] = binding
    return inverted
Return {item: binding} from {binding: item}. Protects against items with additional metadata and items whose type is a number. Returns: Dictionary of inverted inventory
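A worked example of the inversion (inventory contents are illustrative); note that when an item appears under two bindings, which binding wins depends on dict iteration order:

inventory = {
    'compositing': ['maya', {'nuke': {'version': 11}}],
    'tracking': ['3dequalizer', 'maya'],   # duplicate 'maya' is skipped with a warning
}
print(invert_inventory(inventory))
# e.g. -> {'maya': 'compositing', 'nuke': 'compositing', '3dequalizer': 'tracking'}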
17,294
def get_std_end_date(self):
    _, second = self._val
    if second != datetime.max:
        return second.strftime("%Y-%m-%d %H:%M:%S")
    else:
        return ""
If the date is custom, return the end datetime with the format %Y-%m-%d %H:%M:%S. Else, returns "".
17,295
def stringify(req, resp):
    if isinstance(resp.body, dict):
        try:
            resp.body = json.dumps(resp.body)
        except NameError:
            # note: json.dumps normally raises TypeError for unserialisable
            # bodies; the original source catches a (misspelled) NameError
            resp.status = falcon.HTTP_500
Dumps all valid JSONs. This is the last after hook.
17,296
def refresh(self):
    grant_type = "https://oauth.reddit.com/grants/installed_client"
    self._request_token(grant_type=grant_type, device_id=self._device_id)
Obtain a new access token.
17,297
def log_event(cls, event, text=None):
    if not text:
        if event.get_event_code() == win32.EXCEPTION_DEBUG_EVENT:
            what = event.get_exception_description()
            # the '%' format strings in this record were stripped; the ones
            # below are plausible reconstructions of the log wording
            if event.is_first_chance():
                what = '%s (first chance)' % what
            else:
                what = '%s (second chance)' % what
            try:
                address = event.get_fault_address()
            except NotImplementedError:
                address = event.get_exception_address()
        else:
            what = event.get_event_name()
            address = event.get_thread().get_pc()
        process = event.get_process()
        label = process.get_label_at_address(address)
        address = HexDump.address(address, process.get_bits())
        if label:
            where = '%s (%s)' % (address, label)
        else:
            where = address
        text = '%s at %s' % (what, where)
    text = 'pid %d tid %d: %s' % (event.get_pid(), event.get_tid(), text)
    return cls.log_text(text)
Log lines of text associated with a debug event. @type event: L{Event} @param event: Event object. @type text: str @param text: (Optional) Text to log. If no text is provided the default is to show a description of the event itself. @rtype: str @return: Log line.
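A hedged sketch of a call site; the enclosing class is referred to as Logger only as an assumption, and the callback wiring is illustrative:

# inside a winappdbg debug-event callback
def my_event_callback(event):
    line = Logger.log_event(event)   # default text describes the event itself
    print(line)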
17,298
def memoize(max_cache_size=1000):
    def wrapper(f):
        @wraps(f)
        def fn(*args, **kwargs):
            if kwargs:
                key = (args, tuple(kwargs.items()))
            else:
                key = args
            try:
                return fn.cache[key]
            except KeyError:
                if fn.count >= max_cache_size:
                    fn.cache = {}
                    fn.count = 0
                result = f(*args, **kwargs)
                fn.cache[key] = result
                fn.count += 1
                return result
            except TypeError:
                return f(*args, **kwargs)
        fn.cache = {}
        fn.count = 0
        return fn
    return wrapper
Python 2.4 compatible memoize decorator. It creates a cache that has a maximum size. If the cache exceeds the max, it is thrown out and a new one made. With such behavior, it is wise to set the cache just a little larger that the maximum expected need. Parameters: max_cache_size - the size to which a cache can grow
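Usage follows the standard decorator pattern; the decorated function below is illustrative:

@memoize(max_cache_size=500)
def slow_fib(n):
    if n < 2:
        return n
    return slow_fib(n - 1) + slow_fib(n - 2)

print(slow_fib(30))   # first call populates fn.cache
print(slow_fib(30))   # served from the cache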
17,299
def eth_getTransactionByBlockNumberAndIndex(self, block=BLOCK_TAG_LATEST,
                                            index=0):
    block = validate_block(block)
    # the stripped RPC method name is recovered from the function name and the
    # JSON-RPC reference in the docstring
    result = yield from self.rpc_call('eth_getTransactionByBlockNumberAndIndex',
                                      [block, hex(index)])
    return result
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionbyblocknumberandindex :param block: Block tag or number (optional) :type block: int or BLOCK_TAGS :param index: Index position (optional) :type index: int :return: transaction :rtype: dict or None
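A hedged usage sketch for an asyncio-based client exposing this generator-style coroutine; the client construction and block number are illustrative:

import asyncio

@asyncio.coroutine
def show_tx(client):
    # block 4000000, first transaction; a block tag such as 'latest' is also accepted
    tx = yield from client.eth_getTransactionByBlockNumberAndIndex(4000000, 0)
    print(tx['hash'] if tx else 'no transaction at that index')

loop = asyncio.get_event_loop()
loop.run_until_complete(show_tx(client))   # `client` is an assumed JSON-RPC client instance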